diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..4acd06b141088d6309646081a22991d25b76f220
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1 @@
+config.py
diff --git a/config.py b/config.py
deleted file mode 100644
index 76ec86002cad574187dc04d2eb7a3bde511013ef..0000000000000000000000000000000000000000
--- a/config.py
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/env python3
-
-DB_HOST="localhost"
-DB_USER="user"
-DB_NAME="db_tp2"
-DB_PASS="user"
diff --git a/README.md b/monEnvTP/README.md
similarity index 100%
rename from README.md
rename to monEnvTP/README.md
diff --git a/README_en.md b/monEnvTP/README_en.md
similarity index 100%
rename from README_en.md
rename to monEnvTP/README_en.md
diff --git a/monEnvTP/bin/Activate.ps1 b/monEnvTP/bin/Activate.ps1
new file mode 100644
index 0000000000000000000000000000000000000000..2fb3852c3cf1a565ccf813f876a135ecf6f99712
--- /dev/null
+++ b/monEnvTP/bin/Activate.ps1
@@ -0,0 +1,241 @@
+<#
+.Synopsis
+Activate a Python virtual environment for the current PowerShell session.
+
+.Description
+Pushes the python executable for a virtual environment to the front of the
+$Env:PATH environment variable and sets the prompt to signify that you are
+in a Python virtual environment. Makes use of the command line switches as
+well as the `pyvenv.cfg` file values present in the virtual environment.
+
+.Parameter VenvDir
+Path to the directory that contains the virtual environment to activate. The
+default value for this is the parent of the directory that the Activate.ps1
+script is located within.
+
+.Parameter Prompt
+The prompt prefix to display when this virtual environment is activated. By
+default, this prompt is the name of the virtual environment folder (VenvDir)
+surrounded by parentheses and followed by a single space (ie. '(.venv) ').
+
+.Example
+Activate.ps1
+Activates the Python virtual environment that contains the Activate.ps1 script.
+
+.Example
+Activate.ps1 -Verbose
+Activates the Python virtual environment that contains the Activate.ps1 script,
+and shows extra information about the activation as it executes.
+
+.Example
+Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
+Activates the Python virtual environment located in the specified location.
+
+.Example
+Activate.ps1 -Prompt "MyPython"
+Activates the Python virtual environment that contains the Activate.ps1 script,
+and prefixes the current prompt with the specified string (surrounded in
+parentheses) while the virtual environment is active.
+
+.Notes
+On Windows, it may be required to enable this Activate.ps1 script by setting the
+execution policy for the user. You can do this by issuing the following PowerShell
+command:
+
+PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
+
+For more information on Execution Policies:
+https://go.microsoft.com/fwlink/?LinkID=135170
+
+#>
+Param(
+ [Parameter(Mandatory = $false)]
+ [String]
+ $VenvDir,
+ [Parameter(Mandatory = $false)]
+ [String]
+ $Prompt
+)
+
+<# Function declarations --------------------------------------------------- #>
+
+<#
+.Synopsis
+Remove all shell session elements added by the Activate script, including the
+addition of the virtual environment's Python executable from the beginning of
+the PATH variable.
+
+.Parameter NonDestructive
+If present, do not remove this function from the global namespace for the
+session.
+
+#>
+function global:deactivate ([switch]$NonDestructive) {
+ # Revert to original values
+
+ # The prior prompt:
+ if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
+ Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
+ Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
+ }
+
+ # The prior PYTHONHOME:
+ if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
+ Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
+ Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
+ }
+
+ # The prior PATH:
+ if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
+ Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
+ Remove-Item -Path Env:_OLD_VIRTUAL_PATH
+ }
+
+ # Just remove the VIRTUAL_ENV altogether:
+ if (Test-Path -Path Env:VIRTUAL_ENV) {
+ Remove-Item -Path env:VIRTUAL_ENV
+ }
+
+ # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
+ if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
+ Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
+ }
+
+ # Leave deactivate function in the global namespace if requested:
+ if (-not $NonDestructive) {
+ Remove-Item -Path function:deactivate
+ }
+}
+
+<#
+.Description
+Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
+given folder, and returns them in a map.
+
+For each line in the pyvenv.cfg file, if that line can be parsed into exactly
+two strings separated by `=` (with any amount of whitespace surrounding the =)
+then it is considered a `key = value` line. The left hand string is the key,
+the right hand is the value.
+
+If the value starts with a `'` or a `"` then the first and last character is
+stripped from the value before being captured.
+
+.Parameter ConfigDir
+Path to the directory that contains the `pyvenv.cfg` file.
+#>
+function Get-PyVenvConfig(
+ [String]
+ $ConfigDir
+) {
+ Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
+
+ # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
+ $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
+
+ # An empty map will be returned if no config file is found.
+ $pyvenvConfig = @{ }
+
+ if ($pyvenvConfigPath) {
+
+ Write-Verbose "File exists, parse `key = value` lines"
+ $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
+
+ $pyvenvConfigContent | ForEach-Object {
+ $keyval = $PSItem -split "\s*=\s*", 2
+ if ($keyval[0] -and $keyval[1]) {
+ $val = $keyval[1]
+
+ # Remove extraneous quotations around a string value.
+ if ("'""".Contains($val.Substring(0, 1))) {
+ $val = $val.Substring(1, $val.Length - 2)
+ }
+
+ $pyvenvConfig[$keyval[0]] = $val
+ Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
+ }
+ }
+ }
+ return $pyvenvConfig
+}
+
+
+<# Begin Activate script --------------------------------------------------- #>
+
+# Determine the containing directory of this script
+$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
+$VenvExecDir = Get-Item -Path $VenvExecPath
+
+Write-Verbose "Activation script is located in path: '$VenvExecPath'"
+Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
+Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
+
+# Set values required in priority: CmdLine, ConfigFile, Default
+# First, get the location of the virtual environment, it might not be
+# VenvExecDir if specified on the command line.
+if ($VenvDir) {
+ Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
+}
+else {
+ Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
+ $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
+ Write-Verbose "VenvDir=$VenvDir"
+}
+
+# Next, read the `pyvenv.cfg` file to determine any required value such
+# as `prompt`.
+$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
+
+# Next, set the prompt from the command line, or the config file, or
+# just use the name of the virtual environment folder.
+if ($Prompt) {
+ Write-Verbose "Prompt specified as argument, using '$Prompt'"
+}
+else {
+ Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
+ if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
+ Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
+ $Prompt = $pyvenvCfg['prompt'];
+ }
+ else {
+ Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virutal environment)"
+ Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
+ $Prompt = Split-Path -Path $venvDir -Leaf
+ }
+}
+
+Write-Verbose "Prompt = '$Prompt'"
+Write-Verbose "VenvDir='$VenvDir'"
+
+# Deactivate any currently active virtual environment, but leave the
+# deactivate function in place.
+deactivate -nondestructive
+
+# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
+# that there is an activated venv.
+$env:VIRTUAL_ENV = $VenvDir
+
+if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
+
+ Write-Verbose "Setting prompt to '$Prompt'"
+
+ # Set the prompt to include the env name
+ # Make sure _OLD_VIRTUAL_PROMPT is global
+ function global:_OLD_VIRTUAL_PROMPT { "" }
+ Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
+ New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
+
+ function global:prompt {
+ Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
+ _OLD_VIRTUAL_PROMPT
+ }
+}
+
+# Clear PYTHONHOME
+if (Test-Path -Path Env:PYTHONHOME) {
+ Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
+ Remove-Item -Path Env:PYTHONHOME
+}
+
+# Add the venv to the PATH
+Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
+$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
diff --git a/monEnvTP/bin/activate b/monEnvTP/bin/activate
new file mode 100644
index 0000000000000000000000000000000000000000..cc2ba481796dab0be5fd44a5cdd8f7127b2d5ac2
--- /dev/null
+++ b/monEnvTP/bin/activate
@@ -0,0 +1,76 @@
+# This file must be used with "source bin/activate" *from bash*
+# you cannot run it directly
+
+deactivate () {
+ # reset old environment variables
+ if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
+ PATH="${_OLD_VIRTUAL_PATH:-}"
+ export PATH
+ unset _OLD_VIRTUAL_PATH
+ fi
+ if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
+ PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
+ export PYTHONHOME
+ unset _OLD_VIRTUAL_PYTHONHOME
+ fi
+
+ # This should detect bash and zsh, which have a hash command that must
+ # be called to get it to forget past commands. Without forgetting
+ # past commands the $PATH changes we made may not be respected
+ if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
+ hash -r
+ fi
+
+ if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
+ PS1="${_OLD_VIRTUAL_PS1:-}"
+ export PS1
+ unset _OLD_VIRTUAL_PS1
+ fi
+
+ unset VIRTUAL_ENV
+ if [ ! "${1:-}" = "nondestructive" ] ; then
+ # Self destruct!
+ unset -f deactivate
+ fi
+}
+
+# unset irrelevant variables
+deactivate nondestructive
+
+VIRTUAL_ENV="/home/ubuntu/TP/isi-tp2-injection/monEnvTP"
+export VIRTUAL_ENV
+
+_OLD_VIRTUAL_PATH="$PATH"
+PATH="$VIRTUAL_ENV/bin:$PATH"
+export PATH
+
+# unset PYTHONHOME if set
+# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
+# could use `if (set -u; : $PYTHONHOME) ;` in bash
+if [ -n "${PYTHONHOME:-}" ] ; then
+ _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
+ unset PYTHONHOME
+fi
+
+if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
+ _OLD_VIRTUAL_PS1="${PS1:-}"
+ if [ "x(monEnvTP) " != x ] ; then
+ PS1="(monEnvTP) ${PS1:-}"
+ else
+ if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then
+ # special case for Aspen magic directories
+ # see https://aspen.io/
+ PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1"
+ else
+ PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1"
+ fi
+ fi
+ export PS1
+fi
+
+# This should detect bash and zsh, which have a hash command that must
+# be called to get it to forget past commands. Without forgetting
+# past commands the $PATH changes we made may not be respected
+if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
+ hash -r
+fi
diff --git a/monEnvTP/bin/activate.csh b/monEnvTP/bin/activate.csh
new file mode 100644
index 0000000000000000000000000000000000000000..7c38dd5cf184e0c03c8fcb537bf058f378c91d40
--- /dev/null
+++ b/monEnvTP/bin/activate.csh
@@ -0,0 +1,37 @@
+# This file must be used with "source bin/activate.csh" *from csh*.
+# You cannot run it directly.
+# Created by Davide Di Blasi <davidedb@gmail.com>.
+# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
+
+alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate'
+
+# Unset irrelevant variables.
+deactivate nondestructive
+
+setenv VIRTUAL_ENV "/home/ubuntu/TP/isi-tp2-injection/monEnvTP"
+
+set _OLD_VIRTUAL_PATH="$PATH"
+setenv PATH "$VIRTUAL_ENV/bin:$PATH"
+
+
+set _OLD_VIRTUAL_PROMPT="$prompt"
+
+if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
+ if ("monEnvTP" != "") then
+ set env_name = "monEnvTP"
+ else
+ if (`basename "VIRTUAL_ENV"` == "__") then
+ # special case for Aspen magic directories
+ # see https://aspen.io/
+ set env_name = `basename \`dirname "$VIRTUAL_ENV"\``
+ else
+ set env_name = `basename "$VIRTUAL_ENV"`
+ endif
+ endif
+ set prompt = "[$env_name] $prompt"
+ unset env_name
+endif
+
+alias pydoc python -m pydoc
+
+rehash
diff --git a/monEnvTP/bin/activate.fish b/monEnvTP/bin/activate.fish
new file mode 100644
index 0000000000000000000000000000000000000000..8cad63761f5a051d65ea691472224b9bd098a8cb
--- /dev/null
+++ b/monEnvTP/bin/activate.fish
@@ -0,0 +1,75 @@
+# This file must be used with ". bin/activate.fish" *from fish* (http://fishshell.org)
+# you cannot run it directly
+
+function deactivate -d "Exit virtualenv and return to normal shell environment"
+ # reset old environment variables
+ if test -n "$_OLD_VIRTUAL_PATH"
+ set -gx PATH $_OLD_VIRTUAL_PATH
+ set -e _OLD_VIRTUAL_PATH
+ end
+ if test -n "$_OLD_VIRTUAL_PYTHONHOME"
+ set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
+ set -e _OLD_VIRTUAL_PYTHONHOME
+ end
+
+ if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
+ functions -e fish_prompt
+ set -e _OLD_FISH_PROMPT_OVERRIDE
+ functions -c _old_fish_prompt fish_prompt
+ functions -e _old_fish_prompt
+ end
+
+ set -e VIRTUAL_ENV
+ if test "$argv[1]" != "nondestructive"
+ # Self destruct!
+ functions -e deactivate
+ end
+end
+
+# unset irrelevant variables
+deactivate nondestructive
+
+set -gx VIRTUAL_ENV "/home/ubuntu/TP/isi-tp2-injection/monEnvTP"
+
+set -gx _OLD_VIRTUAL_PATH $PATH
+set -gx PATH "$VIRTUAL_ENV/bin" $PATH
+
+# unset PYTHONHOME if set
+if set -q PYTHONHOME
+ set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
+ set -e PYTHONHOME
+end
+
+if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
+ # fish uses a function instead of an env var to generate the prompt.
+
+ # save the current fish_prompt function as the function _old_fish_prompt
+ functions -c fish_prompt _old_fish_prompt
+
+ # with the original prompt function renamed, we can override with our own.
+ function fish_prompt
+ # Save the return status of the last command
+ set -l old_status $status
+
+ # Prompt override?
+ if test -n "(monEnvTP) "
+ printf "%s%s" "(monEnvTP) " (set_color normal)
+ else
+ # ...Otherwise, prepend env
+ set -l _checkbase (basename "$VIRTUAL_ENV")
+ if test $_checkbase = "__"
+ # special case for Aspen magic directories
+ # see https://aspen.io/
+ printf "%s[%s]%s " (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal)
+ else
+ printf "%s(%s)%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal)
+ end
+ end
+
+ # Restore the return status of the previous command.
+ echo "exit $old_status" | .
+ _old_fish_prompt
+ end
+
+ set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
+end
diff --git a/monEnvTP/bin/calc-prorate b/monEnvTP/bin/calc-prorate
new file mode 100755
index 0000000000000000000000000000000000000000..2cf6560b7f3f6a736a4471c4f4d1dcfc2ba02b06
--- /dev/null
+++ b/monEnvTP/bin/calc-prorate
@@ -0,0 +1,8 @@
+#!/home/ubuntu/TP/isi-tp2-injection/monEnvTP/bin/python3
+# -*- coding: utf-8 -*-
+import re
+import sys
+from tempora import calculate_prorated_values
+if __name__ == '__main__':
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+ sys.exit(calculate_prorated_values())
diff --git a/monEnvTP/bin/cheroot b/monEnvTP/bin/cheroot
new file mode 100755
index 0000000000000000000000000000000000000000..1449ff69a28ac05a66153640a1b8072046584d65
--- /dev/null
+++ b/monEnvTP/bin/cheroot
@@ -0,0 +1,8 @@
+#!/home/ubuntu/TP/isi-tp2-injection/monEnvTP/bin/python3
+# -*- coding: utf-8 -*-
+import re
+import sys
+from cheroot.cli import main
+if __name__ == '__main__':
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+ sys.exit(main())
diff --git a/monEnvTP/bin/cherryd b/monEnvTP/bin/cherryd
new file mode 100755
index 0000000000000000000000000000000000000000..29a1ba1c224879728116edb7bdd695533bee532e
--- /dev/null
+++ b/monEnvTP/bin/cherryd
@@ -0,0 +1,8 @@
+#!/home/ubuntu/TP/isi-tp2-injection/monEnvTP/bin/python3
+# -*- coding: utf-8 -*-
+import re
+import sys
+from cherrypy.__main__ import run
+if __name__ == '__main__':
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+ sys.exit(run())
diff --git a/monEnvTP/bin/easy_install b/monEnvTP/bin/easy_install
new file mode 100755
index 0000000000000000000000000000000000000000..7a69263fd24e1388756c98d7dd0a09c9e28ae919
--- /dev/null
+++ b/monEnvTP/bin/easy_install
@@ -0,0 +1,8 @@
+#!/home/ubuntu/TP/isi-tp2-injection/monEnvTP/bin/python3
+# -*- coding: utf-8 -*-
+import re
+import sys
+from setuptools.command.easy_install import main
+if __name__ == '__main__':
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+ sys.exit(main())
diff --git a/monEnvTP/bin/easy_install-3.8 b/monEnvTP/bin/easy_install-3.8
new file mode 100755
index 0000000000000000000000000000000000000000..7a69263fd24e1388756c98d7dd0a09c9e28ae919
--- /dev/null
+++ b/monEnvTP/bin/easy_install-3.8
@@ -0,0 +1,8 @@
+#!/home/ubuntu/TP/isi-tp2-injection/monEnvTP/bin/python3
+# -*- coding: utf-8 -*-
+import re
+import sys
+from setuptools.command.easy_install import main
+if __name__ == '__main__':
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+ sys.exit(main())
diff --git a/monEnvTP/bin/pip b/monEnvTP/bin/pip
new file mode 100755
index 0000000000000000000000000000000000000000..8f5d286d08c732f96f5e30d3f450d28ca1aa8d99
--- /dev/null
+++ b/monEnvTP/bin/pip
@@ -0,0 +1,8 @@
+#!/home/ubuntu/TP/isi-tp2-injection/monEnvTP/bin/python3
+# -*- coding: utf-8 -*-
+import re
+import sys
+from pip._internal.cli.main import main
+if __name__ == '__main__':
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+ sys.exit(main())
diff --git a/monEnvTP/bin/pip3 b/monEnvTP/bin/pip3
new file mode 100755
index 0000000000000000000000000000000000000000..8f5d286d08c732f96f5e30d3f450d28ca1aa8d99
--- /dev/null
+++ b/monEnvTP/bin/pip3
@@ -0,0 +1,8 @@
+#!/home/ubuntu/TP/isi-tp2-injection/monEnvTP/bin/python3
+# -*- coding: utf-8 -*-
+import re
+import sys
+from pip._internal.cli.main import main
+if __name__ == '__main__':
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+ sys.exit(main())
diff --git a/monEnvTP/bin/pip3.8 b/monEnvTP/bin/pip3.8
new file mode 100755
index 0000000000000000000000000000000000000000..8f5d286d08c732f96f5e30d3f450d28ca1aa8d99
--- /dev/null
+++ b/monEnvTP/bin/pip3.8
@@ -0,0 +1,8 @@
+#!/home/ubuntu/TP/isi-tp2-injection/monEnvTP/bin/python3
+# -*- coding: utf-8 -*-
+import re
+import sys
+from pip._internal.cli.main import main
+if __name__ == '__main__':
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+ sys.exit(main())
diff --git a/monEnvTP/bin/python b/monEnvTP/bin/python
new file mode 120000
index 0000000000000000000000000000000000000000..b8a0adbbb97ea11f36eb0c6b2a3c2881e96f8e26
--- /dev/null
+++ b/monEnvTP/bin/python
@@ -0,0 +1 @@
+python3
\ No newline at end of file
diff --git a/monEnvTP/bin/python3 b/monEnvTP/bin/python3
new file mode 120000
index 0000000000000000000000000000000000000000..ae65fdaa12936b0d7525b090d198249fa7623e66
--- /dev/null
+++ b/monEnvTP/bin/python3
@@ -0,0 +1 @@
+/usr/bin/python3
\ No newline at end of file
diff --git a/monEnvTP/lib/python3.8/site-packages/CherryPy-18.6.1.dist-info/INSTALLER b/monEnvTP/lib/python3.8/site-packages/CherryPy-18.6.1.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/CherryPy-18.6.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/monEnvTP/lib/python3.8/site-packages/CherryPy-18.6.1.dist-info/LICENSE.md b/monEnvTP/lib/python3.8/site-packages/CherryPy-18.6.1.dist-info/LICENSE.md
new file mode 100644
index 0000000000000000000000000000000000000000..96b866459e2827d3e1e0e1e38d6cbd8dae6e6d06
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/CherryPy-18.6.1.dist-info/LICENSE.md
@@ -0,0 +1,30 @@
+Copyright © 2004-2019, CherryPy Team (team@cherrypy.org)
+
+All rights reserved.
+
+* * *
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+* Neither the name of CherryPy nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/monEnvTP/lib/python3.8/site-packages/CherryPy-18.6.1.dist-info/METADATA b/monEnvTP/lib/python3.8/site-packages/CherryPy-18.6.1.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..02674a70fe9940d945389d1ae00cbd71f7ef19b0
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/CherryPy-18.6.1.dist-info/METADATA
@@ -0,0 +1,198 @@
+Metadata-Version: 2.1
+Name: CherryPy
+Version: 18.6.1
+Summary: Object-Oriented HTTP framework
+Home-page: https://www.cherrypy.org
+Author: CherryPy Team
+Author-email: team@cherrypy.org
+License: UNKNOWN
+Project-URL: CI: AppVeyor, https://ci.appveyor.com/project/cherrypy/cherrypy
+Project-URL: CI: Travis, https://travis-ci.org/cherrypy/cherrypy
+Project-URL: CI: Circle, https://circleci.com/gh/cherrypy/cherrypy
+Project-URL: Docs: RTD, https://docs.cherrypy.org
+Project-URL: GitHub: issues, https://github.com/cherrypy/cherrypy/issues
+Project-URL: GitHub: repo, https://github.com/cherrypy/cherrypy
+Project-URL: Tidelift: funding, https://tidelift.com/subscription/pkg/pypi-cherrypy?utm_source=pypi-cherrypy&utm_medium=referral&utm_campaign=pypi
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: Freely Distributable
+Classifier: Operating System :: OS Independent
+Classifier: Framework :: CherryPy
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: Implementation
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: Jython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Internet :: WWW/HTTP
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
+Classifier: Topic :: Internet :: WWW/HTTP :: HTTP Servers
+Classifier: Topic :: Internet :: WWW/HTTP :: WSGI
+Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application
+Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Server
+Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
+Requires-Python: >=3.5
+License-File: LICENSE.md
+Requires-Dist: cheroot (>=8.2.1)
+Requires-Dist: portend (>=2.1.1)
+Requires-Dist: more-itertools
+Requires-Dist: zc.lockfile
+Requires-Dist: jaraco.collections
+Requires-Dist: pywin32 (>=227) ; sys_platform == "win32" and implementation_name == "cpython" and python_version < "3.10"
+Provides-Extra: docs
+Requires-Dist: sphinx ; extra == 'docs'
+Requires-Dist: docutils ; extra == 'docs'
+Requires-Dist: alabaster ; extra == 'docs'
+Requires-Dist: sphinxcontrib-apidoc (>=0.3.0) ; extra == 'docs'
+Requires-Dist: rst.linker (>=1.11) ; extra == 'docs'
+Requires-Dist: jaraco.packaging (>=3.2) ; extra == 'docs'
+Requires-Dist: setuptools ; extra == 'docs'
+Provides-Extra: json
+Requires-Dist: simplejson ; extra == 'json'
+Provides-Extra: memcached_session
+Requires-Dist: python-memcached (>=1.58) ; extra == 'memcached_session'
+Provides-Extra: routes_dispatcher
+Requires-Dist: routes (>=2.3.1) ; extra == 'routes_dispatcher'
+Provides-Extra: ssl
+Requires-Dist: pyOpenSSL ; extra == 'ssl'
+Provides-Extra: testing
+Requires-Dist: coverage ; extra == 'testing'
+Requires-Dist: codecov ; extra == 'testing'
+Requires-Dist: objgraph ; extra == 'testing'
+Requires-Dist: pytest (>=5.3.5) ; extra == 'testing'
+Requires-Dist: pytest-cov ; extra == 'testing'
+Requires-Dist: pytest-forked ; extra == 'testing'
+Requires-Dist: pytest-sugar ; extra == 'testing'
+Requires-Dist: path.py ; extra == 'testing'
+Requires-Dist: requests-toolbelt ; extra == 'testing'
+Requires-Dist: pytest-services (>=2) ; extra == 'testing'
+Requires-Dist: setuptools ; extra == 'testing'
+Provides-Extra: xcgi
+Requires-Dist: flup ; extra == 'xcgi'
+
+.. image:: https://img.shields.io/pypi/v/cherrypy.svg
+ :target: https://pypi.org/project/cherrypy
+
+.. image:: https://tidelift.com/badges/package/pypi/CherryPy
+ :target: https://tidelift.com/subscription/pkg/pypi-cherrypy?utm_source=pypi-cherrypy&utm_medium=readme
+ :alt: CherryPy is available as part of the Tidelift Subscription
+
+.. image:: https://img.shields.io/badge/Python%203%20only-pip%20install%20%22%3E%3D18.0.0%22-%234da45e.svg
+ :target: https://python3statement.org/
+
+.. image:: https://img.shields.io/badge/Python%203%20and%202-pip%20install%20%22%3C18.0.0%22-%2349a7e9.svg
+ :target: https://python3statement.org/#sections40-timeline
+
+
+
+.. image:: https://readthedocs.org/projects/cherrypy/badge/?version=latest
+ :target: https://docs.cherrypy.org/en/latest/?badge=latest
+
+.. image:: https://img.shields.io/badge/StackOverflow-CherryPy-blue.svg
+ :target: https://stackoverflow.com/questions/tagged/cheroot+or+cherrypy
+
+.. image:: https://img.shields.io/badge/Mailing%20list-cherrypy--users-orange.svg
+ :target: https://groups.google.com/group/cherrypy-users
+
+.. image:: https://img.shields.io/gitter/room/cherrypy/cherrypy.svg
+ :target: https://gitter.im/cherrypy/cherrypy
+
+.. image:: https://img.shields.io/travis/cherrypy/cherrypy/master.svg?label=Linux%20build%20%40%20Travis%20CI
+ :target: https://travis-ci.org/cherrypy/cherrypy
+
+.. image:: https://circleci.com/gh/cherrypy/cherrypy/tree/master.svg?style=svg
+ :target: https://circleci.com/gh/cherrypy/cherrypy/tree/master
+
+.. image:: https://img.shields.io/appveyor/ci/CherryPy/cherrypy/master.svg?label=Windows%20build%20%40%20Appveyor
+ :target: https://ci.appveyor.com/project/CherryPy/cherrypy/branch/master
+
+.. image:: https://img.shields.io/badge/license-BSD-blue.svg?maxAge=3600
+ :target: https://pypi.org/project/cheroot
+
+.. image:: https://img.shields.io/pypi/pyversions/cherrypy.svg
+ :target: https://pypi.org/project/cherrypy
+
+.. image:: https://badges.github.io/stability-badges/dist/stable.svg
+ :target: https://github.com/badges/stability-badges
+ :alt: stable
+
+.. image:: https://api.codacy.com/project/badge/Grade/48b11060b5d249dc86e52dac2be2c715
+ :target: https://www.codacy.com/app/webknjaz/cherrypy-upstream?utm_source=github.com&utm_medium=referral&utm_content=cherrypy/cherrypy&utm_campaign=Badge_Grade
+
+.. image:: https://codecov.io/gh/cherrypy/cherrypy/branch/master/graph/badge.svg
+ :target: https://codecov.io/gh/cherrypy/cherrypy
+ :alt: codecov
+
+Welcome to the GitHub repository of `CherryPy <https://cherrypy.org/>`_!
+
+CherryPy is a pythonic, object-oriented HTTP framework.
+
+1. It allows building web applications in much the same way one would
+ build any other object-oriented program.
+2. This design results in more concise and readable code developed faster.
+ It's all just properties and methods.
+3. It is now more than ten years old and has proven fast and very
+ stable.
+4. It is being used in production by many sites, from the simplest to
+ the most demanding.
+5. And perhaps most importantly, it is fun to work with :-)
+
+Here's how easy it is to write "Hello World" in CherryPy:
+
+.. code:: python
+
+ import cherrypy
+
+ class HelloWorld(object):
+ @cherrypy.expose
+ def index(self):
+ return "Hello World!"
+
+ cherrypy.quickstart(HelloWorld())
+
+And it continues to work that intuitively when systems grow, allowing
+for the Python object model to be dynamically presented as a website
+and/or API.
+
+While CherryPy is one of the easiest and most intuitive frameworks out
+there, the prerequisite for understanding the `CherryPy
+documentation <https://docs.cherrypy.org/en/latest/>`_ is that you have
+a general understanding of Python and web development.
+Additionally:
+
+- Tutorials are included in the repository:
+ https://github.com/cherrypy/cherrypy/tree/master/cherrypy/tutorial
+- A general wiki at:
+ https://github.com/cherrypy/cherrypy/wiki
+
+If the docs are insufficient to address your needs, the CherryPy
+community has several `avenues for support
+<https://docs.cherrypy.org/en/latest/support.html>`_.
+
+For Enterprise
+--------------
+
+CherryPy is available as part of the Tidelift Subscription.
+
+The CherryPy maintainers and the maintainers of thousands of other packages
+are working with Tidelift to deliver one enterprise subscription that covers
+all of the open source you use.
+
+`Learn more <https://tidelift.com/subscription/pkg/pypi-cherrypy?utm_source=pypi-cherrypy&utm_medium=referral&utm_campaign=github>`_.
+
+Contributing
+------------
+
+Please follow the `contribution guidelines
+<https://docs.cherrypy.org/en/latest/contribute.html>`_.
+And by all means, absorb the `Zen of
+CherryPy <https://github.com/cherrypy/cherrypy/wiki/The-Zen-of-CherryPy>`_.
+
+
diff --git a/monEnvTP/lib/python3.8/site-packages/CherryPy-18.6.1.dist-info/RECORD b/monEnvTP/lib/python3.8/site-packages/CherryPy-18.6.1.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..9fb4483a7b6557998ec154c316819024c57c65ac
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/CherryPy-18.6.1.dist-info/RECORD
@@ -0,0 +1,234 @@
+../../../bin/cherryd,sha256=Cwgc8TojxEeeMV87CzAlqoxPYB5rCwe_hyo-FdpiNOk,252
+CherryPy-18.6.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+CherryPy-18.6.1.dist-info/LICENSE.md,sha256=bH-Q1LfG4IGE-IrBJ1C5TWvyVO7jRaGNJheHF75cJ9I,1511
+CherryPy-18.6.1.dist-info/METADATA,sha256=h0lVZbObh6CYPaaqhyKDCpWZrhDXhsagqKgCLjwAzFA,8445
+CherryPy-18.6.1.dist-info/RECORD,,
+CherryPy-18.6.1.dist-info/WHEEL,sha256=Z-nyYpwrcSqxfdux5Mbn_DQ525iP7J2DG3JgGvOYyTQ,110
+CherryPy-18.6.1.dist-info/entry_points.txt,sha256=oD4d44eul1o3w3KNRliN2Qg0_6QtXZEyxYUIw03BeXw,51
+CherryPy-18.6.1.dist-info/top_level.txt,sha256=mOBE-r7Ej1kFrXNKlOj3yY9QfGA6Xkz6vZK7VNJF3YE,9
+cherrypy/__init__.py,sha256=BHFq6fz2ILLDGq7OCALzmX8MaxPGI6AiR-fYLJo1kIw,11285
+cherrypy/__main__.py,sha256=gb89ZhhQ1kjYvU61axGuZt5twA4q3kfBa3WKrPT6D2k,107
+cherrypy/__pycache__/__init__.cpython-38.pyc,,
+cherrypy/__pycache__/__main__.cpython-38.pyc,,
+cherrypy/__pycache__/_cpchecker.cpython-38.pyc,,
+cherrypy/__pycache__/_cpcompat.cpython-38.pyc,,
+cherrypy/__pycache__/_cpconfig.cpython-38.pyc,,
+cherrypy/__pycache__/_cpdispatch.cpython-38.pyc,,
+cherrypy/__pycache__/_cperror.cpython-38.pyc,,
+cherrypy/__pycache__/_cplogging.cpython-38.pyc,,
+cherrypy/__pycache__/_cpmodpy.cpython-38.pyc,,
+cherrypy/__pycache__/_cpnative_server.cpython-38.pyc,,
+cherrypy/__pycache__/_cpreqbody.cpython-38.pyc,,
+cherrypy/__pycache__/_cprequest.cpython-38.pyc,,
+cherrypy/__pycache__/_cpserver.cpython-38.pyc,,
+cherrypy/__pycache__/_cptools.cpython-38.pyc,,
+cherrypy/__pycache__/_cptree.cpython-38.pyc,,
+cherrypy/__pycache__/_cpwsgi.cpython-38.pyc,,
+cherrypy/__pycache__/_cpwsgi_server.cpython-38.pyc,,
+cherrypy/__pycache__/_helper.cpython-38.pyc,,
+cherrypy/__pycache__/_json.cpython-38.pyc,,
+cherrypy/__pycache__/daemon.cpython-38.pyc,,
+cherrypy/_cpchecker.py,sha256=aAZfQTmW7_f0MfKZmmaHKs_7IjeOi8j-voeDmSbLEA0,14566
+cherrypy/_cpcompat.py,sha256=2gZtFj0h6RRGlkY-R6WoEl-S8BBSIwCeuj7F2Tf3mU0,1992
+cherrypy/_cpconfig.py,sha256=abftUSXmOWoqpak3VHOR50XBBo6wxy8qngz5VmsRXDQ,9647
+cherrypy/_cpdispatch.py,sha256=8GyvAkCOw84CZFbr3VdQmPqyMjlWHXG8SY2bnHvnPjA,25392
+cherrypy/_cperror.py,sha256=tnnFu5d2Xu0JjMgzySlBtdVYcpY95pnwrH0duCKrKSI,23070
+cherrypy/_cplogging.py,sha256=RyNDWhVt2K_QRsv1vOi2gG-yYOgW3UxjOhvQJ1Tv6qM,16402
+cherrypy/_cpmodpy.py,sha256=plLF1jQ_qhsox4Ky8aKTsvSLXsNHs8gxTXIWCNB2PN0,11141
+cherrypy/_cpnative_server.py,sha256=BtmMuMtdBmjb_kavTt6NdoREmqkHRhi1bUAG9EdhQuU,6677
+cherrypy/_cpreqbody.py,sha256=85XtrtUq6wd1XeyRK2i2wiSY3QvJfaPnoLGMhhaks68,36382
+cherrypy/_cprequest.py,sha256=ImUeaDMaqLMWuwoHzhXFayCkXrrtlYVBEj15Q92EVCo,34313
+cherrypy/_cpserver.py,sha256=sai48_tDCkUrhqLKaSZSyk3aSIRxgUiMv6FNUe03PBY,8320
+cherrypy/_cptools.py,sha256=Y35JizxKfVy3JNCbVido0rSXlyIzv4MrYDxdRuYG0bQ,18163
+cherrypy/_cptree.py,sha256=_DLxtDfnPksYk5wlkLzsyCIk5TqrwE4wgkF1yFPdbko,10977
+cherrypy/_cpwsgi.py,sha256=W3u6BKh1P_TZw9d5DzuwZhINXeQcMGvUVshN0iaFEOk,16394
+cherrypy/_cpwsgi_server.py,sha256=ZILHc_ouGC_eARqrVTA1z5pdm9T5QLGURN4Idr0eiJQ,4187
+cherrypy/_helper.py,sha256=bTZvfBpsZ0FDAGqtuuSblaiDloogxCpOyBy-XJF39vE,11653
+cherrypy/_json.py,sha256=zxl6rLuW4xE5aiXJ0UPeI22NhdeLG-gDN_FFOMGikg0,440
+cherrypy/daemon.py,sha256=kgiqlnWFx-PkLEcPPRJmXmB6zbI7X0cN2cCRtosow4c,3950
+cherrypy/favicon.ico,sha256=jrNK5SnKfbnSFgX_xQyLX3khmeImw8IbbHgJVIsGci0,1406
+cherrypy/lib/__init__.py,sha256=5_heysJFsUQMDLVNEFuCUbH03wWbA3lqsErmXLvPfFU,2745
+cherrypy/lib/__pycache__/__init__.cpython-38.pyc,,
+cherrypy/lib/__pycache__/auth_basic.cpython-38.pyc,,
+cherrypy/lib/__pycache__/auth_digest.cpython-38.pyc,,
+cherrypy/lib/__pycache__/caching.cpython-38.pyc,,
+cherrypy/lib/__pycache__/covercp.cpython-38.pyc,,
+cherrypy/lib/__pycache__/cpstats.cpython-38.pyc,,
+cherrypy/lib/__pycache__/cptools.cpython-38.pyc,,
+cherrypy/lib/__pycache__/encoding.cpython-38.pyc,,
+cherrypy/lib/__pycache__/gctools.cpython-38.pyc,,
+cherrypy/lib/__pycache__/httputil.cpython-38.pyc,,
+cherrypy/lib/__pycache__/jsontools.cpython-38.pyc,,
+cherrypy/lib/__pycache__/locking.cpython-38.pyc,,
+cherrypy/lib/__pycache__/profiler.cpython-38.pyc,,
+cherrypy/lib/__pycache__/reprconf.cpython-38.pyc,,
+cherrypy/lib/__pycache__/sessions.cpython-38.pyc,,
+cherrypy/lib/__pycache__/static.cpython-38.pyc,,
+cherrypy/lib/__pycache__/xmlrpcutil.cpython-38.pyc,,
+cherrypy/lib/auth_basic.py,sha256=FkySKMk0WC7OeLIefylkNyGORLpr9Rf98jQuxdBGHsA,4421
+cherrypy/lib/auth_digest.py,sha256=ulK5cf7SNrFJAASg1ORrgi5mHxtJz8qBaPV8IUFDcw0,15342
+cherrypy/lib/caching.py,sha256=rJ_fPGlZ3Dpbg-BEDZQHL-m4_LJHJg69tBrAqgfzOW8,17517
+cherrypy/lib/covercp.py,sha256=ScvlzRydsh0Gn5FwZUKTRdI6ANKlYMDRE4niYRSVtQE,11566
+cherrypy/lib/cpstats.py,sha256=luAeI7Dttzb3HhWUNsZUrNCrFp9LDXiBIOeVEzVmxiU,22854
+cherrypy/lib/cptools.py,sha256=fzdi6K6TzlwLKFFKfQp9OCncwMpH5t9ThaXL2IGd20M,23402
+cherrypy/lib/encoding.py,sha256=pnUVBiRioH9LpLU-K81u47yCviPebPNyX3P_Pzvv6Lw,17047
+cherrypy/lib/gctools.py,sha256=5SI3w507J3JJConDjp8Jmgo6FI9uR59S_mm44cGB_Lw,7346
+cherrypy/lib/httputil.py,sha256=IZRCxq9tOlZluemM5LCwmZe_62t-NAK1Xp0a29jadbU,17255
+cherrypy/lib/jsontools.py,sha256=YQQmGQN4XKnSgB4u0lRl2MB9YkVeNR3-6YWyU76jUFQ,3641
+cherrypy/lib/locking.py,sha256=RLUwVj07-xNnBavmcRkjs-fmg712S7wSk3Gnox9TyEg,1224
+cherrypy/lib/profiler.py,sha256=1apBGddxzooel45y5TwH7VikJrwB8pvPk-6FjIs0R1U,6539
+cherrypy/lib/reprconf.py,sha256=ZRiYeruM2593qrRBPCKbNSUe9QpgCbZvoPMhowqDTFA,12384
+cherrypy/lib/sessions.py,sha256=BBad6BLiFlBMFXI_-dgTvv6f0bbEBIYNEd0DUzHcYuo,30998
+cherrypy/lib/static.py,sha256=Zd-2RVUM3lrgU7Ag-HxAEWlMb22CzZ7fU5xatowfGGo,16592
+cherrypy/lib/xmlrpcutil.py,sha256=UZqJsoBboSHjgsSiOfFcDwlubtSruhYyxpKdxutazQk,1684
+cherrypy/process/__init__.py,sha256=RjaRqUG5U-ZhxAs7GBWN9PFR5hIK-9a9x3ZFwFjyW4Y,547
+cherrypy/process/__pycache__/__init__.cpython-38.pyc,,
+cherrypy/process/__pycache__/plugins.cpython-38.pyc,,
+cherrypy/process/__pycache__/servers.cpython-38.pyc,,
+cherrypy/process/__pycache__/win32.cpython-38.pyc,,
+cherrypy/process/__pycache__/wspbus.cpython-38.pyc,,
+cherrypy/process/plugins.py,sha256=nbwj3LUglw3V69Z9UFihTZrnZGaNuw_NkUbqcI14wKA,26817
+cherrypy/process/servers.py,sha256=KojCzP2AxBV6D3URQX_blU8dfpkIL78OpfdwcQwRtt8,13442
+cherrypy/process/win32.py,sha256=o_aT4XrUL6WcyVPti4O59apFd5ARoJz75VbiktdqqJk,5787
+cherrypy/process/wspbus.py,sha256=wcDsxWtOBXyAAhcl2WZ6_mybs3N79GRtEeRu-zjXQ-8,21460
+cherrypy/scaffold/__init__.py,sha256=sLK5vjy-_f6ikJ7-Lhj7HT8VGAIi6olqPuZMTD-RpAQ,1997
+cherrypy/scaffold/__pycache__/__init__.cpython-38.pyc,,
+cherrypy/scaffold/apache-fcgi.conf,sha256=0M10HHX8i2Or3r-gHoDglSQK4dZHd4Jhx4WhamJtuwc,930
+cherrypy/scaffold/example.conf,sha256=EAqr2Sb1B1osc198dY1FV2A0wgnBmGsJf99_-GGexVU,62
+cherrypy/scaffold/site.conf,sha256=pjUhF-ir1xzSsV7LqXGfyR6Ns_r_n3ATWw8OlfbgT3w,426
+cherrypy/scaffold/static/made_with_cherrypy_small.png,sha256=VlSRvYj-pZzls-peicQhWpbqkdsZHtNhPtSfZV12BFQ,6347
+cherrypy/test/__init__.py,sha256=jWQJbuVAcOuCITN7V8yrRz7U4rcK_ACx35Ephl4xwqY,396
+cherrypy/test/__pycache__/__init__.cpython-38.pyc,,
+cherrypy/test/__pycache__/_test_decorators.cpython-38.pyc,,
+cherrypy/test/__pycache__/_test_states_demo.cpython-38.pyc,,
+cherrypy/test/__pycache__/benchmark.cpython-38.pyc,,
+cherrypy/test/__pycache__/checkerdemo.cpython-38.pyc,,
+cherrypy/test/__pycache__/helper.cpython-38.pyc,,
+cherrypy/test/__pycache__/logtest.cpython-38.pyc,,
+cherrypy/test/__pycache__/modfastcgi.cpython-38.pyc,,
+cherrypy/test/__pycache__/modfcgid.cpython-38.pyc,,
+cherrypy/test/__pycache__/modpy.cpython-38.pyc,,
+cherrypy/test/__pycache__/modwsgi.cpython-38.pyc,,
+cherrypy/test/__pycache__/sessiondemo.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_auth_basic.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_auth_digest.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_bus.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_caching.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_config.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_config_server.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_conn.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_core.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_dynamicobjectmapping.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_encoding.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_etags.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_http.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_httputil.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_iterator.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_json.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_logging.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_mime.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_misc_tools.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_native.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_objectmapping.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_params.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_plugins.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_proxy.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_refleaks.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_request_obj.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_routes.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_session.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_sessionauthenticate.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_states.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_static.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_tools.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_tutorials.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_virtualhost.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_wsgi_ns.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_wsgi_unix_socket.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_wsgi_vhost.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_wsgiapps.cpython-38.pyc,,
+cherrypy/test/__pycache__/test_xmlrpc.cpython-38.pyc,,
+cherrypy/test/__pycache__/webtest.cpython-38.pyc,,
+cherrypy/test/_test_decorators.py,sha256=U5y0mhWwTcCrYLjY3EWSkkz_Vw-z0bPSP5ujwb6b79Y,951
+cherrypy/test/_test_states_demo.py,sha256=lVpbqHgHcIfdz1-i6xDbmzmD87V3JbwD_e4lK0Bagnw,1876
+cherrypy/test/benchmark.py,sha256=GoTeD98I5tFaDDSQTxmy1OrmTM5XS8bybg41i2j3rB4,12563
+cherrypy/test/checkerdemo.py,sha256=H8muNg9uHSAlDVYc-rICkcUKtJqzlTQfjYmMzC1Fuv4,1861
+cherrypy/test/fastcgi.conf,sha256=0YsIPLmOg-NdGGqCCPpBERKGYy_zBU6LkRDyR41nvBE,686
+cherrypy/test/fcgi.conf,sha256=neiD1sjiFblAJLUdlOSKiZ1uVl1eK3zM2_7LZQigkTs,486
+cherrypy/test/helper.py,sha256=aKFNXMLrk14NRPDIaVEXcbGwN6ls3-2d1i26B9WNnIQ,16369
+cherrypy/test/logtest.py,sha256=f22KP-7dVbmr7mEj9JQSNxaMUERYYvKxoeRx6PGCwqY,8151
+cherrypy/test/modfastcgi.py,sha256=1uqYIQo0LqyaifeWf1GhAf8Ht8jdtI8T5gWiI49P1ME,4652
+cherrypy/test/modfcgid.py,sha256=twCrCSavKZqOTqyN9K2q4VtBkEVpKs0dm8-GDlpLmSY,4237
+cherrypy/test/modpy.py,sha256=pbSFkwS-bJZGOoXrs57mg3ndqTy89zR_x9QyGLlsTJQ,4978
+cherrypy/test/modwsgi.py,sha256=kYF4qAhVwq5u2kLS_FYqCvyh0m_wSN_-Z7w03ZmNRIA,4834
+cherrypy/test/sessiondemo.py,sha256=_vVruGha_8f6Fim_n9LgxwNxebbM6JgiqBvyX0WaEQc,5425
+cherrypy/test/static/404.html,sha256=9jnU0KKbdzHVq9EvhkGOEly92qNJbtCSXOsH0KFTL4U,92
+cherrypy/test/static/dirback.jpg,sha256=eS_X3BSeu8OSu-GTYndM1tJkWoW_oVJp1O_mmUUGeo8,16585
+cherrypy/test/static/index.html,sha256=cB6ALrLhcxEGyMNgOHzmnBvmRnPCW_u3ebZUqdCiHkQ,14
+cherrypy/test/style.css,sha256=2Ypw_ziOWlY4dTZJlwsrorDLqLA1z485lgagaGemtKQ,17
+cherrypy/test/test.pem,sha256=x6LrLPw2dBRyZwHXk6FhdSDNM3-Cv7DBXc8o4A19RhI,2254
+cherrypy/test/test_auth_basic.py,sha256=WatD1zSR8nnY85Hdxxs6iUkEyipgqMBt-Lk2_gVh9CQ,4499
+cherrypy/test/test_auth_digest.py,sha256=6VgZBhz5MZcntE-qznmtwa7P6NrTPe9OFQ_fNH9QMOQ,4454
+cherrypy/test/test_bus.py,sha256=K_6pYUz4q6xBOh5tMK67d3n_X1VF1LJhS1mRsGh0JnQ,9960
+cherrypy/test/test_caching.py,sha256=s6aA_P6mcasaEfPwX50c9A-UqwSiYdzMVp5GFQH08uQ,14386
+cherrypy/test/test_config.py,sha256=lUxRUCBmDVBo8LK7yi8w5qvFcS3vw4YpFwl66TdRskQ,8836
+cherrypy/test/test_config_server.py,sha256=D7jLqZawCOh2z6vGU-WjkupiJA0BxPywb8LuILL2JGA,4037
+cherrypy/test/test_conn.py,sha256=g2e2CCaB_2UiFWVLniVZbk2YNrXqN9_J0M1FymMZ_F8,30744
+cherrypy/test/test_core.py,sha256=i1z0YWyMPmeAOkPvZMfPRJm40E6Y5yvnAiOeckx8nH8,30408
+cherrypy/test/test_dynamicobjectmapping.py,sha256=0VpMCJq1APhGjqwYoYvx0irFJ4yb3NWqdE5RtK11uDM,12331
+cherrypy/test/test_encoding.py,sha256=TbJ-PBuhDA7upGa-YwvzSRr9YuUubi-8kucPykR3VS4,17535
+cherrypy/test/test_etags.py,sha256=mzuKNjFXx67gHoqS_jaGEzjxJ025mzlLgepsW2DYETI,3093
+cherrypy/test/test_http.py,sha256=s5we0qreW82SubkSiPPXPgtRtTD86m5amwOGZkjUJ24,10939
+cherrypy/test/test_httputil.py,sha256=gA3u7bt1vV2Av3T1I0CaEGEdYV9kGDEPD80al1IbPE8,2412
+cherrypy/test/test_iterator.py,sha256=hBKYnKCxcr6DnLCRrwMGyR14t551qdYhrlEHKX7gFCQ,5724
+cherrypy/test/test_json.py,sha256=rVfzyCwSMf79bcZ8aYBA_180FJxcHY9jFT5_0M6-pSc,2860
+cherrypy/test/test_logging.py,sha256=hXL0UT8fv2xEwW853xu7OBg9Mm-Q2N_2GEho1w7H9gM,8315
+cherrypy/test/test_mime.py,sha256=-6HpcAIGtN56nWRQclxvSKhNLW5e_bYRO6kDtM3DlN8,4538
+cherrypy/test/test_misc_tools.py,sha256=Ixjq2IAJZ1BTuV-i_AUKw_OsjYru_i9RPm1gIWyWt_E,7094
+cherrypy/test/test_native.py,sha256=rtow-ShYRkd2oEBtDksU6e06_L0BvZToFJFngeeGF34,971
+cherrypy/test/test_objectmapping.py,sha256=KTMqAizhWBGCgDFp1n8msno4ylYPnmvWZ1cWHzzUgO0,14504
+cherrypy/test/test_params.py,sha256=p4DfugiKWxF9nPX5Gs7arGhUmpx_eeZhWyS5yCXCUj4,1862
+cherrypy/test/test_plugins.py,sha256=afO6r6XOLYaWPO-Crbei1W0xe_VDA1ippMkCtuo_ypc,334
+cherrypy/test/test_proxy.py,sha256=XPdi3O_izRLtvu3UTJE-WTBVmn4DEng7irrfUD69srU,5630
+cherrypy/test/test_refleaks.py,sha256=HK55E9JtRFc28FhnnTLV9DrM1k82ZmPEVdHYyp525K0,1555
+cherrypy/test/test_request_obj.py,sha256=E8eitzZjVwmCCzdikaIvqZA8EPnRcZmuWZAchrgteEo,37097
+cherrypy/test/test_routes.py,sha256=m0MvSqurFk42PuSp5vF8ue40-vnhPNwC2EGTqkDozo4,2583
+cherrypy/test/test_session.py,sha256=Fxkm1kuth5tRb6FKiToFLETDmFwnQeVQKmDYLdo6pU0,17949
+cherrypy/test/test_sessionauthenticate.py,sha256=zhVUpN3FWPaZbMKQcTrDQiaI-RXjlwrJi7ssqbzhmU8,2013
+cherrypy/test/test_states.py,sha256=BWK_-08_tuxqJbHxjZK0_F6HOXKp0GwPODgLntCXqHU,16703
+cherrypy/test/test_static.py,sha256=4VQpLumcxWGNFwaLuCgOS11pyla1gpGgQNlKhMOopws,16702
+cherrypy/test/test_tools.py,sha256=cQELdifYvLfwJPg_0JoGQwyFvsgbuFXIJLUd8ejrgZA,17851
+cherrypy/test/test_tutorials.py,sha256=eUYJZvhiRgq28fRteEks72TkfTxuOIxWKfEP3ApcLiE,6964
+cherrypy/test/test_virtualhost.py,sha256=ap_e1gM7PERVN4mU70zc5RD1pVoSdN-te-B_uIAlV8g,4021
+cherrypy/test/test_wsgi_ns.py,sha256=PuoUe2EUwZk5z0yLIw9NdRkd_P4VKU7Cckj8n0QKSJo,2812
+cherrypy/test/test_wsgi_unix_socket.py,sha256=8guEkMHcOBhOGXYBfAThqCzpIBmUTNpqsilOpaRgD2c,2228
+cherrypy/test/test_wsgi_vhost.py,sha256=4uZ8_luFHiQJ6uxQeHFJtjemug8UiPcKmnwwclj0dkw,1034
+cherrypy/test/test_wsgiapps.py,sha256=1SdQGuWVcVCTiSvizDdIOekuvQLCybRXUKF2dpV2NTs,3997
+cherrypy/test/test_xmlrpc.py,sha256=DQfgzjIMcQP_gOi8el1QQ5dkfPaRbyZ6CDdPL9ZIgWo,4584
+cherrypy/test/webtest.py,sha256=uRwMw_why3KeXGZXdHE7-GfJag4ziL9KZmDGx4Q7Jbg,262
+cherrypy/tutorial/README.rst,sha256=v77BbhuiK44TxqeYPk3PwqV09Dg5AKWFdp7re04KdEo,617
+cherrypy/tutorial/__init__.py,sha256=cmLXfvQI0L6trCXwDzR0WE1bu4JZYt301HJRNhjZOBc,85
+cherrypy/tutorial/__pycache__/__init__.cpython-38.pyc,,
+cherrypy/tutorial/__pycache__/tut01_helloworld.cpython-38.pyc,,
+cherrypy/tutorial/__pycache__/tut02_expose_methods.cpython-38.pyc,,
+cherrypy/tutorial/__pycache__/tut03_get_and_post.cpython-38.pyc,,
+cherrypy/tutorial/__pycache__/tut04_complex_site.cpython-38.pyc,,
+cherrypy/tutorial/__pycache__/tut05_derived_objects.cpython-38.pyc,,
+cherrypy/tutorial/__pycache__/tut06_default_method.cpython-38.pyc,,
+cherrypy/tutorial/__pycache__/tut07_sessions.cpython-38.pyc,,
+cherrypy/tutorial/__pycache__/tut08_generators_and_yield.cpython-38.pyc,,
+cherrypy/tutorial/__pycache__/tut09_files.cpython-38.pyc,,
+cherrypy/tutorial/__pycache__/tut10_http_errors.cpython-38.pyc,,
+cherrypy/tutorial/custom_error.html,sha256=9cMEb83zwct9i-fJlyl7yvBSNexF7yEIWOoxH8lpllQ,404
+cherrypy/tutorial/pdf_file.pdf,sha256=_4ED6K9omlqDodRtP1G3oagWwF4vqQbT1yjeA2UASgw,85698
+cherrypy/tutorial/tut01_helloworld.py,sha256=Au6IDz0Kd1XMPzb-vJL-1y_BWz0GgrljOtqOpzDZeOk,1015
+cherrypy/tutorial/tut02_expose_methods.py,sha256=ikz6QOGLknEZm0k-f9BR18deItQ3UO8yYg1NW13kB8g,801
+cherrypy/tutorial/tut03_get_and_post.py,sha256=bY_cTha4zIkokv585yioQkM-S2a7GfetTE3ovl3-7cw,1587
+cherrypy/tutorial/tut04_complex_site.py,sha256=xg2llF64S2jg4geG9IlFL4RkQdsUXZtri0HvNXVy6Ac,2948
+cherrypy/tutorial/tut05_derived_objects.py,sha256=u0LBnUTW8DnexEAtowhJ0sjeAqp6rS065TM2QXfDY1I,2141
+cherrypy/tutorial/tut06_default_method.py,sha256=3Wx34fL_4P3M_dhu6RQdpXQeSriIJpSsy72IqpNQ6ns,2264
+cherrypy/tutorial/tut07_sessions.py,sha256=fQo-v_ol5CjXiq4vdsm7Dh1us6DVDzbqAIouRkmLeR8,1228
+cherrypy/tutorial/tut08_generators_and_yield.py,sha256=m5GfOtNDoGxMd1rw5tCRg3o9cTyt-e1gR-ALoLRLiQw,1288
+cherrypy/tutorial/tut09_files.py,sha256=qcelN09_k62zWVC-daId5I4i-fw6TWA4WddUbG4j8B8,3463
+cherrypy/tutorial/tut10_http_errors.py,sha256=6GllO8SI-6Xs6R8hRwHe7jUzGxYJxLlxwxgaOJC9i8Y,2706
+cherrypy/tutorial/tutorial.conf,sha256=9ENgfRDyopHuignr_aHeMaWoC562xThbmlgF6zg4oEE,96
diff --git a/monEnvTP/lib/python3.8/site-packages/CherryPy-18.6.1.dist-info/WHEEL b/monEnvTP/lib/python3.8/site-packages/CherryPy-18.6.1.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..01b8fc7d4a10cb8b4f1d21f11d3398d07d6b3478
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/CherryPy-18.6.1.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.36.2)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/monEnvTP/lib/python3.8/site-packages/CherryPy-18.6.1.dist-info/entry_points.txt b/monEnvTP/lib/python3.8/site-packages/CherryPy-18.6.1.dist-info/entry_points.txt
new file mode 100644
index 0000000000000000000000000000000000000000..044979dc161dcfade54c5b8caf76c445fdd3eee5
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/CherryPy-18.6.1.dist-info/entry_points.txt
@@ -0,0 +1,3 @@
+[console_scripts]
+cherryd = cherrypy.__main__:run
+
diff --git a/monEnvTP/lib/python3.8/site-packages/CherryPy-18.6.1.dist-info/top_level.txt b/monEnvTP/lib/python3.8/site-packages/CherryPy-18.6.1.dist-info/top_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..d718706924ce240bc067bb80661f7e62f41d58da
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/CherryPy-18.6.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+cherrypy
diff --git a/monEnvTP/lib/python3.8/site-packages/__pycache__/easy_install.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/__pycache__/easy_install.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..77bc32701aced1e5aab1798213f64f6a3bf229d2
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/__pycache__/easy_install.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/__pycache__/portend.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/__pycache__/portend.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..483ac0e5d5ef436b7ba833941ae5f63590ebec3a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/__pycache__/portend.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/__pycache__/six.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/__pycache__/six.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..76c94553c31cf95afe07809d923d3b610880d244
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/__pycache__/six.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/__pycache__/zipp.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/__pycache__/zipp.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4c9d13e2b90317250f7d94fc8b34889e34d91dfd
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/__pycache__/zipp.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/_mysql_connector.cpython-38-x86_64-linux-gnu.so b/monEnvTP/lib/python3.8/site-packages/_mysql_connector.cpython-38-x86_64-linux-gnu.so
new file mode 100755
index 0000000000000000000000000000000000000000..13481d700cb3cf24f1e1c3f19bab0016df50d09a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/_mysql_connector.cpython-38-x86_64-linux-gnu.so differ
diff --git a/monEnvTP/lib/python3.8/site-packages/_mysqlxpb.cpython-38-x86_64-linux-gnu.so b/monEnvTP/lib/python3.8/site-packages/_mysqlxpb.cpython-38-x86_64-linux-gnu.so
new file mode 100755
index 0000000000000000000000000000000000000000..0f168080e1018ef0a9da7f8124ecec7f13310f25
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/_mysqlxpb.cpython-38-x86_64-linux-gnu.so differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot-8.6.0.dist-info/INSTALLER b/monEnvTP/lib/python3.8/site-packages/cheroot-8.6.0.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot-8.6.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot-8.6.0.dist-info/LICENSE.md b/monEnvTP/lib/python3.8/site-packages/cheroot-8.6.0.dist-info/LICENSE.md
new file mode 100644
index 0000000000000000000000000000000000000000..9216409088b7f5d8e4400b3fe1b3375043ab5359
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot-8.6.0.dist-info/LICENSE.md
@@ -0,0 +1,30 @@
+Copyright © 2004-2020, CherryPy Team (team@cherrypy.dev)
+
+All rights reserved.
+
+* * *
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+* Neither the name of CherryPy nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot-8.6.0.dist-info/METADATA b/monEnvTP/lib/python3.8/site-packages/cheroot-8.6.0.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..982c7ddc2d33280a01d5d39cd78eb9b91ef78d55
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot-8.6.0.dist-info/METADATA
@@ -0,0 +1,150 @@
+Metadata-Version: 2.1
+Name: cheroot
+Version: 8.6.0
+Summary: Highly-optimized, pure-python HTTP server
+Home-page: https://cheroot.cherrypy.dev
+Author: CherryPy Team
+Author-email: team@cherrypy.dev
+License: UNKNOWN
+Project-URL: CI: GitHub, https://github.com/cherrypy/cheroot/actions
+Project-URL: Docs: RTD, https://cheroot.cherrypy.dev
+Project-URL: GitHub: issues, https://github.com/cherrypy/cheroot/issues
+Project-URL: GitHub: repo, https://github.com/cherrypy/cheroot
+Project-URL: Tidelift: funding, https://tidelift.com/subscription/pkg/pypi-cheroot?utm_source=pypi-cheroot&utm_medium=referral&utm_campaign=pypi
+Keywords: http,server,ssl,wsgi
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: Operating System :: OS Independent
+Classifier: Framework :: CherryPy
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: Implementation
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: Jython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Internet :: WWW/HTTP
+Classifier: Topic :: Internet :: WWW/HTTP :: HTTP Servers
+Classifier: Topic :: Internet :: WWW/HTTP :: WSGI
+Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Server
+Classifier: Typing :: Typed
+Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7
+Description-Content-Type: text/x-rst
+License-File: LICENSE.md
+Requires-Dist: six (>=1.11.0)
+Requires-Dist: jaraco.functools
+Requires-Dist: backports.functools-lru-cache ; python_version < "3.3"
+Requires-Dist: selectors2 ; python_version < "3.4"
+Requires-Dist: more-itertools (<8.11.0,>=2.6) ; python_version < "3.6"
+Requires-Dist: more-itertools (>=2.6) ; python_version >= "3.6"
+Provides-Extra: docs
+Requires-Dist: sphinx (>=1.8.2) ; extra == 'docs'
+Requires-Dist: jaraco.packaging (>=3.2) ; extra == 'docs'
+Requires-Dist: sphinx-tabs (>=1.1.0) ; extra == 'docs'
+Requires-Dist: furo ; extra == 'docs'
+Requires-Dist: python-dateutil ; extra == 'docs'
+Requires-Dist: sphinxcontrib-apidoc (>=0.3.0) ; extra == 'docs'
+
+.. image:: https://img.shields.io/pypi/v/cheroot.svg
+ :target: https://pypi.org/project/cheroot
+
+.. image:: https://tidelift.com/badges/package/pypi/cheroot
+ :target: https://tidelift.com/subscription/pkg/pypi-cheroot?utm_source=pypi-cheroot&utm_medium=readme
+ :alt: Cheroot is available as part of the Tidelift Subscription
+
+.. image:: https://github.com/cherrypy/cheroot/actions/workflows/ci-cd.yml/badge.svg
+ :target: https://github.com/cherrypy/cheroot/actions/workflows/ci-cd.yml
+ :alt: GitHub Actions CI/CD Workflow
+
+.. image:: https://img.shields.io/badge/license-BSD-blue.svg?maxAge=3600
+ :target: https://pypi.org/project/cheroot
+
+.. image:: https://img.shields.io/pypi/pyversions/cheroot.svg
+ :target: https://pypi.org/project/cheroot
+
+.. image:: https://codecov.io/gh/cherrypy/cheroot/branch/master/graph/badge.svg
+ :target: https://codecov.io/gh/cherrypy/cheroot
+ :alt: codecov
+
+.. image:: https://readthedocs.org/projects/cheroot/badge/?version=latest
+ :target: https://cheroot.cherrypy.dev/en/latest/?badge=latest
+
+.. image:: https://img.shields.io/badge/StackOverflow-Cheroot-blue.svg
+ :target: https://stackoverflow.com/questions/tagged/cheroot+or+cherrypy
+
+.. image:: https://img.shields.io/gitter/room/cherrypy/cherrypy.svg
+ :target: https://gitter.im/cherrypy/cherrypy
+
+.. image:: https://img.shields.io/badge/PRs-welcome-brightgreen.svg?style=flat-square
+ :target: http://makeapullrequest.com/
+
+.. image:: https://app.fossa.io/api/projects/git%2Bgithub.com%2Fcherrypy%2Fcheroot.svg?type=shield
+ :target: https://app.fossa.io/projects/git%2Bgithub.com%2Fcherrypy%2Fcheroot?ref=badge_shield
+ :alt: FOSSA Status
+
+Cheroot is the high-performance, pure-Python HTTP server used by CherryPy.
+
+Status
+======
+
+The test suite currently relies on pytest. It's being run via GitHub
+Actions CI/CD workflows.
+
+For Enterprise
+==============
+
+.. list-table::
+ :widths: 10 100
+
+ * - |tideliftlogo|
+ - Professional support for Cheroot is available as part of the
+ `Tidelift Subscription`_. The CherryPy maintainers and the
+ maintainers of thousands of other packages are working with
+ Tidelift to deliver one enterprise subscription that covers all
+ of the open source you use.
+
+ Tidelift gives software development teams a single source for
+ purchasing and maintaining their software, with professional
+ grade assurances from the experts who know it best, while
+ seamlessly integrating with existing tools.
+
+ `Learn more <Tidelift Subscription_>`_.
+
+.. _Tidelift Subscription: https://tidelift.com/subscription/pkg/pypi-cheroot?utm_source=pypi-cheroot&utm_medium=referral&utm_campaign=readme
+
+.. |tideliftlogo| image:: https://cdn2.hubspot.net/hubfs/4008838/website/logos/logos_for_download/Tidelift_primary-shorthand-logo.png
+ :target: https://tidelift.com/subscription/pkg/pypi-cheroot?utm_source=pypi-cheroot&utm_medium=readme
+ :width: 75
+ :alt: Tidelift
+
+Contribute Cheroot
+==================
+**Want to add something to upstream?** Feel free to submit a PR or file an issue
+if unsure. Please follow `CherryPy's common contribution guidelines
+<https://github.com/cherrypy/cherrypy/blob/master/.github/CONTRIBUTING.rst>`_.
+Note that PR is more likely to be accepted if it includes tests and detailed
+description helping maintainers to understand it better 🎉
+
+Oh, and be pythonic, please 🐍
+
+**Don't know how?** Check out `How to Contribute to Open Source
+<https://opensource.guide/how-to-contribute/>`_ article by GitHub 🚀
+
+
+License
+=======
+.. image:: https://app.fossa.io/api/projects/git%2Bgithub.com%2Fcherrypy%2Fcheroot.svg?type=large
+ :target: https://app.fossa.io/projects/git%2Bgithub.com%2Fcherrypy%2Fcheroot?ref=badge_large
+ :alt: FOSSA Status
+
+
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot-8.6.0.dist-info/RECORD b/monEnvTP/lib/python3.8/site-packages/cheroot-8.6.0.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..7567577ed3037b4aecce40cb59100021c8e1e8fc
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot-8.6.0.dist-info/RECORD
@@ -0,0 +1,82 @@
+../../../bin/cheroot,sha256=UMHq7TfQ4Ha3XV-WCBXqjgmJtfsJsJj4OLag-Dgztco,248
+cheroot-8.6.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+cheroot-8.6.0.dist-info/LICENSE.md,sha256=4g_utJGn6YCE8VcZNJ6YV6rUHEUDxeR5-IFbBj2_dWQ,1511
+cheroot-8.6.0.dist-info/METADATA,sha256=fRvx2O4OTgRWfn7bP0Bz-thEcRkz7RNkk25lxGOrIMQ,6481
+cheroot-8.6.0.dist-info/RECORD,,
+cheroot-8.6.0.dist-info/WHEEL,sha256=z9j0xAa_JmUKMpmz72K0ZGALSM_n-wQVmGbleXx2VHg,110
+cheroot-8.6.0.dist-info/entry_points.txt,sha256=BapQtPZPUE38-YhhtJUBwlkNRNY_yg0s3_bt5yVqqG8,46
+cheroot-8.6.0.dist-info/top_level.txt,sha256=P8VZfrem5gTRS34X6Thu7jyEoj_zSaPNI_3P0fShbAI,8
+cheroot/__init__.py,sha256=vzAolu3TKXLhB1P55oKbdDV6d67oMqCzCMSwq7XD3lg,334
+cheroot/__init__.pyi,sha256=Y25n44pyE3vp92MiABKrcK3IWRyQ1JG1rZ4Ufqy2nC0,17
+cheroot/__main__.py,sha256=jLOqwD221LYbg1ksPWhjOKYu4M4yV7y3mteM6CJi-Oc,109
+cheroot/__pycache__/__init__.cpython-38.pyc,,
+cheroot/__pycache__/__main__.cpython-38.pyc,,
+cheroot/__pycache__/_compat.cpython-38.pyc,,
+cheroot/__pycache__/cli.cpython-38.pyc,,
+cheroot/__pycache__/connections.cpython-38.pyc,,
+cheroot/__pycache__/errors.cpython-38.pyc,,
+cheroot/__pycache__/makefile.cpython-38.pyc,,
+cheroot/__pycache__/server.cpython-38.pyc,,
+cheroot/__pycache__/testing.cpython-38.pyc,,
+cheroot/__pycache__/wsgi.cpython-38.pyc,,
+cheroot/_compat.py,sha256=NrRs5Z9OYYcGTH3utLOQsdDfmWHTO5UTMuPltB2Ov7M,4436
+cheroot/cli.py,sha256=EtiT5UX3K4VF_0J7WVgfTwj6R6wcV83ah9tgdDAggVw,6994
+cheroot/cli.pyi,sha256=LIKNaRFyZVRRl2n3Jm_VTJjYaNsDSJQ7IcEr5qcjZR0,828
+cheroot/connections.py,sha256=j6bnZWLLErrczSc21d1JDYZtuKc_LCO1h2bdQIiGSvg,14723
+cheroot/connections.pyi,sha256=r-I9Mkn-PHcjqQvekmMmQNbh8a5-sJyfLp6d5sP7T48,714
+cheroot/errors.py,sha256=dvTrgjdKFItfv2RzXwHWgQAri6NVvkUzA2Ftn3HU6uA,2944
+cheroot/errors.pyi,sha256=ha91OvE0Zx2P_af_NBYEWfH-Y2lhSogPUv--EuHucms,413
+cheroot/makefile.py,sha256=kHpPY5SPjYPoHRIb2OVcR84qSfQT9LJcqvGUB9mM2HU,16368
+cheroot/makefile.pyi,sha256=oOxPy_NHJvXLAkvM0pxvS1S0MGFl5bAVhMkTkcPw0Vg,931
+cheroot/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+cheroot/server.py,sha256=n_kxnlDMDNBMoDRLe__z5-2uYw3Xtsy_a_cDyFt4Qro,76777
+cheroot/server.pyi,sha256=fwWhE98nNp1BtWDlkmWO8rUiNO7Eo7Goxh2_yKIHthY,4910
+cheroot/ssl/__init__.py,sha256=oJ642-rs_gn-Iky3yNpZ_jp6JScXZ5h6VGkIQh6Es9c,1539
+cheroot/ssl/__init__.pyi,sha256=diz9LWOaOnci4cHstYnA6_kz_muPetXmlkxOV4qRy34,555
+cheroot/ssl/__pycache__/__init__.cpython-38.pyc,,
+cheroot/ssl/__pycache__/builtin.cpython-38.pyc,,
+cheroot/ssl/__pycache__/pyopenssl.cpython-38.pyc,,
+cheroot/ssl/builtin.py,sha256=wbNh4pmb4B3OmRg1auRkTV03LGH6q839csVhU-eh6yc,18045
+cheroot/ssl/builtin.pyi,sha256=6Vebb7sVgVJ2VBiHgYrWBycCQio51tOpJ1biAjkMfkA,585
+cheroot/ssl/pyopenssl.py,sha256=dt2DnIePqb15FUq5fyiFeUFZn32kCIJ2xHH2U0m1AKg,13339
+cheroot/ssl/pyopenssl.pyi,sha256=yy87Xh_cq-Ibb-POumzI-0HMLFtHke89HGPAgKPSOUU,1018
+cheroot/test/__init__.py,sha256=_hgyWgeLHsLLScMhGwMOsevD3Wg3pssK535YfNJFoeU,26
+cheroot/test/__pycache__/__init__.cpython-38.pyc,,
+cheroot/test/__pycache__/_pytest_plugin.cpython-38.pyc,,
+cheroot/test/__pycache__/conftest.cpython-38.pyc,,
+cheroot/test/__pycache__/helper.cpython-38.pyc,,
+cheroot/test/__pycache__/test__compat.cpython-38.pyc,,
+cheroot/test/__pycache__/test_cli.cpython-38.pyc,,
+cheroot/test/__pycache__/test_conn.cpython-38.pyc,,
+cheroot/test/__pycache__/test_core.cpython-38.pyc,,
+cheroot/test/__pycache__/test_dispatch.cpython-38.pyc,,
+cheroot/test/__pycache__/test_errors.cpython-38.pyc,,
+cheroot/test/__pycache__/test_makefile.cpython-38.pyc,,
+cheroot/test/__pycache__/test_server.cpython-38.pyc,,
+cheroot/test/__pycache__/test_ssl.cpython-38.pyc,,
+cheroot/test/__pycache__/test_wsgi.cpython-38.pyc,,
+cheroot/test/__pycache__/webtest.cpython-38.pyc,,
+cheroot/test/_pytest_plugin.py,sha256=PfDs5_XsVsrUPiMJzahRNNx3rnWKytDp43B5VqpVRmU,2085
+cheroot/test/conftest.py,sha256=VpHQPt4hoW0kQQdttfHyVtg_fZfSoZzXkPBBBQkV7h8,1839
+cheroot/test/helper.py,sha256=np9pohGTTz06MqBU64f-3lBtnFdUnmU1sdH2nongOT4,4896
+cheroot/test/test__compat.py,sha256=mSNh4fw65xYoYIDosIenRmoXuQoRMwPn_xjQLiqp_jE,1818
+cheroot/test/test_cli.py,sha256=iIOuWA8ng966x92UwizzwSzFgZuFt38kn6cYe1tn_q0,3091
+cheroot/test/test_conn.py,sha256=nqiaGB2sh4nWec32UootU85reoGOtTngYMzTBaD3mGE,43739
+cheroot/test/test_core.py,sha256=2O1TdtpeFKavsA-43Y__TPxXA4zimQUjyt6yHJMuGvQ,14652
+cheroot/test/test_dispatch.py,sha256=FyNSuo4GXpUSLDs0bXIskF35qk7wVqXPhgP0c09FCDI,1332
+cheroot/test/test_errors.py,sha256=xknJ_rJsI_L6FuAUoqqHh99ywTkOdILSU9akT7jeDwA,868
+cheroot/test/test_makefile.py,sha256=FPDaw2RdecyoKsa2kDRpk5k2kfWmWfWSegpX9pRoxOA,1214
+cheroot/test/test_server.py,sha256=WHu3pFrBDGWhbWPt1wUgXKt7etCfNthlhYuuJxdwYVQ,12813
+cheroot/test/test_ssl.py,sha256=2J2a2ty1AZksca91G3CBFhAaoKDY3A3vfzsox_nDYhU,23991
+cheroot/test/test_wsgi.py,sha256=92OZaPAkk_6Tpv0gNl_eRInkjH18YMBkQmyBv-QN6Bo,2758
+cheroot/test/webtest.py,sha256=3VUNbDRrB5WwKG2Bgh3DMktmtky19MNSVw30EYJiUd0,19205
+cheroot/testing.py,sha256=cHMJo2jmcbW6BSXTULmcSFH7oiP8RtDUzsjZcQxippc,4172
+cheroot/testing.pyi,sha256=HlbbwoROyn5DSAuCQxcUw58v2cCKu1MG7ySzwbH0ZXk,448
+cheroot/workers/__init__.py,sha256=-ziyw7iPWHs2dN4R_Q7AQZ7r0dQPTes1nVCzAg2LOc8,25
+cheroot/workers/__init__.pyi,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+cheroot/workers/__pycache__/__init__.cpython-38.pyc,,
+cheroot/workers/__pycache__/threadpool.cpython-38.pyc,,
+cheroot/workers/threadpool.py,sha256=hXJBSnDM6SNarn2iWDeL_7y2Du64_ZZDgYMf6KCC30I,10700
+cheroot/workers/threadpool.pyi,sha256=9xF6s4LAwnURJbdG1K8f97zGk2YVb2kEULJMaxsUXiI,925
+cheroot/wsgi.py,sha256=n7cQ_RcuQci3cX-7qlGdpYw906fK5GN4I4JrJO_Swm4,14870
+cheroot/wsgi.pyi,sha256=4PT0w-BFcAxlv_WCgsfdcRxTnmfzXe-ktIWzNbVe0cE,1348
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot-8.6.0.dist-info/WHEEL b/monEnvTP/lib/python3.8/site-packages/cheroot-8.6.0.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..0b18a281107a0448a9980396d9d324ea2aa7a7f8
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot-8.6.0.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.1)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot-8.6.0.dist-info/entry_points.txt b/monEnvTP/lib/python3.8/site-packages/cheroot-8.6.0.dist-info/entry_points.txt
new file mode 100644
index 0000000000000000000000000000000000000000..087790b4a633cb6b017e31e9278473fbaae061c9
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot-8.6.0.dist-info/entry_points.txt
@@ -0,0 +1,3 @@
+[console_scripts]
+cheroot = cheroot.cli:main
+
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot-8.6.0.dist-info/top_level.txt b/monEnvTP/lib/python3.8/site-packages/cheroot-8.6.0.dist-info/top_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..a610cf883cada013afe41dd3c1b5dfbb1a7e1d5f
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot-8.6.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+cheroot
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/__init__.py b/monEnvTP/lib/python3.8/site-packages/cheroot/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..30d38cab7b6dd7ff259f14e12db84e8df55ccd76
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/__init__.py
@@ -0,0 +1,15 @@
+"""High-performance, pure-Python HTTP server used by CherryPy."""
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+try:
+ import pkg_resources
+except ImportError:
+ pass
+
+
+try:
+ __version__ = pkg_resources.get_distribution('cheroot').version
+except Exception:
+ __version__ = 'unknown'
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/__init__.pyi b/monEnvTP/lib/python3.8/site-packages/cheroot/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..bda5b5a7f4cc3cdc8ef37ca67f3a0d26c3c0aad7
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/__init__.pyi
@@ -0,0 +1 @@
+__version__: str
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/__main__.py b/monEnvTP/lib/python3.8/site-packages/cheroot/__main__.py
new file mode 100644
index 0000000000000000000000000000000000000000..d2e27c1083eeee804e8c5e76f1c5f145e7d1864b
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/__main__.py
@@ -0,0 +1,6 @@
+"""Stub for accessing the Cheroot CLI tool."""
+
+from .cli import main
+
+if __name__ == '__main__':
+ main()
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cheroot/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8534dfc919adfb67779f98493f0f26e5ea74bfe3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cheroot/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/__pycache__/__main__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cheroot/__pycache__/__main__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..917ad55079633526fc391ae7eea97635a6b6ebda
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cheroot/__pycache__/__main__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/__pycache__/_compat.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cheroot/__pycache__/_compat.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3f64b9bac9bad24c7a41f8bd75b903c3267cbd3f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cheroot/__pycache__/_compat.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/__pycache__/cli.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cheroot/__pycache__/cli.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..dd54c9fba6c03ae1fb560950ce9d0903e115e290
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cheroot/__pycache__/cli.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/__pycache__/connections.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cheroot/__pycache__/connections.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..acd2e43f94e3a7fb6bf7cd1e146188f12b54da68
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cheroot/__pycache__/connections.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/__pycache__/errors.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cheroot/__pycache__/errors.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a1301b5a890d4a8da88a343837bf22b7b88d08b9
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cheroot/__pycache__/errors.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/__pycache__/makefile.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cheroot/__pycache__/makefile.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..06b19d27a0f8f60fd5820baf28cf6369505c031c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cheroot/__pycache__/makefile.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/__pycache__/server.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cheroot/__pycache__/server.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2849c08016f48754279235ed4482f637d1300f67
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cheroot/__pycache__/server.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/__pycache__/testing.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cheroot/__pycache__/testing.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3a9e199c78bd49e39332944b633314f809f3b637
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cheroot/__pycache__/testing.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/__pycache__/wsgi.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cheroot/__pycache__/wsgi.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d64ae169ed2c787931259da08d4302236ea1a9a0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cheroot/__pycache__/wsgi.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/_compat.py b/monEnvTP/lib/python3.8/site-packages/cheroot/_compat.py
new file mode 100644
index 0000000000000000000000000000000000000000..10dcdefa396e0effb8af2986ec8a058dbfede30b
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/_compat.py
@@ -0,0 +1,148 @@
+# pylint: disable=unused-import
+"""Compatibility code for using Cheroot with various versions of Python."""
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+import os
+import platform
+import re
+
+import six
+
+try:
+ import selectors # lgtm [py/unused-import]
+except ImportError:
+ import selectors2 as selectors # noqa: F401 # lgtm [py/unused-import]
+
+try:
+ import ssl
+ IS_ABOVE_OPENSSL10 = ssl.OPENSSL_VERSION_INFO >= (1, 1)
+ del ssl
+except ImportError:
+ IS_ABOVE_OPENSSL10 = None
+
+# contextlib.suppress was added in Python 3.4
+try:
+ from contextlib import suppress
+except ImportError:
+ from contextlib import contextmanager
+
+ @contextmanager
+ def suppress(*exceptions):
+ """Return a context manager that suppresses the `exceptions`."""
+ try:
+ yield
+ except exceptions:
+ pass
+
+
+IS_CI = bool(os.getenv('CI'))
+IS_GITHUB_ACTIONS_WORKFLOW = bool(os.getenv('GITHUB_WORKFLOW'))
+
+
+IS_PYPY = platform.python_implementation() == 'PyPy'
+
+
+SYS_PLATFORM = platform.system()
+IS_WINDOWS = SYS_PLATFORM == 'Windows'
+IS_LINUX = SYS_PLATFORM == 'Linux'
+IS_MACOS = SYS_PLATFORM == 'Darwin'
+
+PLATFORM_ARCH = platform.machine()
+IS_PPC = PLATFORM_ARCH.startswith('ppc')
+
+
+if not six.PY2:
+ def ntob(n, encoding='ISO-8859-1'):
+ """Return the native string as bytes in the given encoding."""
+ assert_native(n)
+ # In Python 3, the native string type is unicode
+ return n.encode(encoding)
+
+ def ntou(n, encoding='ISO-8859-1'):
+ """Return the native string as Unicode with the given encoding."""
+ assert_native(n)
+ # In Python 3, the native string type is unicode
+ return n
+
+ def bton(b, encoding='ISO-8859-1'):
+ """Return the byte string as native string in the given encoding."""
+ return b.decode(encoding)
+else:
+ # Python 2
+ def ntob(n, encoding='ISO-8859-1'):
+ """Return the native string as bytes in the given encoding."""
+ assert_native(n)
+ # In Python 2, the native string type is bytes. Assume it's already
+ # in the given encoding, which for ISO-8859-1 is almost always what
+ # was intended.
+ return n
+
+ def ntou(n, encoding='ISO-8859-1'):
+ """Return the native string as Unicode with the given encoding."""
+ assert_native(n)
+ # In Python 2, the native string type is bytes.
+ # First, check for the special encoding 'escape'. The test suite uses
+ # this to signal that it wants to pass a string with embedded \uXXXX
+ # escapes, but without having to prefix it with u'' for Python 2,
+ # but no prefix for Python 3.
+ if encoding == 'escape':
+ return re.sub(
+ r'\\u([0-9a-zA-Z]{4})',
+ lambda m: six.unichr(int(m.group(1), 16)),
+ n.decode('ISO-8859-1'),
+ )
+ # Assume it's already in the given encoding, which for ISO-8859-1
+ # is almost always what was intended.
+ return n.decode(encoding)
+
+ def bton(b, encoding='ISO-8859-1'):
+ """Return the byte string as native string in the given encoding."""
+ return b
+
+
+def assert_native(n):
+ """Check whether the input is of native :py:class:`str` type.
+
+ Raises:
+ TypeError: in case of failed check
+
+ """
+ if not isinstance(n, str):
+ raise TypeError('n must be a native str (got %s)' % type(n).__name__)
+
+
+if not six.PY2:
+ """Python 3 has :py:class:`memoryview` builtin."""
+ # Python 2.7 has it backported, but socket.write() does
+ # str(memoryview(b'0' * 100)) -> <memory at 0x7fb6913a5588>
+ # instead of accessing it correctly.
+ memoryview = memoryview
+else:
+ """Link :py:class:`memoryview` to buffer under Python 2."""
+ memoryview = buffer # noqa: F821
+
+
+def extract_bytes(mv):
+ r"""Retrieve bytes out of the given input buffer.
+
+ :param mv: input :py:func:`buffer`
+ :type mv: memoryview or bytes
+
+ :return: unwrapped bytes
+ :rtype: bytes
+
+ :raises ValueError: if the input is not one of \
+ :py:class:`memoryview`/:py:func:`buffer` \
+ or :py:class:`bytes`
+ """
+ if isinstance(mv, memoryview):
+ return bytes(mv) if six.PY2 else mv.tobytes()
+
+ if isinstance(mv, bytes):
+ return mv
+
+ raise ValueError(
+ 'extract_bytes() only accepts bytes and memoryview/buffer',
+ )
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/cli.py b/monEnvTP/lib/python3.8/site-packages/cheroot/cli.py
new file mode 100644
index 0000000000000000000000000000000000000000..4607e22671f8d803291df900facc9e0efa3b8d25
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/cli.py
@@ -0,0 +1,247 @@
+"""Command line tool for starting a Cheroot WSGI/HTTP server instance.
+
+Basic usage:
+
+.. code-block:: shell-session
+
+ $ # Start a server on 127.0.0.1:8000 with the default settings
+ $ # for the WSGI app myapp/wsgi.py:application()
+ $ cheroot myapp.wsgi
+
+ $ # Start a server on 0.0.0.0:9000 with 8 threads
+ $ # for the WSGI app myapp/wsgi.py:main_app()
+ $ cheroot myapp.wsgi:main_app --bind 0.0.0.0:9000 --threads 8
+
+ $ # Start a server for the cheroot.server.Gateway subclass
+ $ # myapp/gateway.py:HTTPGateway
+ $ cheroot myapp.gateway:HTTPGateway
+
+ $ # Start a server on the UNIX socket /var/spool/myapp.sock
+ $ cheroot myapp.wsgi --bind /var/spool/myapp.sock
+
+ $ # Start a server on the abstract UNIX socket CherootServer
+ $ cheroot myapp.wsgi --bind @CherootServer
+
+.. spelling::
+
+ cli
+"""
+
+import argparse
+from importlib import import_module
+import os
+import sys
+
+import six
+
+from . import server
+from . import wsgi
+from ._compat import suppress
+
+
+__metaclass__ = type
+
+
+class BindLocation:
+ """A class for storing the bind location for a Cheroot instance."""
+
+
+class TCPSocket(BindLocation):
+ """TCPSocket."""
+
+ def __init__(self, address, port):
+ """Initialize.
+
+ Args:
+ address (str): Host name or IP address
+ port (int): TCP port number
+
+ """
+ self.bind_addr = address, port
+
+
+class UnixSocket(BindLocation):
+ """UnixSocket."""
+
+ def __init__(self, path):
+ """Initialize."""
+ self.bind_addr = path
+
+
+class AbstractSocket(BindLocation):
+ """AbstractSocket."""
+
+ def __init__(self, abstract_socket):
+ """Initialize."""
+ self.bind_addr = '\x00{sock_path}'.format(sock_path=abstract_socket)
+
+
+class Application:
+ """Application."""
+
+ @classmethod
+ def resolve(cls, full_path):
+ """Read WSGI app/Gateway path string and import application module."""
+ mod_path, _, app_path = full_path.partition(':')
+ app = getattr(import_module(mod_path), app_path or 'application')
+ # suppress the `TypeError` exception, just in case `app` is not a class
+ with suppress(TypeError):
+ if issubclass(app, server.Gateway):
+ return GatewayYo(app)
+
+ return cls(app)
+
+ def __init__(self, wsgi_app):
+ """Initialize."""
+ if not callable(wsgi_app):
+ raise TypeError(
+ 'Application must be a callable object or '
+ 'cheroot.server.Gateway subclass',
+ )
+ self.wsgi_app = wsgi_app
+
+ def server_args(self, parsed_args):
+ """Return keyword args for Server class."""
+ args = {
+ arg: value
+ for arg, value in vars(parsed_args).items()
+ if not arg.startswith('_') and value is not None
+ }
+ args.update(vars(self))
+ return args
+
+ def server(self, parsed_args):
+ """Server."""
+ return wsgi.Server(**self.server_args(parsed_args))
+
+
+class GatewayYo:
+ """Gateway."""
+
+ def __init__(self, gateway):
+ """Init."""
+ self.gateway = gateway
+
+ def server(self, parsed_args):
+ """Server."""
+ server_args = vars(self)
+ server_args['bind_addr'] = parsed_args['bind_addr']
+ if parsed_args.max is not None:
+ server_args['maxthreads'] = parsed_args.max
+ if parsed_args.numthreads is not None:
+ server_args['minthreads'] = parsed_args.numthreads
+ return server.HTTPServer(**server_args)
+
+
+def parse_wsgi_bind_location(bind_addr_string):
+ """Convert bind address string to a BindLocation."""
+ # if the string begins with an @ symbol, use an abstract socket,
+ # this is the first condition to verify, otherwise the urlparse
+ # validation would detect //@<value> as a valid url with a hostname
+ # with value: "<value>" and port: None
+ if bind_addr_string.startswith('@'):
+ return AbstractSocket(bind_addr_string[1:])
+
+ # try and match for an IP/hostname and port
+ match = six.moves.urllib.parse.urlparse(
+ '//{addr}'.format(addr=bind_addr_string),
+ )
+ try:
+ addr = match.hostname
+ port = match.port
+ if addr is not None or port is not None:
+ return TCPSocket(addr, port)
+ except ValueError:
+ pass
+
+ # else, assume a UNIX socket path
+ return UnixSocket(path=bind_addr_string)
+
+
+def parse_wsgi_bind_addr(bind_addr_string):
+ """Convert bind address string to bind address parameter."""
+ return parse_wsgi_bind_location(bind_addr_string).bind_addr
+
+
+_arg_spec = {
+ '_wsgi_app': {
+ 'metavar': 'APP_MODULE',
+ 'type': Application.resolve,
+ 'help': 'WSGI application callable or cheroot.server.Gateway subclass',
+ },
+ '--bind': {
+ 'metavar': 'ADDRESS',
+ 'dest': 'bind_addr',
+ 'type': parse_wsgi_bind_addr,
+ 'default': '[::1]:8000',
+ 'help': 'Network interface to listen on (default: [::1]:8000)',
+ },
+ '--chdir': {
+ 'metavar': 'PATH',
+ 'type': os.chdir,
+ 'help': 'Set the working directory',
+ },
+ '--server-name': {
+ 'dest': 'server_name',
+ 'type': str,
+ 'help': 'Web server name to be advertised via Server HTTP header',
+ },
+ '--threads': {
+ 'metavar': 'INT',
+ 'dest': 'numthreads',
+ 'type': int,
+ 'help': 'Minimum number of worker threads',
+ },
+ '--max-threads': {
+ 'metavar': 'INT',
+ 'dest': 'max',
+ 'type': int,
+ 'help': 'Maximum number of worker threads',
+ },
+ '--timeout': {
+ 'metavar': 'INT',
+ 'dest': 'timeout',
+ 'type': int,
+ 'help': 'Timeout in seconds for accepted connections',
+ },
+ '--shutdown-timeout': {
+ 'metavar': 'INT',
+ 'dest': 'shutdown_timeout',
+ 'type': int,
+ 'help': 'Time in seconds to wait for worker threads to cleanly exit',
+ },
+ '--request-queue-size': {
+ 'metavar': 'INT',
+ 'dest': 'request_queue_size',
+ 'type': int,
+ 'help': 'Maximum number of queued connections',
+ },
+ '--accepted-queue-size': {
+ 'metavar': 'INT',
+ 'dest': 'accepted_queue_size',
+ 'type': int,
+ 'help': 'Maximum number of active requests in queue',
+ },
+ '--accepted-queue-timeout': {
+ 'metavar': 'INT',
+ 'dest': 'accepted_queue_timeout',
+ 'type': int,
+ 'help': 'Timeout in seconds for putting requests into queue',
+ },
+}
+
+
+def main():
+ """Create a new Cheroot instance with arguments from the command line."""
+ parser = argparse.ArgumentParser(
+ description='Start an instance of the Cheroot WSGI/HTTP server.',
+ )
+ for arg, spec in _arg_spec.items():
+ parser.add_argument(arg, **spec)
+ raw_args = parser.parse_args()
+
+ # ensure cwd in sys.path
+ '' in sys.path or sys.path.insert(0, '')
+
+ # create a server based on the arguments provided
+ raw_args._wsgi_app.server(raw_args).safe_start()
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/cli.pyi b/monEnvTP/lib/python3.8/site-packages/cheroot/cli.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..b9803b3e0ab0500cfa817ff1cde195539eda90b2
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/cli.pyi
@@ -0,0 +1,32 @@
+from typing import Any
+
+class BindLocation: ...
+
+class TCPSocket(BindLocation):
+ bind_addr: Any
+ def __init__(self, address, port) -> None: ...
+
+class UnixSocket(BindLocation):
+ bind_addr: Any
+ def __init__(self, path) -> None: ...
+
+class AbstractSocket(BindLocation):
+ bind_addr: Any
+ def __init__(self, abstract_socket) -> None: ...
+
+class Application:
+ @classmethod
+ def resolve(cls, full_path): ...
+ wsgi_app: Any
+ def __init__(self, wsgi_app) -> None: ...
+ def server_args(self, parsed_args): ...
+ def server(self, parsed_args): ...
+
+class GatewayYo:
+ gateway: Any
+ def __init__(self, gateway) -> None: ...
+ def server(self, parsed_args): ...
+
+def parse_wsgi_bind_location(bind_addr_string: str): ...
+def parse_wsgi_bind_addr(bind_addr_string: str): ...
+def main() -> None: ...
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/connections.py b/monEnvTP/lib/python3.8/site-packages/cheroot/connections.py
new file mode 100644
index 0000000000000000000000000000000000000000..181e373105037d386a0a705126c17f891ce93d38
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/connections.py
@@ -0,0 +1,397 @@
+"""Utilities to manage open connections."""
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+import io
+import os
+import socket
+import threading
+import time
+
+from . import errors
+from ._compat import selectors
+from ._compat import suppress
+from ._compat import IS_WINDOWS
+from .makefile import MakeFile
+
+import six
+
+try:
+ import fcntl
+except ImportError:
+ try:
+ from ctypes import windll, WinError
+ import ctypes.wintypes
+ _SetHandleInformation = windll.kernel32.SetHandleInformation
+ _SetHandleInformation.argtypes = [
+ ctypes.wintypes.HANDLE,
+ ctypes.wintypes.DWORD,
+ ctypes.wintypes.DWORD,
+ ]
+ _SetHandleInformation.restype = ctypes.wintypes.BOOL
+ except ImportError:
+ def prevent_socket_inheritance(sock):
+ """Stub inheritance prevention.
+
+ Dummy function, since neither fcntl nor ctypes are available.
+ """
+ pass
+ else:
+ def prevent_socket_inheritance(sock):
+ """Mark the given socket fd as non-inheritable (Windows)."""
+ if not _SetHandleInformation(sock.fileno(), 1, 0):
+ raise WinError()
+else:
+ def prevent_socket_inheritance(sock):
+ """Mark the given socket fd as non-inheritable (POSIX)."""
+ fd = sock.fileno()
+ old_flags = fcntl.fcntl(fd, fcntl.F_GETFD)
+ fcntl.fcntl(fd, fcntl.F_SETFD, old_flags | fcntl.FD_CLOEXEC)
+
+
+class _ThreadsafeSelector:
+ """Thread-safe wrapper around a DefaultSelector.
+
+ There are 2 thread contexts in which it may be accessed:
+ * the selector thread
+ * one of the worker threads in workers/threadpool.py
+
+ The expected read/write patterns are:
+ * :py:func:`~iter`: selector thread
+ * :py:meth:`register`: selector thread and threadpool,
+ via :py:meth:`~cheroot.workers.threadpool.ThreadPool.put`
+ * :py:meth:`unregister`: selector thread only
+
+ Notably, this means :py:class:`_ThreadsafeSelector` never needs to worry
+ that connections will be removed behind its back.
+
+ The lock is held when iterating or modifying the selector but is not
+ required when :py:meth:`select()ing <selectors.BaseSelector.select>` on it.
+ """
+
+ def __init__(self):
+ self._selector = selectors.DefaultSelector()
+ self._lock = threading.Lock()
+
+ def __len__(self):
+ with self._lock:
+ return len(self._selector.get_map() or {})
+
+ @property
+ def connections(self):
+ """Retrieve connections registered with the selector."""
+ with self._lock:
+ mapping = self._selector.get_map() or {}
+ for _, (_, sock_fd, _, conn) in mapping.items():
+ yield (sock_fd, conn)
+
+ def register(self, fileobj, events, data=None):
+ """Register ``fileobj`` with the selector."""
+ with self._lock:
+ return self._selector.register(fileobj, events, data)
+
+ def unregister(self, fileobj):
+ """Unregister ``fileobj`` from the selector."""
+ with self._lock:
+ return self._selector.unregister(fileobj)
+
+ def select(self, timeout=None):
+ """Return socket fd and data pairs from selectors.select call.
+
+ Returns entries ready to read in the form:
+ (socket_file_descriptor, connection)
+ """
+ return (
+ (key.fd, key.data)
+ for key, _ in self._selector.select(timeout=timeout)
+ )
+
+ def close(self):
+ """Close the selector."""
+ with self._lock:
+ self._selector.close()
+
+
+class ConnectionManager:
+ """Class which manages HTTPConnection objects.
+
+ This is for connections which are being kept-alive for follow-up requests.
+ """
+
+ def __init__(self, server):
+ """Initialize ConnectionManager object.
+
+ Args:
+ server (cheroot.server.HTTPServer): web server object
+ that uses this ConnectionManager instance.
+ """
+ self._serving = False
+ self._stop_requested = False
+
+ self.server = server
+ self._selector = _ThreadsafeSelector()
+
+ self._selector.register(
+ server.socket.fileno(),
+ selectors.EVENT_READ, data=server,
+ )
+
+ def put(self, conn):
+ """Put idle connection into the ConnectionManager to be managed.
+
+ :param conn: HTTP connection to be managed
+ :type conn: cheroot.server.HTTPConnection
+ """
+ conn.last_used = time.time()
+ # if this conn doesn't have any more data waiting to be read,
+ # register it with the selector.
+ if conn.rfile.has_data():
+ self.server.process_conn(conn)
+ else:
+ self._selector.register(
+ conn.socket.fileno(), selectors.EVENT_READ, data=conn,
+ )
+
+ def _expire(self, threshold):
+ r"""Expire least recently used connections.
+
+ :param threshold: Connections that have not been used within this \
+ duration (in seconds), are considered expired and \
+ are closed and removed.
+ :type threshold: float
+
+ This should be called periodically.
+ """
+ # find any connections still registered with the selector
+ # that have not been active recently enough.
+ timed_out_connections = [
+ (sock_fd, conn)
+ for (sock_fd, conn) in self._selector.connections
+ if conn != self.server and conn.last_used < threshold
+ ]
+ for sock_fd, conn in timed_out_connections:
+ self._selector.unregister(sock_fd)
+ conn.close()
+
+ def stop(self):
+ """Stop the selector loop in run() synchronously.
+
+ May take up to half a second.
+ """
+ self._stop_requested = True
+ while self._serving:
+ time.sleep(0.01)
+
+ def run(self, expiration_interval):
+ """Run the connections selector indefinitely.
+
+ Args:
+ expiration_interval (float): Interval, in seconds, at which
+ connections will be checked for expiration.
+
+ Connections that are ready to process are submitted via
+ self.server.process_conn()
+
+ Connections submitted for processing must be `put()`
+ back if they should be examined again for another request.
+
+ Can be shut down by calling `stop()`.
+ """
+ self._serving = True
+ try:
+ self._run(expiration_interval)
+ finally:
+ self._serving = False
+
+ def _run(self, expiration_interval):
+ r"""Run connection handler loop until stop was requested.
+
+ :param expiration_interval: Interval, in seconds, at which \
+ connections will be checked for \
+ expiration.
+ :type expiration_interval: float
+
+ Use ``expiration_interval`` as ``select()`` timeout
+ to assure expired connections are closed in time.
+
+ On Windows cap the timeout to 0.05 seconds
+ as ``select()`` does not return when a socket is ready.
+ """
+ last_expiration_check = time.time()
+ if IS_WINDOWS:
+ # 0.05 seconds are used as an empirically obtained balance between
+ # max connection delay and idle system load. Benchmarks show a
+ # mean processing time per connection of ~0.03 seconds on Linux
+ # and with 0.01 seconds timeout on Windows:
+ # https://github.com/cherrypy/cheroot/pull/352
+ # While this highly depends on system and hardware, 0.05 seconds
+ # max delay should hence usually not significantly increase the
+ # mean time/delay per connection, but significantly reduce idle
+ # system load by reducing socket loops to 1/5 with 0.01 seconds.
+ select_timeout = min(expiration_interval, 0.05)
+ else:
+ select_timeout = expiration_interval
+
+ while not self._stop_requested:
+ try:
+ active_list = self._selector.select(timeout=select_timeout)
+ except OSError:
+ self._remove_invalid_sockets()
+ continue
+
+ for (sock_fd, conn) in active_list:
+ if conn is self.server:
+ # New connection
+ new_conn = self._from_server_socket(self.server.socket)
+ if new_conn is not None:
+ self.server.process_conn(new_conn)
+ else:
+ # unregister connection from the selector until the server
+ # has read from it and returned it via put()
+ self._selector.unregister(sock_fd)
+ self.server.process_conn(conn)
+
+ now = time.time()
+ if (now - last_expiration_check) > expiration_interval:
+ self._expire(threshold=now - self.server.timeout)
+ last_expiration_check = now
+
+ def _remove_invalid_sockets(self):
+ """Clean up the resources of any broken connections.
+
+ This method attempts to detect any connections in an invalid state,
+ unregisters them from the selector and closes the file descriptors of
+ the corresponding network sockets where possible.
+ """
+ invalid_conns = []
+ for sock_fd, conn in self._selector.connections:
+ if conn is self.server:
+ continue
+
+ try:
+ os.fstat(sock_fd)
+ except OSError:
+ invalid_conns.append((sock_fd, conn))
+
+ for sock_fd, conn in invalid_conns:
+ self._selector.unregister(sock_fd)
+ # One of the reason on why a socket could cause an error
+ # is that the socket is already closed, ignore the
+ # socket error if we try to close it at this point.
+ # This is equivalent to OSError in Py3
+ with suppress(socket.error):
+ conn.close()
+
+ def _from_server_socket(self, server_socket): # noqa: C901 # FIXME
+ try:
+ s, addr = server_socket.accept()
+ if self.server.stats['Enabled']:
+ self.server.stats['Accepts'] += 1
+ prevent_socket_inheritance(s)
+ if hasattr(s, 'settimeout'):
+ s.settimeout(self.server.timeout)
+
+ mf = MakeFile
+ ssl_env = {}
+ # if ssl cert and key are set, we try to be a secure HTTP server
+ if self.server.ssl_adapter is not None:
+ try:
+ s, ssl_env = self.server.ssl_adapter.wrap(s)
+ except errors.NoSSLError:
+ msg = (
+ 'The client sent a plain HTTP request, but '
+ 'this server only speaks HTTPS on this port.'
+ )
+ buf = [
+ '%s 400 Bad Request\r\n' % self.server.protocol,
+ 'Content-Length: %s\r\n' % len(msg),
+ 'Content-Type: text/plain\r\n\r\n',
+ msg,
+ ]
+
+ sock_to_make = s if not six.PY2 else s._sock
+ wfile = mf(sock_to_make, 'wb', io.DEFAULT_BUFFER_SIZE)
+ try:
+ wfile.write(''.join(buf).encode('ISO-8859-1'))
+ except socket.error as ex:
+ if ex.args[0] not in errors.socket_errors_to_ignore:
+ raise
+ return
+ if not s:
+ return
+ mf = self.server.ssl_adapter.makefile
+ # Re-apply our timeout since we may have a new socket object
+ if hasattr(s, 'settimeout'):
+ s.settimeout(self.server.timeout)
+
+ conn = self.server.ConnectionClass(self.server, s, mf)
+
+ if not isinstance(
+ self.server.bind_addr,
+ (six.text_type, six.binary_type),
+ ):
+ # optional values
+ # Until we do DNS lookups, omit REMOTE_HOST
+ if addr is None: # sometimes this can happen
+ # figure out if AF_INET or AF_INET6.
+ if len(s.getsockname()) == 2:
+ # AF_INET
+ addr = ('0.0.0.0', 0)
+ else:
+ # AF_INET6
+ addr = ('::', 0)
+ conn.remote_addr = addr[0]
+ conn.remote_port = addr[1]
+
+ conn.ssl_env = ssl_env
+ return conn
+
+ except socket.timeout:
+ # The only reason for the timeout in start() is so we can
+ # notice keyboard interrupts on Win32, which don't interrupt
+ # accept() by default
+ return
+ except socket.error as ex:
+ if self.server.stats['Enabled']:
+ self.server.stats['Socket Errors'] += 1
+ if ex.args[0] in errors.socket_error_eintr:
+ # I *think* this is right. EINTR should occur when a signal
+ # is received during the accept() call; all docs say retry
+ # the call, and I *think* I'm reading it right that Python
+ # will then go ahead and poll for and handle the signal
+ # elsewhere. See
+ # https://github.com/cherrypy/cherrypy/issues/707.
+ return
+ if ex.args[0] in errors.socket_errors_nonblocking:
+ # Just try again. See
+ # https://github.com/cherrypy/cherrypy/issues/479.
+ return
+ if ex.args[0] in errors.socket_errors_to_ignore:
+ # Our socket was closed.
+ # See https://github.com/cherrypy/cherrypy/issues/686.
+ return
+ raise
+
+ def close(self):
+ """Close all monitored connections."""
+ for (_, conn) in self._selector.connections:
+ if conn is not self.server: # server closes its own socket
+ conn.close()
+ self._selector.close()
+
+ @property
+ def _num_connections(self):
+ """Return the current number of connections.
+
+ Includes all connections registered with the selector,
+ minus one for the server socket, which is always registered
+ with the selector.
+ """
+ return len(self._selector) - 1
+
+ @property
+ def can_add_keepalive_connection(self):
+ """Flag whether it is allowed to add a new keep-alive connection."""
+ ka_limit = self.server.keep_alive_conn_limit
+ return ka_limit is None or self._num_connections < ka_limit
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/connections.pyi b/monEnvTP/lib/python3.8/site-packages/cheroot/connections.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..528ad76519c423d6bfd1cd1e19f87ee691ab992e
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/connections.pyi
@@ -0,0 +1,23 @@
+from typing import Any
+
+def prevent_socket_inheritance(sock) -> None: ...
+
+class _ThreadsafeSelector:
+    def __init__(self) -> None: ...
+    def __len__(self) -> int: ...
+    @property
+    def connections(self) -> Any: ...
+    def register(self, fileobj, events, data: Any | None = ...): ...
+    def unregister(self, fileobj): ...
+    def select(self, timeout: Any | None = ...): ...
+    def close(self) -> None: ...
+
+class ConnectionManager:
+    server: Any
+    def __init__(self, server) -> None: ...
+    def put(self, conn) -> None: ...
+    def stop(self) -> None: ...
+    def run(self, expiration_interval) -> None: ...
+    def close(self) -> None: ...
+    @property
+    def can_add_keepalive_connection(self): ...
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/errors.py b/monEnvTP/lib/python3.8/site-packages/cheroot/errors.py
new file mode 100644
index 0000000000000000000000000000000000000000..e00629f86fe5e92b7807356bebaa2927f027ce0b
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/errors.py
@@ -0,0 +1,88 @@
+# -*- coding: utf-8 -*-
+"""Collection of exceptions raised and/or processed by Cheroot."""
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+import errno
+import sys
+
+
+class MaxSizeExceeded(Exception):
+    """Exception raised when a client sends more data than acceptable within limit.
+
+    Depends on ``request.body.maxbytes`` config option if used within CherryPy
+    """
+
+
+class NoSSLError(Exception):
+    """Exception raised when a client speaks HTTP to an HTTPS socket."""
+
+
+class FatalSSLAlert(Exception):
+    """Exception raised when the SSL implementation signals a fatal alert."""
+
+
+def plat_specific_errors(*errnames):
+    """Return error numbers for all errors in ``errnames`` on this platform.
+
+    The :py:mod:`errno` module contains different global constants
+    depending on the specific platform (OS). This function will return
+    the list of numeric values for a given list of potential names.
+    """
+    missing_attr = {None}
+    unique_nums = {getattr(errno, k, None) for k in errnames}
+    return list(unique_nums - missing_attr)
+
+
+socket_error_eintr = plat_specific_errors('EINTR', 'WSAEINTR')
+
+socket_errors_to_ignore = plat_specific_errors(
+    'EPIPE',
+    'EBADF', 'WSAEBADF',
+    'ENOTSOCK', 'WSAENOTSOCK',
+    'ETIMEDOUT', 'WSAETIMEDOUT',
+    'ECONNREFUSED', 'WSAECONNREFUSED',
+    'ECONNRESET', 'WSAECONNRESET',
+    'ECONNABORTED', 'WSAECONNABORTED',
+    'ENETRESET', 'WSAENETRESET',
+    'EHOSTDOWN', 'EHOSTUNREACH',
+)
+socket_errors_to_ignore.append('timed out')
+socket_errors_to_ignore.append('The read operation timed out')
+socket_errors_nonblocking = plat_specific_errors(
+    'EAGAIN', 'EWOULDBLOCK', 'WSAEWOULDBLOCK',
+)
+
+if sys.platform == 'darwin':
+    socket_errors_to_ignore.extend(plat_specific_errors('EPROTOTYPE'))
+    socket_errors_nonblocking.extend(plat_specific_errors('EPROTOTYPE'))
+
+
+acceptable_sock_shutdown_error_codes = {
+    errno.ENOTCONN,
+    errno.EPIPE, errno.ESHUTDOWN, # corresponds to BrokenPipeError in Python 3
+    errno.ECONNRESET, # corresponds to ConnectionResetError in Python 3
+}
+"""Errors that may happen during the connection close sequence.
+
+* ENOTCONN — client is no longer connected
+* EPIPE — write on a pipe while the other end has been closed
+* ESHUTDOWN — write on a socket which has been shutdown for writing
+* ECONNRESET — connection is reset by the peer, we received a TCP RST packet
+
+Refs:
+* https://github.com/cherrypy/cheroot/issues/341#issuecomment-735884889
+* https://bugs.python.org/issue30319
+* https://bugs.python.org/issue30329
+* https://github.com/python/cpython/commit/83a2c28
+* https://github.com/python/cpython/blob/c39b52f/Lib/poplib.py#L297-L302
+* https://docs.microsoft.com/windows/win32/api/winsock/nf-winsock-shutdown
+"""
+
+try: # py3
+    acceptable_sock_shutdown_exceptions = (
+        BrokenPipeError, ConnectionResetError,
+    )
+except NameError: # py2
+    acceptable_sock_shutdown_exceptions = ()
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/errors.pyi b/monEnvTP/lib/python3.8/site-packages/cheroot/errors.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..e78a758523d71e952cd50ac8c613a73ef87afc00
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/errors.pyi
@@ -0,0 +1,13 @@
+from typing import Any, List, Set, Tuple, Type, Union
+
+class MaxSizeExceeded(Exception): ...
+class NoSSLError(Exception): ...
+class FatalSSLAlert(Exception): ...
+
+def plat_specific_errors(*errnames: str) -> List[int]: ...
+
+socket_error_eintr: List[int]
+socket_errors_to_ignore: List[Union[int, str]]
+socket_errors_nonblocking: List[int]
+acceptable_sock_shutdown_error_codes: Set[int]
+acceptable_sock_shutdown_exceptions: Tuple[Type[Exception], ...]
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/makefile.py b/monEnvTP/lib/python3.8/site-packages/cheroot/makefile.py
new file mode 100644
index 0000000000000000000000000000000000000000..1383c65898695b493f55dde10f8af2383b4cd615
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/makefile.py
@@ -0,0 +1,447 @@
+"""Socket file object."""
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+import socket
+
+try:
+ # prefer slower Python-based io module
+ import _pyio as io
+except ImportError:
+ # Python 2.6
+ import io
+
+import six
+
+from . import errors
+from ._compat import extract_bytes, memoryview
+
+
+# Write only 16K at a time to sockets
+SOCK_WRITE_BLOCKSIZE = 16384
+
+
+class BufferedWriter(io.BufferedWriter):
+    """Faux file object attached to a socket object."""
+
+    def write(self, b):
+        """Write bytes to buffer."""
+        self._checkClosed()
+        if isinstance(b, str):
+            raise TypeError("can't write str to binary stream")
+
+        with self._write_lock:
+            self._write_buf.extend(b)
+            self._flush_unlocked()
+            return len(b)
+
+    def _flush_unlocked(self):
+        self._checkClosed('flush of closed file')
+        while self._write_buf:
+            try:
+                # ssl sockets only accept 'bytes', not bytearrays
+                # so perhaps we should conditionally wrap this for perf?
+                n = self.raw.write(bytes(self._write_buf))
+            except io.BlockingIOError as e:
+                n = e.characters_written
+            del self._write_buf[:n]
+
+
+class MakeFile_PY2(getattr(socket, '_fileobject', object)):
+ """Faux file object attached to a socket object."""
+
+ def __init__(self, *args, **kwargs):
+ """Initialize faux file object."""
+ self.bytes_read = 0
+ self.bytes_written = 0
+ socket._fileobject.__init__(self, *args, **kwargs)
+ self._refcount = 0
+
+ def _reuse(self):
+ self._refcount += 1
+
+ def _drop(self):
+ if self._refcount < 0:
+ self.close()
+ else:
+ self._refcount -= 1
+
+ def write(self, data):
+ """Send entire data contents for non-blocking sockets."""
+ bytes_sent = 0
+ data_mv = memoryview(data)
+ payload_size = len(data_mv)
+ while bytes_sent < payload_size:
+ try:
+ bytes_sent += self.send(
+ data_mv[bytes_sent:bytes_sent + SOCK_WRITE_BLOCKSIZE],
+ )
+ except socket.error as e:
+ if e.args[0] not in errors.socket_errors_nonblocking:
+ raise
+
+ def send(self, data):
+ """Send some part of message to the socket."""
+ bytes_sent = self._sock.send(extract_bytes(data))
+ self.bytes_written += bytes_sent
+ return bytes_sent
+
+ def flush(self):
+ """Write all data from buffer to socket and reset write buffer."""
+ if self._wbuf:
+ buffer = ''.join(self._wbuf)
+ self._wbuf = []
+ self.write(buffer)
+
+ def recv(self, size):
+ """Receive message of a size from the socket."""
+ while True:
+ try:
+ data = self._sock.recv(size)
+ self.bytes_read += len(data)
+ return data
+ except socket.error as e:
+ what = (
+ e.args[0] not in errors.socket_errors_nonblocking
+ and e.args[0] not in errors.socket_error_eintr
+ )
+ if what:
+ raise
+
+ class FauxSocket:
+ """Faux socket with the minimal interface required by pypy."""
+
+ def _reuse(self):
+ pass
+
+ _fileobject_uses_str_type = six.PY2 and isinstance(
+ socket._fileobject(FauxSocket())._rbuf, six.string_types,
+ )
+
+ # FauxSocket is no longer needed
+ del FauxSocket
+
+ if not _fileobject_uses_str_type: # noqa: C901 # FIXME
+ def read(self, size=-1):
+ """Read data from the socket to buffer."""
+ # Use max, disallow tiny reads in a loop as they are very
+ # inefficient.
+ # We never leave read() with any leftover data from a new recv()
+ # call in our internal buffer.
+ rbufsize = max(self._rbufsize, self.default_bufsize)
+ # Our use of StringIO rather than lists of string objects returned
+ # by recv() minimizes memory usage and fragmentation that occurs
+ # when rbufsize is large compared to the typical return value of
+ # recv().
+ buf = self._rbuf
+ buf.seek(0, 2) # seek end
+ if size < 0:
+ # Read until EOF
+ # reset _rbuf. we consume it via buf.
+ self._rbuf = io.BytesIO()
+ while True:
+ data = self.recv(rbufsize)
+ if not data:
+ break
+ buf.write(data)
+ return buf.getvalue()
+ else:
+ # Read until size bytes or EOF seen, whichever comes first
+ buf_len = buf.tell()
+ if buf_len >= size:
+ # Already have size bytes in our buffer? Extract and
+ # return.
+ buf.seek(0)
+ rv = buf.read(size)
+ self._rbuf = io.BytesIO()
+ self._rbuf.write(buf.read())
+ return rv
+
+ # reset _rbuf. we consume it via buf.
+ self._rbuf = io.BytesIO()
+ while True:
+ left = size - buf_len
+ # recv() will malloc the amount of memory given as its
+ # parameter even though it often returns much less data
+ # than that. The returned data string is short lived
+ # as we copy it into a StringIO and free it. This avoids
+ # fragmentation issues on many platforms.
+ data = self.recv(left)
+ if not data:
+ break
+ n = len(data)
+ if n == size and not buf_len:
+ # Shortcut. Avoid buffer data copies when:
+ # - We have no data in our buffer.
+ # AND
+ # - Our call to recv returned exactly the
+ # number of bytes we were asked to read.
+ return data
+ if n == left:
+ buf.write(data)
+ del data # explicit free
+ break
+ assert n <= left, 'recv(%d) returned %d bytes' % (left, n)
+ buf.write(data)
+ buf_len += n
+ del data # explicit free
+ # assert buf_len == buf.tell()
+ return buf.getvalue()
+
+ def readline(self, size=-1):
+ """Read line from the socket to buffer."""
+ buf = self._rbuf
+ buf.seek(0, 2) # seek end
+ if buf.tell() > 0:
+ # check if we already have it in our buffer
+ buf.seek(0)
+ bline = buf.readline(size)
+ if bline.endswith('\n') or len(bline) == size:
+ self._rbuf = io.BytesIO()
+ self._rbuf.write(buf.read())
+ return bline
+ del bline
+ if size < 0:
+ # Read until \n or EOF, whichever comes first
+ if self._rbufsize <= 1:
+ # Speed up unbuffered case
+ buf.seek(0)
+ buffers = [buf.read()]
+ # reset _rbuf. we consume it via buf.
+ self._rbuf = io.BytesIO()
+ data = None
+ recv = self.recv
+ while data != '\n':
+ data = recv(1)
+ if not data:
+ break
+ buffers.append(data)
+ return ''.join(buffers)
+
+ buf.seek(0, 2) # seek end
+ # reset _rbuf. we consume it via buf.
+ self._rbuf = io.BytesIO()
+ while True:
+ data = self.recv(self._rbufsize)
+ if not data:
+ break
+ nl = data.find('\n')
+ if nl >= 0:
+ nl += 1
+ buf.write(data[:nl])
+ self._rbuf.write(data[nl:])
+ del data
+ break
+ buf.write(data)
+ return buf.getvalue()
+
+ else:
+ # Read until size bytes or \n or EOF seen, whichever comes
+ # first
+ buf.seek(0, 2) # seek end
+ buf_len = buf.tell()
+ if buf_len >= size:
+ buf.seek(0)
+ rv = buf.read(size)
+ self._rbuf = io.BytesIO()
+ self._rbuf.write(buf.read())
+ return rv
+ # reset _rbuf. we consume it via buf.
+ self._rbuf = io.BytesIO()
+ while True:
+ data = self.recv(self._rbufsize)
+ if not data:
+ break
+ left = size - buf_len
+ # did we just receive a newline?
+ nl = data.find('\n', 0, left)
+ if nl >= 0:
+ nl += 1
+ # save the excess data to _rbuf
+ self._rbuf.write(data[nl:])
+ if buf_len:
+ buf.write(data[:nl])
+ break
+ else:
+ # Shortcut. Avoid data copy through buf when
+ # returning a substring of our first recv().
+ return data[:nl]
+ n = len(data)
+ if n == size and not buf_len:
+ # Shortcut. Avoid data copy through buf when
+ # returning exactly all of our first recv().
+ return data
+ if n >= left:
+ buf.write(data[:left])
+ self._rbuf.write(data[left:])
+ break
+ buf.write(data)
+ buf_len += n
+ # assert buf_len == buf.tell()
+ return buf.getvalue()
+
+ def has_data(self):
+ """Return true if there is buffered data to read."""
+ return bool(self._rbuf.getvalue())
+
+ else:
+ def read(self, size=-1):
+ """Read data from the socket to buffer."""
+ if size < 0:
+ # Read until EOF
+ buffers = [self._rbuf]
+ self._rbuf = ''
+ if self._rbufsize <= 1:
+ recv_size = self.default_bufsize
+ else:
+ recv_size = self._rbufsize
+
+ while True:
+ data = self.recv(recv_size)
+ if not data:
+ break
+ buffers.append(data)
+ return ''.join(buffers)
+ else:
+ # Read until size bytes or EOF seen, whichever comes first
+ data = self._rbuf
+ buf_len = len(data)
+ if buf_len >= size:
+ self._rbuf = data[size:]
+ return data[:size]
+ buffers = []
+ if data:
+ buffers.append(data)
+ self._rbuf = ''
+ while True:
+ left = size - buf_len
+ recv_size = max(self._rbufsize, left)
+ data = self.recv(recv_size)
+ if not data:
+ break
+ buffers.append(data)
+ n = len(data)
+ if n >= left:
+ self._rbuf = data[left:]
+ buffers[-1] = data[:left]
+ break
+ buf_len += n
+ return ''.join(buffers)
+
+ def readline(self, size=-1):
+ """Read line from the socket to buffer."""
+ data = self._rbuf
+ if size < 0:
+ # Read until \n or EOF, whichever comes first
+ if self._rbufsize <= 1:
+ # Speed up unbuffered case
+ assert data == ''
+ buffers = []
+ while data != '\n':
+ data = self.recv(1)
+ if not data:
+ break
+ buffers.append(data)
+ return ''.join(buffers)
+ nl = data.find('\n')
+ if nl >= 0:
+ nl += 1
+ self._rbuf = data[nl:]
+ return data[:nl]
+ buffers = []
+ if data:
+ buffers.append(data)
+ self._rbuf = ''
+ while True:
+ data = self.recv(self._rbufsize)
+ if not data:
+ break
+ buffers.append(data)
+ nl = data.find('\n')
+ if nl >= 0:
+ nl += 1
+ self._rbuf = data[nl:]
+ buffers[-1] = data[:nl]
+ break
+ return ''.join(buffers)
+ else:
+ # Read until size bytes or \n or EOF seen, whichever comes
+ # first
+ nl = data.find('\n', 0, size)
+ if nl >= 0:
+ nl += 1
+ self._rbuf = data[nl:]
+ return data[:nl]
+ buf_len = len(data)
+ if buf_len >= size:
+ self._rbuf = data[size:]
+ return data[:size]
+ buffers = []
+ if data:
+ buffers.append(data)
+ self._rbuf = ''
+ while True:
+ data = self.recv(self._rbufsize)
+ if not data:
+ break
+ buffers.append(data)
+ left = size - buf_len
+ nl = data.find('\n', 0, left)
+ if nl >= 0:
+ nl += 1
+ self._rbuf = data[nl:]
+ buffers[-1] = data[:nl]
+ break
+ n = len(data)
+ if n >= left:
+ self._rbuf = data[left:]
+ buffers[-1] = data[:left]
+ break
+ buf_len += n
+ return ''.join(buffers)
+
+ def has_data(self):
+ """Return true if there is buffered data to read."""
+ return bool(self._rbuf)
+
+
+if not six.PY2:
+ class StreamReader(io.BufferedReader):
+ """Socket stream reader."""
+
+ def __init__(self, sock, mode='r', bufsize=io.DEFAULT_BUFFER_SIZE):
+ """Initialize socket stream reader."""
+ super().__init__(socket.SocketIO(sock, mode), bufsize)
+ self.bytes_read = 0
+
+ def read(self, *args, **kwargs):
+ """Capture bytes read."""
+ val = super().read(*args, **kwargs)
+ self.bytes_read += len(val)
+ return val
+
+ def has_data(self):
+ """Return true if there is buffered data to read."""
+ return len(self._read_buf) > self._read_pos
+
+ class StreamWriter(BufferedWriter):
+ """Socket stream writer."""
+
+ def __init__(self, sock, mode='w', bufsize=io.DEFAULT_BUFFER_SIZE):
+ """Initialize socket stream writer."""
+ super().__init__(socket.SocketIO(sock, mode), bufsize)
+ self.bytes_written = 0
+
+ def write(self, val, *args, **kwargs):
+ """Capture bytes written."""
+ res = super().write(val, *args, **kwargs)
+ self.bytes_written += len(val)
+ return res
+
+ def MakeFile(sock, mode='r', bufsize=io.DEFAULT_BUFFER_SIZE):
+ """File object attached to a socket object."""
+ cls = StreamReader if 'r' in mode else StreamWriter
+ return cls(sock, mode, bufsize)
+else:
+ StreamReader = StreamWriter = MakeFile = MakeFile_PY2
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/makefile.pyi b/monEnvTP/lib/python3.8/site-packages/cheroot/makefile.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..1174850597686f99a1ea2a51a1a0a976c1da6044
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/makefile.pyi
@@ -0,0 +1,32 @@
+import io
+
+SOCK_WRITE_BLOCKSIZE: int
+
+class BufferedWriter(io.BufferedWriter):
+ def write(self, b): ...
+
+class MakeFile_PY2:
+ bytes_read: int
+ bytes_written: int
+ def __init__(self, *args, **kwargs) -> None: ...
+ def write(self, data) -> None: ...
+ def send(self, data): ...
+ def flush(self) -> None: ...
+ def recv(self, size): ...
+ class FauxSocket: ...
+ def read(self, size: int = ...): ...
+ def readline(self, size: int = ...): ...
+ def has_data(self): ...
+
+class StreamReader(io.BufferedReader):
+ bytes_read: int
+ def __init__(self, sock, mode: str = ..., bufsize=...) -> None: ...
+ def read(self, *args, **kwargs): ...
+ def has_data(self): ...
+
+class StreamWriter(BufferedWriter):
+ bytes_written: int
+ def __init__(self, sock, mode: str = ..., bufsize=...) -> None: ...
+ def write(self, val, *args, **kwargs): ...
+
+def MakeFile(sock, mode: str = ..., bufsize=...): ...
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/py.typed b/monEnvTP/lib/python3.8/site-packages/cheroot/py.typed
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/server.py b/monEnvTP/lib/python3.8/site-packages/cheroot/server.py
new file mode 100644
index 0000000000000000000000000000000000000000..d92988abf5ef461b6affc79aaa85b40ff1ea6178
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/server.py
@@ -0,0 +1,2204 @@
+"""
+A high-speed, production ready, thread pooled, generic HTTP server.
+
+For those of you wanting to understand internals of this module, here's the
+basic call flow. The server's listening thread runs a very tight loop,
+sticking incoming connections onto a Queue::
+
+ server = HTTPServer(...)
+ server.start()
+ -> serve()
+ while ready:
+ _connections.run()
+ while not stop_requested:
+ child = socket.accept() # blocks until a request comes in
+ conn = HTTPConnection(child, ...)
+ server.process_conn(conn) # adds conn to threadpool
+
+Worker threads are kept in a pool and poll the Queue, popping off and then
+handling each connection in turn. Each connection can consist of an arbitrary
+number of requests and their responses, so we run a nested loop::
+
+ while True:
+ conn = server.requests.get()
+ conn.communicate()
+ -> while True:
+ req = HTTPRequest(...)
+ req.parse_request()
+ -> # Read the Request-Line, e.g. "GET /page HTTP/1.1"
+ req.rfile.readline()
+ read_headers(req.rfile, req.inheaders)
+ req.respond()
+ -> response = app(...)
+ try:
+ for chunk in response:
+ if chunk:
+ req.write(chunk)
+ finally:
+ if hasattr(response, "close"):
+ response.close()
+ if req.close_connection:
+ return
+
+For running a server you can invoke :func:`start() <HTTPServer.start()>` (it
+will run the server forever) or use invoking :func:`prepare()
+<HTTPServer.prepare()>` and :func:`serve() <HTTPServer.serve()>` like this::
+
+ server = HTTPServer(...)
+ server.prepare()
+ try:
+ threading.Thread(target=server.serve).start()
+
+ # waiting/detecting some appropriate stop condition here
+ ...
+
+ finally:
+ server.stop()
+
+And now for a trivial doctest to exercise the test suite
+
+.. testsetup::
+
+ from cheroot.server import HTTPServer
+
+>>> 'HTTPServer' in globals()
+True
+"""
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+import os
+import io
+import re
+import email.utils
+import socket
+import sys
+import time
+import traceback as traceback_
+import logging
+import platform
+import contextlib
+import threading
+
+try:
+ from functools import lru_cache
+except ImportError:
+ from backports.functools_lru_cache import lru_cache
+
+import six
+from six.moves import queue
+from six.moves import urllib
+
+from . import connections, errors, __version__
+from ._compat import bton, ntou
+from ._compat import IS_PPC
+from .workers import threadpool
+from .makefile import MakeFile, StreamWriter
+
+
+__all__ = (
+ 'HTTPRequest', 'HTTPConnection', 'HTTPServer',
+ 'HeaderReader', 'DropUnderscoreHeaderReader',
+ 'SizeCheckWrapper', 'KnownLengthRFile', 'ChunkedRFile',
+ 'Gateway', 'get_ssl_adapter_class',
+)
+
+
+IS_WINDOWS = platform.system() == 'Windows'
+"""Flag indicating whether the app is running under Windows."""
+
+
+IS_GAE = os.getenv('SERVER_SOFTWARE', '').startswith('Google App Engine/')
+"""Flag indicating whether the app is running in GAE env.
+
+Ref:
+https://cloud.google.com/appengine/docs/standard/python/tools
+/using-local-server#detecting_application_runtime_environment
+"""
+
+
+IS_UID_GID_RESOLVABLE = not IS_WINDOWS and not IS_GAE
+"""Indicates whether UID/GID resolution's available under current platform."""
+
+
+if IS_UID_GID_RESOLVABLE:
+ try:
+ import grp
+ import pwd
+ except ImportError:
+ """Unavailable in the current env.
+
+ This shouldn't be happening normally.
+ All of the known cases are excluded via the if clause.
+ """
+ IS_UID_GID_RESOLVABLE = False
+ grp, pwd = None, None
+ import struct
+
+
+if IS_WINDOWS and hasattr(socket, 'AF_INET6'):
+ if not hasattr(socket, 'IPPROTO_IPV6'):
+ socket.IPPROTO_IPV6 = 41
+ if not hasattr(socket, 'IPV6_V6ONLY'):
+ socket.IPV6_V6ONLY = 27
+
+
+if not hasattr(socket, 'SO_PEERCRED'):
+ """
+ NOTE: the value for SO_PEERCRED can be architecture specific, in
+ which case the getsockopt() will hopefully fail. The arch
+ specific value could be derived from platform.processor()
+ """
+ socket.SO_PEERCRED = 21 if IS_PPC else 17
+
+
+LF = b'\n'
+CRLF = b'\r\n'
+TAB = b'\t'
+SPACE = b' '
+COLON = b':'
+SEMICOLON = b';'
+EMPTY = b''
+ASTERISK = b'*'
+FORWARD_SLASH = b'/'
+QUOTED_SLASH = b'%2F'
+QUOTED_SLASH_REGEX = re.compile(b''.join((b'(?i)', QUOTED_SLASH)))
+
+
+_STOPPING_FOR_INTERRUPT = object() # sentinel used during shutdown
+
+
+comma_separated_headers = [
+ b'Accept', b'Accept-Charset', b'Accept-Encoding',
+ b'Accept-Language', b'Accept-Ranges', b'Allow', b'Cache-Control',
+ b'Connection', b'Content-Encoding', b'Content-Language', b'Expect',
+ b'If-Match', b'If-None-Match', b'Pragma', b'Proxy-Authenticate', b'TE',
+ b'Trailer', b'Transfer-Encoding', b'Upgrade', b'Vary', b'Via', b'Warning',
+ b'WWW-Authenticate',
+]
+
+
+if not hasattr(logging, 'statistics'):
+ logging.statistics = {}
+
+
+class HeaderReader:
+ """Object for reading headers from an HTTP request.
+
+ Interface and default implementation.
+ """
+
+ def __call__(self, rfile, hdict=None): # noqa: C901 # FIXME
+ """
+ Read headers from the given stream into the given header dict.
+
+ If hdict is None, a new header dict is created. Returns the populated
+ header dict.
+
+ Headers which are repeated are folded together using a comma if their
+ specification so dictates.
+
+ This function raises ValueError when the read bytes violate the HTTP
+ spec.
+ You should probably return "400 Bad Request" if this happens.
+ """
+ if hdict is None:
+ hdict = {}
+
+ while True:
+ line = rfile.readline()
+ if not line:
+ # No more data--illegal end of headers
+ raise ValueError('Illegal end of headers.')
+
+ if line == CRLF:
+ # Normal end of headers
+ break
+ if not line.endswith(CRLF):
+ raise ValueError('HTTP requires CRLF terminators')
+
+ if line[0] in (SPACE, TAB):
+ # It's a continuation line.
+ v = line.strip()
+ else:
+ try:
+ k, v = line.split(COLON, 1)
+ except ValueError:
+ raise ValueError('Illegal header line.')
+ v = v.strip()
+ k = self._transform_key(k)
+ hname = k
+
+ if not self._allow_header(k):
+ continue
+
+ if k in comma_separated_headers:
+ existing = hdict.get(hname)
+ if existing:
+ v = b', '.join((existing, v))
+ hdict[hname] = v
+
+ return hdict
+
+ def _allow_header(self, key_name):
+ return True
+
+ def _transform_key(self, key_name):
+ # TODO: what about TE and WWW-Authenticate?
+ return key_name.strip().title()
+
+
+class DropUnderscoreHeaderReader(HeaderReader):
+ """Custom HeaderReader to exclude any headers with underscores in them."""
+
+ def _allow_header(self, key_name):
+ orig = super(DropUnderscoreHeaderReader, self)._allow_header(key_name)
+ return orig and '_' not in key_name
+
+
+class SizeCheckWrapper:
+ """Wraps a file-like object, raising MaxSizeExceeded if too large.
+
+ :param rfile: ``file`` of a limited size
+ :param int maxlen: maximum length of the file being read
+ """
+
+ def __init__(self, rfile, maxlen):
+ """Initialize SizeCheckWrapper instance."""
+ self.rfile = rfile
+ self.maxlen = maxlen
+ self.bytes_read = 0
+
+ def _check_length(self):
+ if self.maxlen and self.bytes_read > self.maxlen:
+ raise errors.MaxSizeExceeded()
+
+ def read(self, size=None):
+ """Read a chunk from ``rfile`` buffer and return it.
+
+ :param size: amount of data to read
+ :type size: int
+
+ :returns: chunk from ``rfile``, limited by size if specified
+ :rtype: bytes
+ """
+ data = self.rfile.read(size)
+ self.bytes_read += len(data)
+ self._check_length()
+ return data
+
+ def readline(self, size=None):
+ """Read a single line from ``rfile`` buffer and return it.
+
+ :param size: minimum amount of data to read
+ :type size: int
+
+ :returns: one line from ``rfile``
+ :rtype: bytes
+ """
+ if size is not None:
+ data = self.rfile.readline(size)
+ self.bytes_read += len(data)
+ self._check_length()
+ return data
+
+ # User didn't specify a size ...
+ # We read the line in chunks to make sure it's not a 100MB line !
+ res = []
+ while True:
+ data = self.rfile.readline(256)
+ self.bytes_read += len(data)
+ self._check_length()
+ res.append(data)
+ # See https://github.com/cherrypy/cherrypy/issues/421
+ if len(data) < 256 or data[-1:] == LF:
+ return EMPTY.join(res)
+
+ def readlines(self, sizehint=0):
+ """Read all lines from ``rfile`` buffer and return them.
+
+ :param sizehint: hint of minimum amount of data to read
+ :type sizehint: int
+
+ :returns: lines of bytes read from ``rfile``
+ :rtype: list[bytes]
+ """
+ # Shamelessly stolen from StringIO
+ total = 0
+ lines = []
+ line = self.readline(sizehint)
+ while line:
+ lines.append(line)
+ total += len(line)
+ if 0 < sizehint <= total:
+ break
+ line = self.readline(sizehint)
+ return lines
+
+ def close(self):
+ """Release resources allocated for ``rfile``."""
+ self.rfile.close()
+
+ def __iter__(self):
+ """Return file iterator."""
+ return self
+
+ def __next__(self):
+ """Generate next file chunk."""
+ data = next(self.rfile)
+ self.bytes_read += len(data)
+ self._check_length()
+ return data
+
+ next = __next__
+
+
class KnownLengthRFile:
    """Wraps a file-like object, returning an empty string when exhausted.

    :param rfile: ``file`` of a known size
    :param int content_length: length of the file being read
    """

    def __init__(self, rfile, content_length):
        """Initialize KnownLengthRFile instance."""
        self.rfile = rfile
        self.remaining = content_length  # entity-body bytes left to serve

    def read(self, size=None):
        """Read a chunk from ``rfile`` buffer and return it.

        :param size: amount of data to read
        :type size: int

        :rtype: bytes
        :returns: chunk from ``rfile``, limited by size if specified
        """
        if self.remaining == 0:
            return b''
        # Never ask the underlying stream for more than the declared length.
        limit = self.remaining if size is None else min(size, self.remaining)
        chunk = self.rfile.read(limit)
        self.remaining -= len(chunk)
        return chunk

    def readline(self, size=None):
        """Read a single line from ``rfile`` buffer and return it.

        :param size: maximum amount of data to read
        :type size: int

        :returns: one line from ``rfile``
        :rtype: bytes
        """
        if self.remaining == 0:
            return b''
        limit = self.remaining if size is None else min(size, self.remaining)
        line = self.rfile.readline(limit)
        self.remaining -= len(line)
        return line

    def readlines(self, sizehint=0):
        """Read all lines from ``rfile`` buffer and return them.

        :param sizehint: hint of minimum amount of data to read
        :type sizehint: int

        :returns: lines of bytes read from ``rfile``
        :rtype: list[bytes]
        """
        # Shamelessly stolen from StringIO
        collected = []
        consumed = 0
        while True:
            line = self.readline(sizehint)
            if not line:
                break
            collected.append(line)
            consumed += len(line)
            if 0 < sizehint <= consumed:
                break
        return collected

    def close(self):
        """Release resources allocated for ``rfile``."""
        self.rfile.close()

    def __iter__(self):
        """Return file iterator."""
        return self

    def __next__(self):
        """Generate next file chunk."""
        # NOTE(review): iteration delegates straight to the underlying
        # file and is not clamped by ``remaining`` — confirm callers
        # never iterate past the declared length.
        chunk = next(self.rfile)
        self.remaining -= len(chunk)
        return chunk

    next = __next__  # Python 2 iterator-protocol alias
+
+
class ChunkedRFile:
    """Wraps a file-like object, returning an empty string when exhausted.

    This class is intended to provide a conforming wsgi.input value for
    request entities that have been encoded with the 'chunked' transfer
    encoding.

    :param rfile: file encoded with the 'chunked' transfer encoding
    :param int maxlen: maximum length of the file being read
    :param int bufsize: size of the buffer used to read the file
    """

    def __init__(self, rfile, maxlen, bufsize=8192):
        """Initialize ChunkedRFile instance."""
        self.rfile = rfile
        self.maxlen = maxlen  # overall cap on bytes read (falsy = unlimited)
        self.bytes_read = 0  # total bytes consumed from the socket
        self.buffer = EMPTY  # decoded-but-unreturned entity bytes
        self.bufsize = bufsize
        self.closed = False  # True once the terminating 0-size chunk is seen

    def _fetch(self):
        """Decode the next chunk into ``self.buffer``.

        Reads one chunk-size line and the corresponding payload; sets
        ``self.closed`` once the terminating zero-size chunk arrives.
        Raises ``errors.MaxSizeExceeded`` or ``IOError`` when ``maxlen``
        would be exceeded, and ``ValueError`` on a malformed chunk-size
        line or a missing CRLF chunk terminator.
        """
        if self.closed:
            return

        line = self.rfile.readline()
        self.bytes_read += len(line)

        if self.maxlen and self.bytes_read > self.maxlen:
            raise errors.MaxSizeExceeded(
                'Request Entity Too Large', self.maxlen,
            )

        line = line.strip().split(SEMICOLON, 1)

        try:
            chunk_size = line.pop(0)
            chunk_size = int(chunk_size, 16)
        except ValueError:
            raise ValueError(
                'Bad chunked transfer size: {chunk_size!r}'.
                format(chunk_size=chunk_size),
            )

        if chunk_size <= 0:
            # Zero-size chunk terminates the body; trailers may follow.
            self.closed = True
            return

#        if line: chunk_extension = line[0]

        if self.maxlen and self.bytes_read + chunk_size > self.maxlen:
            raise IOError('Request Entity Too Large')

        chunk = self.rfile.read(chunk_size)
        self.bytes_read += len(chunk)
        self.buffer += chunk

        # Each chunk payload must be followed by a bare CRLF.
        crlf = self.rfile.read(2)
        if crlf != CRLF:
            raise ValueError(
                "Bad chunked transfer coding (expected '\\r\\n', "
                'got ' + repr(crlf) + ')',
            )

    def read(self, size=None):
        """Read a chunk from ``rfile`` buffer and return it.

        :param size: amount of data to read
        :type size: int

        :returns: chunk from ``rfile``, limited by size if specified
        :rtype: bytes
        """
        data = EMPTY

        if size == 0:
            return data

        while True:
            if size and len(data) >= size:
                return data

            if not self.buffer:
                self._fetch()
                if not self.buffer:
                    # EOF
                    return data

            if size:
                remaining = size - len(data)
                data += self.buffer[:remaining]
                self.buffer = self.buffer[remaining:]
            else:
                data += self.buffer
                self.buffer = EMPTY

    def readline(self, size=None):
        """Read a single line from ``rfile`` buffer and return it.

        :param size: maximum amount of data to read
        :type size: int

        :returns: one line from ``rfile``, including the trailing
            newline when one is found within ``size`` bytes
        :rtype: bytes
        """
        data = EMPTY

        if size == 0:
            return data

        while True:
            if size and len(data) >= size:
                return data

            if not self.buffer:
                self._fetch()
                if not self.buffer:
                    # EOF
                    return data

            newline_pos = self.buffer.find(LF)
            if newline_pos == -1:
                # No newline buffered yet: take what we are allowed to
                # and fetch more.
                if size:
                    remaining = size - len(data)
                    data += self.buffer[:remaining]
                    self.buffer = self.buffer[remaining:]
                else:
                    data += self.buffer
                    self.buffer = EMPTY
            else:
                # BUGFIX: consume *through* the newline and return the
                # completed line. The previous code sliced up to (but
                # not including) the LF and never returned, which left
                # the LF at the head of the buffer and made subsequent
                # iterations add zero bytes — an infinite loop.
                line_end = newline_pos + 1
                if size:
                    line_end = min(line_end, size - len(data))
                data += self.buffer[:line_end]
                self.buffer = self.buffer[line_end:]
                if data.endswith(LF):
                    return data

    def readlines(self, sizehint=0):
        """Read all lines from ``rfile`` buffer and return them.

        :param sizehint: hint of minimum amount of data to read
        :type sizehint: int

        :returns: lines of bytes read from ``rfile``
        :rtype: list[bytes]
        """
        # Shamelessly stolen from StringIO
        # NOTE(review): with the default sizehint of 0, readline(0)
        # returns immediately and this yields [] — callers appear to be
        # expected to pass an explicit hint; confirm.
        total = 0
        lines = []
        line = self.readline(sizehint)
        while line:
            lines.append(line)
            total += len(line)
            if 0 < sizehint <= total:
                break
            line = self.readline(sizehint)
        return lines

    def read_trailer_lines(self):
        """Read HTTP headers and yield them.

        Returns:
            Generator: yields CRLF separated lines.

        """
        if not self.closed:
            raise ValueError(
                'Cannot read trailers until the request body has been read.',
            )

        while True:
            line = self.rfile.readline()
            if not line:
                # No more data--illegal end of headers
                raise ValueError('Illegal end of headers.')

            self.bytes_read += len(line)
            if self.maxlen and self.bytes_read > self.maxlen:
                raise IOError('Request Entity Too Large')

            if line == CRLF:
                # Normal end of headers
                break
            if not line.endswith(CRLF):
                raise ValueError('HTTP requires CRLF terminators')

            yield line

    def close(self):
        """Release resources allocated for ``rfile``."""
        self.rfile.close()
+
+
+class HTTPRequest:
+ """An HTTP Request (and response).
+
+ A single HTTP connection may consist of multiple request/response pairs.
+ """
+
server = None
"""The HTTPServer object which is receiving this request."""

conn = None
"""The HTTPConnection object on which this request connected."""

# NOTE: the mutable class-level defaults below are shadowed by fresh
# per-instance attributes assigned in __init__.
inheaders = {}
"""A dict of request headers."""

outheaders = []
"""A list of header tuples to write in the response."""

ready = False
"""When True, the request has been parsed and is ready to begin generating
the response. When False, signals the calling Connection that the response
should not be generated and the connection should close."""

close_connection = False
"""Signals the calling Connection that the request should close. This does
not imply an error! The client and/or server may each request that the
connection be closed."""

chunked_write = False
"""If True, output will be encoded with the "chunked" transfer-coding.

This value is set automatically inside send_headers."""

header_reader = HeaderReader()
"""
A HeaderReader instance or compatible reader.
"""
+
def __init__(self, server, conn, proxy_mode=False, strict_mode=True):
    """Initialize HTTP request container instance.

    Args:
        server (HTTPServer): web server object receiving this request
        conn (HTTPConnection): HTTP connection object for this request
        proxy_mode (bool): whether this HTTPServer should behave as a
            PROXY server for certain requests
        strict_mode (bool): whether we should return a 400 Bad Request
            when we encounter a request that a HTTP compliant client
            should not be making
    """
    self.server = server
    self.conn = conn
    self.proxy_mode = proxy_mode
    self.strict_mode = strict_mode

    self.ready = False
    self.started_request = False

    # TLS-terminated connections report the https scheme to the gateway.
    self.scheme = b'https' if self.server.ssl_adapter is not None else b'http'

    # Use the lowest-common protocol in case read_request_line errors.
    self.response_protocol = 'HTTP/1.0'

    self.inheaders = {}
    self.outheaders = []
    self.status = ''
    self.sent_headers = False
    self.close_connection = self.__class__.close_connection
    self.chunked_read = False
    self.chunked_write = self.__class__.chunked_write
+
def parse_request(self):
    """Parse the next HTTP request start-line and message-headers."""
    # Cap header reads so a hostile peer cannot stream endless headers.
    self.rfile = SizeCheckWrapper(
        self.conn.rfile,
        self.server.max_request_header_size,
    )

    try:
        line_ok = self.read_request_line()
    except errors.MaxSizeExceeded:
        self.simple_response(
            '414 Request-URI Too Long',
            'The Request-URI sent with the request exceeds the maximum '
            'allowed bytes.',
        )
        return
    if not line_ok:
        return

    try:
        headers_ok = self.read_request_headers()
    except errors.MaxSizeExceeded:
        self.simple_response(
            '413 Request Entity Too Large',
            'The headers sent with the request exceed the maximum '
            'allowed bytes.',
        )
        return
    if not headers_ok:
        return

    self.ready = True
+
def read_request_line(self):  # noqa: C901  # FIXME
    """Read and parse first line of the HTTP request.

    Returns:
        bool: True if the request line is valid or False if it's malformed.

    """
    # HTTP/1.1 connections are persistent by default. If a client
    # requests a page, then idles (leaves the connection open),
    # then rfile.readline() will raise socket.error("timed out").
    # Note that it does this based on the value given to settimeout(),
    # and doesn't need the client to request or acknowledge the close
    # (although your TCP stack might suffer for it: cf Apache's history
    # with FIN_WAIT_2).
    request_line = self.rfile.readline()

    # Set started_request to True so communicate() knows to send 408
    # from here on out.
    self.started_request = True
    if not request_line:
        return False

    if request_line == CRLF:
        # RFC 2616 sec 4.1: "...if the server is reading the protocol
        # stream at the beginning of a message and receives a CRLF
        # first, it should ignore the CRLF."
        # But only ignore one leading line! else we enable a DoS.
        request_line = self.rfile.readline()
        if not request_line:
            return False

    if not request_line.endswith(CRLF):
        self.simple_response(
            '400 Bad Request', 'HTTP requires CRLF terminators',
        )
        return False

    try:
        method, uri, req_protocol = request_line.strip().split(SPACE, 2)
        if not req_protocol.startswith(b'HTTP/'):
            self.simple_response(
                '400 Bad Request', 'Malformed Request-Line: bad protocol',
            )
            return False
        rp = req_protocol[5:].split(b'.', 1)
        if len(rp) != 2:
            self.simple_response(
                '400 Bad Request', 'Malformed Request-Line: bad version',
            )
            return False
        rp = tuple(map(int, rp))  # major/minor must be treated as integers
        if rp > (1, 1):
            self.simple_response(
                '505 HTTP Version Not Supported', 'Cannot fulfill request',
            )
            return False
    except (ValueError, IndexError):
        self.simple_response('400 Bad Request', 'Malformed Request-Line')
        return False

    self.uri = uri
    self.method = method.upper()

    if self.strict_mode and method != self.method:
        resp = (
            'Malformed method name: According to RFC 2616 '
            '(section 5.1.1) and its successors '
            'RFC 7230 (section 3.1.1) and RFC 7231 (section 4.1) '
            'method names are case-sensitive and uppercase.'
        )
        self.simple_response('400 Bad Request', resp)
        return False

    try:
        if six.PY2:  # FIXME: Figure out better way to do this
            # Ref: https://stackoverflow.com/a/196392/595220 (like this?)
            """This is a dummy check for unicode in URI."""
            ntou(bton(uri, 'ascii'), 'ascii')
        scheme, authority, path, qs, fragment = urllib.parse.urlsplit(uri)
    except UnicodeError:
        self.simple_response('400 Bad Request', 'Malformed Request-URI')
        return False

    # A scheme or authority component means the absolute-form was used.
    uri_is_absolute_form = (scheme or authority)

    if self.method == b'OPTIONS':
        # TODO: cover this branch with tests
        path = (
            uri
            # https://tools.ietf.org/html/rfc7230#section-5.3.4
            if (self.proxy_mode and uri_is_absolute_form)
            else path
        )
    elif self.method == b'CONNECT':
        # TODO: cover this branch with tests
        if not self.proxy_mode:
            self.simple_response('405 Method Not Allowed')
            return False

        # `urlsplit()` above parses "example.com:3128" as path part of URI.
        # this is a workaround, which makes it detect netloc correctly
        uri_split = urllib.parse.urlsplit(b''.join((b'//', uri)))
        _scheme, _authority, _path, _qs, _fragment = uri_split
        _port = EMPTY
        try:
            _port = uri_split.port
        except ValueError:
            pass

        # FIXME: use third-party validation to make checks against RFC
        # the validation doesn't take into account, that urllib parses
        # invalid URIs without raising errors
        # https://tools.ietf.org/html/rfc7230#section-5.3.3
        invalid_path = (
            _authority != uri
            or not _port
            or any((_scheme, _path, _qs, _fragment))
        )
        if invalid_path:
            self.simple_response(
                '400 Bad Request',
                'Invalid path in Request-URI: request-'
                'target must match authority-form.',
            )
            return False

        authority = path = _authority
        scheme = qs = fragment = EMPTY
    else:
        disallowed_absolute = (
            self.strict_mode
            and not self.proxy_mode
            and uri_is_absolute_form
        )
        if disallowed_absolute:
            # https://tools.ietf.org/html/rfc7230#section-5.3.2
            # (absolute form)
            """Absolute URI is only allowed within proxies."""
            self.simple_response(
                '400 Bad Request',
                'Absolute URI not allowed if server is not a proxy.',
            )
            return False

        invalid_path = (
            self.strict_mode
            and not uri.startswith(FORWARD_SLASH)
            and not uri_is_absolute_form
        )
        if invalid_path:
            # https://tools.ietf.org/html/rfc7230#section-5.3.1
            # (origin_form) and
            """Path should start with a forward slash."""
            resp = (
                'Invalid path in Request-URI: request-target must contain '
                'origin-form which starts with absolute-path (URI '
                'starting with a slash "/").'
            )
            self.simple_response('400 Bad Request', resp)
            return False

    if fragment:
        self.simple_response(
            '400 Bad Request',
            'Illegal #fragment in Request-URI.',
        )
        return False

    if path is None:
        # FIXME: It looks like this case cannot happen
        self.simple_response(
            '400 Bad Request',
            'Invalid path in Request-URI.',
        )
        return False

    # Unquote the path+params (e.g. "/this%20path" -> "/this path").
    # https://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5.1.2
    #
    # But note that "...a URI must be separated into its components
    # before the escaped characters within those components can be
    # safely decoded." https://www.ietf.org/rfc/rfc2396.txt, sec 2.4.2
    # Therefore, "/this%2Fpath" becomes "/this%2Fpath", not
    # "/this/path".
    try:
        # TODO: Figure out whether exception can really happen here.
        # It looks like it's caught on urlsplit() call above.
        atoms = [
            urllib.parse.unquote_to_bytes(x)
            for x in QUOTED_SLASH_REGEX.split(path)
        ]
    except ValueError as ex:
        self.simple_response('400 Bad Request', ex.args[0])
        return False
    path = QUOTED_SLASH.join(atoms)

    if not path.startswith(FORWARD_SLASH):
        path = FORWARD_SLASH + path

    if scheme is not EMPTY:
        self.scheme = scheme
    self.authority = authority
    self.path = path

    # Note that, like wsgiref and most other HTTP servers,
    # we "% HEX HEX"-unquote the path but not the query string.
    self.qs = qs

    # Compare request and server HTTP protocol versions, in case our
    # server does not support the requested protocol. Limit our output
    # to min(req, server). We want the following output:
    #     request    server     actual written   supported response
    #     protocol   protocol  response protocol    feature set
    # a     1.0        1.0           1.0                1.0
    # b     1.0        1.1           1.1                1.0
    # c     1.1        1.0           1.0                1.0
    # d     1.1        1.1           1.1                1.1
    # Notice that, in (b), the response will be "HTTP/1.1" even though
    # the client only understands 1.0. RFC 2616 10.5.6 says we should
    # only return 505 if the _major_ version is different.
    sp = int(self.server.protocol[5]), int(self.server.protocol[7])

    if sp[0] != rp[0]:
        self.simple_response('505 HTTP Version Not Supported')
        return False

    self.request_protocol = req_protocol
    self.response_protocol = 'HTTP/%s.%s' % min(rp, sp)

    return True
+
def read_request_headers(self):  # noqa: C901  # FIXME
    """Read ``self.rfile`` into ``self.inheaders``.

    Ref: :py:attr:`self.inheaders <HTTPRequest.outheaders>`.

    :returns: success status
    :rtype: bool
    """
    # then all the http headers
    try:
        self.header_reader(self.rfile, self.inheaders)
    except ValueError as ex:
        self.simple_response('400 Bad Request', ex.args[0])
        return False

    mrbs = self.server.max_request_body_size

    try:
        cl = int(self.inheaders.get(b'Content-Length', 0))
    except ValueError:
        self.simple_response(
            '400 Bad Request',
            'Malformed Content-Length Header.',
        )
        return False

    # Reject the request early if the declared body exceeds the limit.
    if mrbs and cl > mrbs:
        self.simple_response(
            '413 Request Entity Too Large',
            'The entity sent with the request exceeds the maximum '
            'allowed bytes.',
        )
        return False

    # Persistent connection support
    if self.response_protocol == 'HTTP/1.1':
        # Both server and client are HTTP/1.1
        if self.inheaders.get(b'Connection', b'') == b'close':
            self.close_connection = True
    else:
        # Either the server or client (or both) are HTTP/1.0
        if self.inheaders.get(b'Connection', b'') != b'Keep-Alive':
            self.close_connection = True

    # Transfer-Encoding support
    te = None
    if self.response_protocol == 'HTTP/1.1':
        te = self.inheaders.get(b'Transfer-Encoding')
        if te:
            te = [x.strip().lower() for x in te.split(b',') if x.strip()]

    self.chunked_read = False

    if te:
        for enc in te:
            if enc == b'chunked':
                self.chunked_read = True
            else:
                # Note that, even if we see "chunked", we must reject
                # if there is an extension we don't recognize.
                self.simple_response('501 Unimplemented')
                self.close_connection = True
                return False

    # From PEP 333:
    # "Servers and gateways that implement HTTP 1.1 must provide
    # transparent support for HTTP 1.1's "expect/continue" mechanism.
    # This may be done in any of several ways:
    #   1. Respond to requests containing an Expect: 100-continue request
    #      with an immediate "100 Continue" response, and proceed normally.
    #   2. Proceed with the request normally, but provide the application
    #      with a wsgi.input stream that will send the "100 Continue"
    #      response if/when the application first attempts to read from
    #      the input stream. The read request must then remain blocked
    #      until the client responds.
    #   3. Wait until the client decides that the server does not support
    #      expect/continue, and sends the request body on its own.
    #      (This is suboptimal, and is not recommended.)
    #
    # We used to do 3, but are now doing 1. Maybe we'll do 2 someday,
    # but it seems like it would be a big slowdown for such a rare case.
    if self.inheaders.get(b'Expect', b'') == b'100-continue':
        # Don't use simple_response here, because it emits headers
        # we don't want. See
        # https://github.com/cherrypy/cherrypy/issues/951
        msg = b''.join((
            self.server.protocol.encode('ascii'), SPACE, b'100 Continue',
            CRLF, CRLF,
        ))
        try:
            self.conn.wfile.write(msg)
        except socket.error as ex:
            if ex.args[0] not in errors.socket_errors_to_ignore:
                raise
    return True
+
def respond(self):
    """Call the gateway and write its iterable output."""
    body_limit = self.server.max_request_body_size

    if self.chunked_read:
        self.rfile = ChunkedRFile(self.conn.rfile, body_limit)
    else:
        content_length = int(self.inheaders.get(b'Content-Length', 0))
        if body_limit and body_limit < content_length:
            if not self.sent_headers:
                self.simple_response(
                    '413 Request Entity Too Large',
                    'The entity sent with the request exceeds the '
                    'maximum allowed bytes.',
                )
            return
        self.rfile = KnownLengthRFile(self.conn.rfile, content_length)

    self.server.gateway(self).respond()
    if self.ready:
        self.ensure_headers_sent()

    if self.chunked_write:
        # Terminate the chunked response body.
        self.conn.wfile.write(b'0\r\n\r\n')
+
def simple_response(self, status, msg=''):
    """Write a simple response back to the client."""
    status = str(status)
    header_parts = (
        '%s %s\r\n' % (self.server.protocol, status),
        'Content-Length: %s\r\n' % len(msg),
        'Content-Type: text/plain\r\n',
    )
    buf = [part.encode('ISO-8859-1') for part in header_parts]

    if status[:3] in ('413', '414'):
        # Request Entity Too Large / Request-URI Too Long
        self.close_connection = True
        if self.response_protocol == 'HTTP/1.1':
            # This will not be true for 414, since read_request_line
            # usually raises 414 before reading the whole line, and we
            # therefore cannot know the proper response_protocol.
            buf.append(b'Connection: close\r\n')
        else:
            # HTTP/1.0 had no 413/414 status nor Connection header.
            # Emit 400 instead and trust the message body is enough.
            status = '400 Bad Request'

    buf.append(CRLF)
    if msg:
        if isinstance(msg, six.text_type):
            msg = msg.encode('ISO-8859-1')
        buf.append(msg)

    try:
        self.conn.wfile.write(EMPTY.join(buf))
    except socket.error as ex:
        if ex.args[0] not in errors.socket_errors_to_ignore:
            raise
+
def ensure_headers_sent(self):
    """Ensure headers are sent to the client if not already sent."""
    if self.sent_headers:
        return
    self.sent_headers = True
    self.send_headers()
+
def write(self, chunk):
    """Write unbuffered data to the client."""
    if not (self.chunked_write and chunk):
        self.conn.wfile.write(chunk)
        return
    # Chunked transfer-coding frame: "<hex length>\r\n<payload>\r\n".
    size_line = hex(len(chunk))[2:].encode('ascii')
    self.conn.wfile.write(EMPTY.join((size_line, CRLF, chunk, CRLF)))
+
def send_headers(self):  # noqa: C901  # FIXME
    """Assert, process, and send the HTTP response message-headers.

    You must set ``self.status``, and :py:attr:`self.outheaders
    <HTTPRequest.outheaders>` before calling this.
    """
    hkeys = [key.lower() for key, value in self.outheaders]
    status = int(self.status[:3])

    if status == 413:
        # Request Entity Too Large. Close conn to avoid garbage.
        self.close_connection = True
    elif b'content-length' not in hkeys:
        # "All 1xx (informational), 204 (no content),
        # and 304 (not modified) responses MUST NOT
        # include a message-body." So no point chunking.
        if status < 200 or status in (204, 205, 304):
            pass
        else:
            needs_chunked = (
                self.response_protocol == 'HTTP/1.1'
                and self.method != b'HEAD'
            )
            if needs_chunked:
                # Use the chunked transfer-coding
                self.chunked_write = True
                self.outheaders.append((b'Transfer-Encoding', b'chunked'))
            else:
                # Closing the conn is the only way to determine len.
                self.close_connection = True

    # Override the decision to not close the connection if the connection
    # manager doesn't have space for it.
    if not self.close_connection:
        can_keep = self.server.can_add_keepalive_connection
        self.close_connection = not can_keep

    if b'connection' not in hkeys:
        if self.response_protocol == 'HTTP/1.1':
            # Both server and client are HTTP/1.1 or better
            if self.close_connection:
                self.outheaders.append((b'Connection', b'close'))
        else:
            # Server and/or client are HTTP/1.0
            if not self.close_connection:
                self.outheaders.append((b'Connection', b'Keep-Alive'))

    if (b'Connection', b'Keep-Alive') in self.outheaders:
        self.outheaders.append((
            b'Keep-Alive',
            u'timeout={connection_timeout}'.
            format(connection_timeout=self.server.timeout).
            encode('ISO-8859-1'),
        ))

    if (not self.close_connection) and (not self.chunked_read):
        # Read any remaining request body data on the socket.
        # "If an origin server receives a request that does not include an
        # Expect request-header field with the "100-continue" expectation,
        # the request includes a request body, and the server responds
        # with a final status code before reading the entire request body
        # from the transport connection, then the server SHOULD NOT close
        # the transport connection until it has read the entire request,
        # or until the client closes the connection. Otherwise, the client
        # might not reliably receive the response message. However, this
        # requirement is not to be construed as preventing a server from
        # defending itself against denial-of-service attacks, or from
        # badly broken client implementations."
        remaining = getattr(self.rfile, 'remaining', 0)
        if remaining > 0:
            self.rfile.read(remaining)

    if b'date' not in hkeys:
        self.outheaders.append((
            b'Date',
            email.utils.formatdate(usegmt=True).encode('ISO-8859-1'),
        ))

    if b'server' not in hkeys:
        self.outheaders.append((
            b'Server',
            self.server.server_name.encode('ISO-8859-1'),
        ))

    proto = self.server.protocol.encode('ascii')
    buf = [proto + SPACE + self.status + CRLF]
    for k, v in self.outheaders:
        buf.append(k + COLON + SPACE + v + CRLF)
    buf.append(CRLF)
    self.conn.wfile.write(EMPTY.join(buf))
+
+
+class HTTPConnection:
+ """An HTTP connection (active socket)."""
+
# Peer address/port (None until known).
remote_addr = None
remote_port = None
# TLS environment mapping, if any.
ssl_env = None
# Buffer sizes for the socket file objects created in __init__.
rbufsize = io.DEFAULT_BUFFER_SIZE
wbufsize = io.DEFAULT_BUFFER_SIZE
# Factory used to parse and answer each request on this connection.
RequestHandlerClass = HTTPRequest
# Peer-credential lookup toggles; mirrored from the server in __init__.
peercreds_enabled = False
peercreds_resolve_enabled = False

# Fields set by ConnectionManager.
last_used = None
+
def __init__(self, server, sock, makefile=MakeFile):
    """Initialize HTTPConnection instance.

    Args:
        server (HTTPServer): web server object receiving this request
        sock (socket._socketobject): the raw socket object (usually
            TCP) for this connection
        makefile (file): a fileobject class for reading from the socket
    """
    self.server = server
    self.socket = sock
    self.rfile = makefile(sock, 'rb', self.rbufsize)
    self.wfile = makefile(sock, 'wb', self.wbufsize)
    self.requests_seen = 0

    self.peercreds_enabled = self.server.peercreds_enabled
    self.peercreds_resolve_enabled = self.server.peercreds_resolve_enabled

    # Memoize the peer-credential lookups on a per-instance basis.
    # Ref: https://stackoverflow.com/a/14946506/595220
    memoize = lru_cache(maxsize=1)
    self.resolve_peer_creds = memoize(self.resolve_peer_creds)
    self.get_peer_creds = memoize(self.get_peer_creds)
+
def communicate(self):  # noqa: C901  # FIXME
    """Read each request and respond appropriately.

    Returns true if the connection should be kept open.
    """
    request_seen = False
    try:
        req = self.RequestHandlerClass(self.server, self)
        req.parse_request()
        if self.server.stats['Enabled']:
            self.requests_seen += 1
        if not req.ready:
            # Something went wrong in the parsing (and the server has
            # probably already made a simple_response). Return and
            # let the conn close.
            return False

        request_seen = True
        req.respond()
        if not req.close_connection:
            return True
    except socket.error as ex:
        errnum = ex.args[0]
        # sadly SSL sockets return a different (longer) time out string
        timeout_errs = 'timed out', 'The read operation timed out'
        if errnum in timeout_errs:
            # Don't error if we're between requests; only error
            # if 1) no request has been started at all, or 2) we're
            # in the middle of a request.
            # See https://github.com/cherrypy/cherrypy/issues/853
            if (not request_seen) or (req and req.started_request):
                self._conditional_error(req, '408 Request Timeout')
        elif errnum not in errors.socket_errors_to_ignore:
            self.server.error_log(
                'socket.error %s' % repr(errnum),
                level=logging.WARNING, traceback=True,
            )
            self._conditional_error(req, '500 Internal Server Error')
    except (KeyboardInterrupt, SystemExit):
        raise
    except errors.FatalSSLAlert:
        pass
    except errors.NoSSLError:
        self._handle_no_ssl(req)
    except Exception as ex:
        self.server.error_log(
            repr(ex), level=logging.ERROR, traceback=True,
        )
        self._conditional_error(req, '500 Internal Server Error')
    # Either an error occurred or the request asked to close: drop conn.
    return False
+
# When True, skip the shutdown/close in ``close()`` so the peer has a
# chance to read our entire response (see ``_handle_no_ssl``).
linger = False
+
def _handle_no_ssl(self, req):
    """Answer a plain-HTTP request received on an HTTPS-only port."""
    if not req or req.sent_headers:
        return
    # Unwrap wfile: write through the raw socket beneath the TLS layer
    # so the plain-text error actually reaches the client.
    try:
        raw_sock = self.socket._sock
    except AttributeError:
        # self.socket is of OpenSSL.SSL.Connection type
        raw_sock = self.socket._socket
    self.wfile = StreamWriter(raw_sock, 'wb', self.wbufsize)
    req.simple_response(
        '400 Bad Request',
        'The client sent a plain HTTP request, but '
        'this server only speaks HTTPS on this port.',
    )
    self.linger = True
+
def _conditional_error(self, req, response):
    """Respond with an error.

    Don't bother writing if a response
    has already started being written.
    """
    if req and not req.sent_headers:
        try:
            req.simple_response(response)
        except errors.FatalSSLAlert:
            pass
        except errors.NoSSLError:
            self._handle_no_ssl(req)
+
def close(self):
    """Close the socket underlying this connection."""
    self.rfile.close()

    if self.linger:
        # Sometimes we want to hang around for a bit to make sure the
        # client has a chance to read our entire response. Skipping the
        # close() calls here delays the FIN packet until the socket
        # object is garbage-collected later. Someday, perhaps, we'll do
        # the full lingering_close that Apache does, but not today.
        return

    self._close_kernel_socket()
    # Close the socket file descriptor (it is released by the OS once
    # no other reference to the underlying socket remains).
    self.socket.close()
+
def get_peer_creds(self):  # LRU cached on per-instance basis, see __init__
    """Return the PID/UID/GID tuple of the peer socket for UNIX sockets.

    This function uses SO_PEERCRED to query the UNIX PID, UID, GID
    of the peer, which is only available if the bind address is
    a UNIX domain socket.

    Raises:
        NotImplementedError: in case of unsupported socket type
        RuntimeError: in case of SO_PEERCRED lookup unsupported or disabled

    """
    # Three native ints, matching Linux's struct ucred {pid, uid, gid}.
    PEERCRED_STRUCT_DEF = '3i'

    if IS_WINDOWS or self.socket.family != socket.AF_UNIX:
        raise NotImplementedError(
            'SO_PEERCRED is only supported in Linux kernel and WSL',
        )
    elif not self.peercreds_enabled:
        raise RuntimeError(
            'Peer creds lookup is disabled within this server',
        )

    try:
        peer_creds = self.socket.getsockopt(
            # FIXME: Use LOCAL_CREDS for BSD-like OSs
            # Ref: https://gist.github.com/LucaFilipozzi/e4f1e118202aff27af6aadebda1b5d91  # noqa
            socket.SOL_SOCKET, socket.SO_PEERCRED,
            struct.calcsize(PEERCRED_STRUCT_DEF),
        )
    except socket.error as socket_err:
        """Non-Linux kernels don't support SO_PEERCRED.

        Refs:
            http://welz.org.za/notes/on-peer-cred.html
            https://github.com/daveti/tcpSockHack
            msdn.microsoft.com/en-us/commandline/wsl/release_notes#build-15025
        """
        six.raise_from(  # 3.6+: raise RuntimeError from socket_err
            RuntimeError,
            socket_err,
        )
    else:
        pid, uid, gid = struct.unpack(PEERCRED_STRUCT_DEF, peer_creds)
        return pid, uid, gid
+
@property
def peer_pid(self):
    """Return the id of the connected peer process."""
    return self.get_peer_creds()[0]

@property
def peer_uid(self):
    """Return the user id of the connected peer process."""
    return self.get_peer_creds()[1]

@property
def peer_gid(self):
    """Return the group id of the connected peer process."""
    return self.get_peer_creds()[2]
+
def resolve_peer_creds(self):  # LRU cached on per-instance basis
    """Look up the username and group tuple of the ``PEERCREDS``.

    :returns: the username and group tuple of the ``PEERCREDS``

    :raises NotImplementedError: if the OS is unsupported
    :raises RuntimeError: if UID/GID lookup is unsupported or disabled
    """
    if not IS_UID_GID_RESOLVABLE:
        raise NotImplementedError(
            'UID/GID lookup is unavailable under current platform. '
            'It can only be done under UNIX-like OS '
            'but not under the Google App Engine',
        )
    if not self.peercreds_resolve_enabled:
        raise RuntimeError(
            'UID/GID lookup is disabled within this server',
        )

    # Map the numeric peer credentials to symbolic names.
    return (
        pwd.getpwuid(self.peer_uid).pw_name,
        grp.getgrgid(self.peer_gid).gr_name,
    )
+
@property
def peer_user(self):
    """Return the username of the connected peer process."""
    return self.resolve_peer_creds()[0]

@property
def peer_group(self):
    """Return the group of the connected peer process."""
    return self.resolve_peer_creds()[1]
+
    def _close_kernel_socket(self):
        """Terminate the connection at the transport level."""
        # Honor ``sock_shutdown`` for PyOpenSSL connections.
        # Falls back to the plain socket's ``shutdown`` otherwise.
        shutdown = getattr(
            self.socket, 'sock_shutdown',
            self.socket.shutdown,
        )

        try:
            shutdown(socket.SHUT_RDWR)  # actually send a TCP FIN
        except errors.acceptable_sock_shutdown_exceptions:
            # Whole exception types that are fine to ignore on shutdown.
            pass
        except socket.error as e:
            # Only swallow errno values known to be benign here;
            # anything else is a genuine failure and must propagate.
            if e.errno not in errors.acceptable_sock_shutdown_error_codes:
                raise
+
+
+class HTTPServer:
+ """An HTTP server."""
+
+ _bind_addr = '127.0.0.1'
+ _interrupt = None
+
+ gateway = None
+ """A Gateway instance."""
+
+ minthreads = None
+ """The minimum number of worker threads to create (default 10)."""
+
+ maxthreads = None
+ """The maximum number of worker threads to create.
+
+ (default -1 = no limit)"""
+
+ server_name = None
+ """The name of the server; defaults to ``self.version``."""
+
+ protocol = 'HTTP/1.1'
+ """The version string to write in the Status-Line of all HTTP responses.
+
+ For example, "HTTP/1.1" is the default. This also limits the supported
+ features used in the response."""
+
+ request_queue_size = 5
+ """The 'backlog' arg to socket.listen(); max queued connections.
+
+ (default 5)."""
+
+ shutdown_timeout = 5
+ """The total time to wait for worker threads to cleanly exit.
+
+ Specified in seconds."""
+
+ timeout = 10
+ """The timeout in seconds for accepted connections (default 10)."""
+
+ expiration_interval = 0.5
+ """The interval, in seconds, at which the server checks for
+ expired connections (default 0.5).
+ """
+
+ version = 'Cheroot/{version!s}'.format(version=__version__)
+ """A version string for the HTTPServer."""
+
+ software = None
+ """The value to set for the SERVER_SOFTWARE entry in the WSGI environ.
+
+ If None, this defaults to ``'%s Server' % self.version``.
+ """
+
+ ready = False
+ """Internal flag which indicating the socket is accepting connections."""
+
+ max_request_header_size = 0
+ """The maximum size, in bytes, for request headers, or 0 for no limit."""
+
+ max_request_body_size = 0
+ """The maximum size, in bytes, for request bodies, or 0 for no limit."""
+
+ nodelay = True
+ """If True (the default since 3.1), sets the TCP_NODELAY socket option."""
+
+ ConnectionClass = HTTPConnection
+ """The class to use for handling HTTP connections."""
+
+ ssl_adapter = None
+ """An instance of ``ssl.Adapter`` (or a subclass).
+
+ Ref: :py:class:`ssl.Adapter <cheroot.ssl.Adapter>`.
+
+ You must have the corresponding TLS driver library installed.
+ """
+
+ peercreds_enabled = False
+ """
+ If :py:data:`True`, peer creds will be looked up via UNIX domain socket.
+ """
+
+ peercreds_resolve_enabled = False
+ """
+ If :py:data:`True`, username/group will be looked up in the OS from
+ ``PEERCREDS``-provided IDs.
+ """
+
+ keep_alive_conn_limit = 10
+ """The maximum number of waiting keep-alive connections that will be kept open.
+
+ Default is 10. Set to None to have unlimited connections."""
+
    def __init__(
        self, bind_addr, gateway,
        minthreads=10, maxthreads=-1, server_name=None,
        peercreds_enabled=False, peercreds_resolve_enabled=False,
    ):
        """Initialize HTTPServer instance.

        Args:
            bind_addr (tuple): network interface to listen to
            gateway (Gateway): gateway for processing HTTP requests
            minthreads (int): minimum number of threads for HTTP thread pool
            maxthreads (int): maximum number of threads for HTTP thread pool
            server_name (str): web server name to be advertised via Server
                HTTP header
            peercreds_enabled (bool): whether to query ``PEERCREDS`` on
                UNIX domain sockets
            peercreds_resolve_enabled (bool): whether to resolve the
                ``PEERCREDS`` ids to username/group; only effective when
                ``peercreds_enabled`` is also true
        """
        self.bind_addr = bind_addr
        self.gateway = gateway

        # Guarantee at least one worker thread even if 0/None was passed.
        self.requests = threadpool.ThreadPool(
            self, min=minthreads or 1, max=maxthreads,
        )

        if not server_name:
            server_name = self.version
        self.server_name = server_name
        self.peercreds_enabled = peercreds_enabled
        # Name resolution is meaningless without the raw creds lookup.
        self.peercreds_resolve_enabled = (
            peercreds_resolve_enabled and peercreds_enabled
        )
        self.clear_stats()
+
    def clear_stats(self):
        """Reset server stat counters."""
        self._start_time = None
        self._run_time = 0
        # Many entries are lambdas taking the stats dict itself (``s``) so
        # they are computed lazily when the stats are actually read.
        # When stats are disabled the aggregate entries report ``-1``.
        self.stats = {
            'Enabled': False,
            'Bind Address': lambda s: repr(self.bind_addr),
            'Run time': lambda s: (not s['Enabled']) and -1 or self.runtime(),
            'Accepts': 0,
            'Accepts/sec': lambda s: s['Accepts'] / self.runtime(),
            'Queue': lambda s: getattr(self.requests, 'qsize', None),
            'Threads': lambda s: len(getattr(self.requests, '_threads', [])),
            'Threads Idle': lambda s: getattr(self.requests, 'idle', None),
            'Socket Errors': 0,
            'Requests': lambda s: (not s['Enabled']) and -1 or sum(
                (w['Requests'](w) for w in s['Worker Threads'].values()), 0,
            ),
            'Bytes Read': lambda s: (not s['Enabled']) and -1 or sum(
                (w['Bytes Read'](w) for w in s['Worker Threads'].values()), 0,
            ),
            'Bytes Written': lambda s: (not s['Enabled']) and -1 or sum(
                (w['Bytes Written'](w) for w in s['Worker Threads'].values()),
                0,
            ),
            'Work Time': lambda s: (not s['Enabled']) and -1 or sum(
                (w['Work Time'](w) for w in s['Worker Threads'].values()), 0,
            ),
            # Throughput guards against zero work time with a 1e-6 floor.
            'Read Throughput': lambda s: (not s['Enabled']) and -1 or sum(
                (
                    w['Bytes Read'](w) / (w['Work Time'](w) or 1e-6)
                    for w in s['Worker Threads'].values()
                ), 0,
            ),
            'Write Throughput': lambda s: (not s['Enabled']) and -1 or sum(
                (
                    w['Bytes Written'](w) / (w['Work Time'](w) or 1e-6)
                    for w in s['Worker Threads'].values()
                ), 0,
            ),
            'Worker Threads': {},
        }
        # Register under a per-instance key for external stats collectors.
        logging.statistics['Cheroot HTTPServer %d' % id(self)] = self.stats
+
+ def runtime(self):
+ """Return server uptime."""
+ if self._start_time is None:
+ return self._run_time
+ else:
+ return self._run_time + (time.time() - self._start_time)
+
+ def __str__(self):
+ """Render Server instance representing bind address."""
+ return '%s.%s(%r)' % (
+ self.__module__, self.__class__.__name__,
+ self.bind_addr,
+ )
+
    @property
    def bind_addr(self):
        """Return the interface on which to listen for connections.

        For TCP sockets, a (host, port) tuple. Host values may be any
        :term:`IPv4` or :term:`IPv6` address, or any valid hostname.
        The string 'localhost' is a synonym for '127.0.0.1' (or '::1',
        if your hosts file prefers :term:`IPv6`).
        The string '0.0.0.0' is a special :term:`IPv4` entry meaning
        "any active interface" (INADDR_ANY), and '::' is the similar
        IN6ADDR_ANY for :term:`IPv6`.
        The empty string or :py:data:`None` are not allowed.

        For UNIX sockets, supply the file name as a string.

        Systemd socket activation is automatic and doesn't require tampering
        with this variable.

        .. glossary::

           :abbr:`IPv4 (Internet Protocol version 4)`
              Internet Protocol version 4

           :abbr:`IPv6 (Internet Protocol version 6)`
              Internet Protocol version 6
        """
        return self._bind_addr
+
    @bind_addr.setter
    def bind_addr(self, value):
        """Set the interface on which to listen for connections.

        :raises ValueError: if a tuple address has an empty or None host
        """
        if isinstance(value, tuple) and value[0] in ('', None):
            # Despite the socket module docs, using '' does not
            # allow AI_PASSIVE to work. Passing None instead
            # returns '0.0.0.0' like we want. In other words:
            #     host    AI_PASSIVE     result
            #      ''         Y         192.168.x.y
            #      ''         N         192.168.x.y
            #     None        Y         0.0.0.0
            #     None        N         127.0.0.1
            # But since you can get the same effect with an explicit
            # '0.0.0.0', we deny both the empty string and None as values.
            raise ValueError(
                "Host values of '' or None are not allowed. "
                "Use '0.0.0.0' (IPv4) or '::' (IPv6) instead "
                'to listen on all active interfaces.',
            )
        self._bind_addr = value
+
    def safe_start(self):
        """Run the server forever, and stop it cleanly on exit.

        Re-raises ``KeyboardInterrupt``/``IOError``/``SystemExit`` after
        logging and stopping, so the caller still sees the signal.
        """
        try:
            self.start()
        except (KeyboardInterrupt, IOError):
            # The time.sleep call might raise
            # "IOError: [Errno 4] Interrupted function call" on KBInt.
            self.error_log('Keyboard Interrupt: shutting down')
            self.stop()
            raise
        except SystemExit:
            self.error_log('SystemExit raised: shutting down')
            self.stop()
            raise
+
    def prepare(self):  # noqa: C901  # FIXME
        """Prepare the server for serving requests.

        It binds a socket's port, sets the socket up to ``listen()`` and
        does other preparatory things.
        """
        self._interrupt = None

        if self.software is None:
            self.software = '%s Server' % self.version

        # Select the appropriate socket: systemd fd, AF_UNIX path, or
        # AF_INET/AF_INET6 (host, port) — in that order of precedence.
        self.socket = None
        msg = 'No socket could be created'
        if os.getenv('LISTEN_PID', None):
            # systemd socket activation: fd 3 is the first passed socket
            self.socket = socket.fromfd(3, socket.AF_INET, socket.SOCK_STREAM)
        elif isinstance(self.bind_addr, (six.text_type, six.binary_type)):
            # AF_UNIX socket
            try:
                self.bind_unix_socket(self.bind_addr)
            except socket.error as serr:
                msg = '%s -- (%s: %s)' % (msg, self.bind_addr, serr)
                six.raise_from(socket.error(msg), serr)
        else:
            # AF_INET or AF_INET6 socket
            # Get the correct address family for our host (allows IPv6
            # addresses)
            host, port = self.bind_addr
            try:
                info = socket.getaddrinfo(
                    host, port, socket.AF_UNSPEC,
                    socket.SOCK_STREAM, 0, socket.AI_PASSIVE,
                )
            except socket.gaierror:
                # Resolution failed: fall back to a literal address guess.
                sock_type = socket.AF_INET
                bind_addr = self.bind_addr

                if ':' in host:
                    # ':' implies an IPv6 literal; extend to 4-tuple form.
                    sock_type = socket.AF_INET6
                    bind_addr = bind_addr + (0, 0)

                info = [(sock_type, socket.SOCK_STREAM, 0, '', bind_addr)]

            # Try each resolved address until one binds successfully.
            for res in info:
                af, socktype, proto, _canonname, sa = res
                try:
                    self.bind(af, socktype, proto)
                    break
                except socket.error as serr:
                    msg = '%s -- (%s: %s)' % (msg, sa, serr)
                    if self.socket:
                        self.socket.close()
                    self.socket = None

            if not self.socket:
                raise socket.error(msg)

        # Timeout so KeyboardInterrupt can be caught on Win32
        self.socket.settimeout(1)
        self.socket.listen(self.request_queue_size)

        # must not be accessed once stop() has been called
        self._connections = connections.ConnectionManager(self)

        # Create worker threads
        self.requests.start()

        self.ready = True
        self._start_time = time.time()
+
    def serve(self):
        """Serve requests, after invoking :func:`prepare()`.

        Loops until :py:attr:`ready` is cleared or an interrupt is set;
        then re-raises the interrupt from this (the serving) thread.
        """
        while self.ready and not self.interrupt:
            try:
                self._connections.run(self.expiration_interval)
            except (KeyboardInterrupt, SystemExit):
                raise
            except Exception:
                # Log and keep serving; a single bad iteration must not
                # take the whole server down.
                self.error_log(
                    'Error in HTTPServer.serve', level=logging.ERROR,
                    traceback=True,
                )

        # raise exceptions reported by any worker threads,
        # such that the exception is raised from the serve() thread.
        if self.interrupt:
            # Wait for the interrupt setter to finish stop(); while it is
            # in progress ``interrupt`` holds a private sentinel value.
            while self._stopping_for_interrupt:
                time.sleep(0.1)
            if self.interrupt:
                raise self.interrupt
+
    def start(self):
        """Run the server forever.

        It is a shortcut for invoking :func:`prepare()` then :func:`serve()`.
        """
        # We don't have to trap KeyboardInterrupt or SystemExit here,
        # because cherrypy.server already does so, calling self.stop() for us.
        # If you're using this server with another framework, you should
        # trap those exceptions in whatever code block calls start().
        self.prepare()
        self.serve()
+
    @contextlib.contextmanager
    def _run_in_thread(self):
        """Context manager for running this server in a thread.

        Yields the daemonized serving thread; the server is stopped when
        the ``with`` block exits, whether normally or via an exception.
        """
        self.prepare()
        thread = threading.Thread(target=self.serve)
        thread.daemon = True
        thread.start()
        try:
            yield thread
        finally:
            self.stop()
+
+ @property
+ def can_add_keepalive_connection(self):
+ """Flag whether it is allowed to add a new keep-alive connection."""
+ return self.ready and self._connections.can_add_keepalive_connection
+
+ def put_conn(self, conn):
+ """Put an idle connection back into the ConnectionManager."""
+ if self.ready:
+ self._connections.put(conn)
+ else:
+ # server is shutting down, just close it
+ conn.close()
+
    def error_log(self, msg='', level=20, traceback=False):
        """Write error message to log.

        Args:
            msg (str): error message
            level (int): logging level
            traceback (bool): add traceback to output or not
        """
        # Override this in subclasses as desired
        sys.stderr.write('{msg!s}\n'.format(msg=msg))
        sys.stderr.flush()
        if traceback:
            # NOTE: the parameter shadows the stdlib module name, hence
            # the module is imported elsewhere as ``traceback_``.
            tblines = traceback_.format_exc()
            sys.stderr.write(tblines)
            sys.stderr.flush()
+
    def bind(self, family, type, proto=0):
        """Create (or recreate) the actual socket object.

        Also rewrites :py:attr:`bind_addr` with the address the kernel
        actually bound (relevant for ephemeral port 0).
        """
        sock = self.prepare_socket(
            self.bind_addr,
            family, type, proto,
            self.nodelay, self.ssl_adapter,
        )
        sock = self.socket = self.bind_socket(sock, self.bind_addr)
        self.bind_addr = self.resolve_real_bind_addr(sock)
        return sock
+
    def bind_unix_socket(self, bind_addr):  # noqa: C901  # FIXME
        """Create (or recreate) a UNIX socket object.

        Unlinks any stale socket file first, binds, then tries several
        platform-specific ways to loosen the socket file permissions.
        """
        if IS_WINDOWS:
            """
            Trying to access socket.AF_UNIX under Windows
            causes an AttributeError.
            """
            raise ValueError(  # or RuntimeError?
                'AF_UNIX sockets are not supported under Windows.',
            )

        fs_permissions = 0o777  # TODO: allow changing mode

        try:
            # Make possible reusing the socket...
            os.unlink(self.bind_addr)
        except OSError:
            """
            File does not exist, which is the primary goal anyway.
            """
        except TypeError as typ_err:
            # Only swallow the known "NUL byte in path" message variants
            # emitted by different Python implementations/versions.
            err_msg = str(typ_err)
            if (
                    'remove() argument 1 must be encoded '
                    'string without null bytes, not unicode'
                    not in err_msg
                    and 'embedded NUL character' not in err_msg  # py34
                    and 'argument must be a '
                    'string without NUL characters' not in err_msg  # pypy2
            ):
                raise
        except ValueError as val_err:
            err_msg = str(val_err)
            if (
                    'unlink: embedded null '
                    'character in path' not in err_msg
                    and 'embedded null byte' not in err_msg
                    and 'argument must be a '
                    'string without NUL characters' not in err_msg  # pypy3
            ):
                raise

        sock = self.prepare_socket(
            bind_addr=bind_addr,
            family=socket.AF_UNIX, type=socket.SOCK_STREAM, proto=0,
            nodelay=self.nodelay, ssl_adapter=self.ssl_adapter,
        )

        try:
            """Linux way of pre-populating fs mode permissions."""
            # Allow everyone access the socket...
            os.fchmod(sock.fileno(), fs_permissions)
            FS_PERMS_SET = True
        except OSError:
            FS_PERMS_SET = False

        try:
            sock = self.bind_socket(sock, bind_addr)
        except socket.error:
            sock.close()
            raise

        bind_addr = self.resolve_real_bind_addr(sock)

        try:
            """FreeBSD/macOS pre-populating fs mode permissions."""
            if not FS_PERMS_SET:
                try:
                    os.lchmod(bind_addr, fs_permissions)
                except AttributeError:
                    os.chmod(bind_addr, fs_permissions, follow_symlinks=False)
                FS_PERMS_SET = True
        except OSError:
            pass

        if not FS_PERMS_SET:
            # Best-effort only: warn instead of failing the bind.
            self.error_log(
                'Failed to set socket fs mode permissions',
                level=logging.WARNING,
            )

        self.bind_addr = bind_addr
        self.socket = sock
        return sock
+
    @staticmethod
    def prepare_socket(bind_addr, family, type, proto, nodelay, ssl_adapter):
        """Create and prepare the socket object.

        Sets SO_REUSEADDR/TCP_NODELAY as appropriate, optionally wraps the
        socket with the SSL adapter, and enables dual-stack for ``::``.
        """
        sock = socket.socket(family, type, proto)
        connections.prevent_socket_inheritance(sock)

        host, port = bind_addr[:2]
        IS_EPHEMERAL_PORT = port == 0

        if not (IS_WINDOWS or IS_EPHEMERAL_PORT):
            """Enable SO_REUSEADDR for the current socket.

            Skip for Windows (has different semantics)
            or ephemeral ports (can steal ports from others).

            Refs:
            * https://msdn.microsoft.com/en-us/library/ms740621(v=vs.85).aspx
            * https://github.com/cherrypy/cheroot/issues/114
            * https://gavv.github.io/blog/ephemeral-port-reuse/
            """
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        # TCP_NODELAY is meaningless (and invalid) on AF_UNIX sockets,
        # which are identified here by a str/bytes bind address.
        if nodelay and not isinstance(
                bind_addr,
                (six.text_type, six.binary_type),
        ):
            sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)

        if ssl_adapter is not None:
            sock = ssl_adapter.bind(sock)

        # If listening on the IPV6 any address ('::' = IN6ADDR_ANY),
        # activate dual-stack. See
        # https://github.com/cherrypy/cherrypy/issues/871.
        listening_ipv6 = (
            hasattr(socket, 'AF_INET6')
            and family == socket.AF_INET6
            and host in ('::', '::0', '::0.0.0.0')
        )
        if listening_ipv6:
            try:
                sock.setsockopt(
                    socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0,
                )
            except (AttributeError, socket.error):
                # Apparently, the socket option is not available in
                # this machine's TCP stack
                pass

        return sock
+
+ @staticmethod
+ def bind_socket(socket_, bind_addr):
+ """Bind the socket to given interface."""
+ socket_.bind(bind_addr)
+ return socket_
+
    @staticmethod
    def resolve_real_bind_addr(socket_):
        """Retrieve actual bind address from bound socket."""
        # FIXME: keep requested bind_addr separate real bound_addr (port
        # is different in case of ephemeral port 0)
        bind_addr = socket_.getsockname()
        if socket_.family in (
                # Windows doesn't have socket.AF_UNIX, so not using it in check
                socket.AF_INET,
                socket.AF_INET6,
        ):
            """UNIX domain sockets are strings or bytes.

            In case of bytes with a leading null-byte it's an abstract socket.
            """
            # NOTE(review): the bare string above describes the *fall-through*
            # branch below (AF_UNIX), not this AF_INET/AF_INET6 branch, which
            # simply truncates getsockname() to (host, port).
            return bind_addr[:2]

        if isinstance(bind_addr, six.binary_type):
            bind_addr = bton(bind_addr)

        return bind_addr
+
+ def process_conn(self, conn):
+ """Process an incoming HTTPConnection."""
+ try:
+ self.requests.put(conn)
+ except queue.Full:
+ # Just drop the conn. TODO: write 503 back?
+ conn.close()
+
    @property
    def interrupt(self):
        """Return the exception that interrupted the server, if any."""
        return self._interrupt
+
    @property
    def _stopping_for_interrupt(self):
        """Return whether the server is responding to an interrupt."""
        # True only while the interrupt setter holds the private sentinel.
        return self._interrupt is _STOPPING_FOR_INTERRUPT
+
    @interrupt.setter
    def interrupt(self, interrupt):
        """Perform the shutdown of this server and save the exception.

        Typically invoked by a worker thread in
        :py:mod:`~cheroot.workers.threadpool`, the exception is raised
        from the thread running :py:meth:`serve` once :py:meth:`stop`
        has completed.
        """
        # The sentinel makes ``serve()`` wait until stop() has finished
        # before it re-raises the real interrupt stored afterwards.
        self._interrupt = _STOPPING_FOR_INTERRUPT
        self.stop()
        self._interrupt = interrupt
+
    def stop(self):  # noqa: C901  # FIXME
        """Gracefully shutdown a server that is serving forever."""
        if not self.ready:
            return  # already stopped

        self.ready = False
        # Fold the just-finished serving interval into the accumulated
        # run time so ``runtime()`` stays accurate after shutdown.
        if self._start_time is not None:
            self._run_time += (time.time() - self._start_time)
            self._start_time = None

        self._connections.stop()

        sock = getattr(self, 'socket', None)
        if sock:
            if not isinstance(
                    self.bind_addr,
                    (six.text_type, six.binary_type),
            ):
                # Touch our own socket to make accept() return immediately.
                try:
                    host, port = sock.getsockname()[:2]
                except socket.error as ex:
                    if ex.args[0] not in errors.socket_errors_to_ignore:
                        # Changed to use error code and not message
                        # See
                        # https://github.com/cherrypy/cherrypy/issues/860.
                        raise
                else:
                    # Note that we're explicitly NOT using AI_PASSIVE,
                    # here, because we want an actual IP to touch.
                    # localhost won't work if we've bound to a public IP,
                    # but it will if we bound to '0.0.0.0' (INADDR_ANY).
                    for res in socket.getaddrinfo(
                            host, port, socket.AF_UNSPEC,
                            socket.SOCK_STREAM,
                    ):
                        af, socktype, proto, _canonname, _sa = res
                        s = None
                        try:
                            s = socket.socket(af, socktype, proto)
                            # See
                            # https://groups.google.com/group/cherrypy-users/
                            # browse_frm/thread/bbfe5eb39c904fe0
                            s.settimeout(1.0)
                            s.connect((host, port))
                            s.close()
                        except socket.error:
                            if s:
                                s.close()
            if hasattr(sock, 'close'):
                sock.close()
            self.socket = None

        self._connections.close()
        self.requests.stop(self.shutdown_timeout)
+
+
class Gateway:
    """Base class to interface HTTPServer with other systems, such as WSGI."""

    def __init__(self, req):
        """Initialize Gateway instance with request.

        Args:
            req (HTTPRequest): current HTTP request
        """
        self.req = req

    def respond(self):
        """Process the current request. Must be overridden in a subclass.

        :raises NotImplementedError: always, in this base class
        """
        raise NotImplementedError  # pragma: no cover
+
+
+# These may either be ssl.Adapter subclasses or the string names
+# of such classes (in which case they will be lazily loaded).
+ssl_adapters = {
+ 'builtin': 'cheroot.ssl.builtin.BuiltinSSLAdapter',
+ 'pyopenssl': 'cheroot.ssl.pyopenssl.pyOpenSSLAdapter',
+}
+
+
def get_ssl_adapter_class(name='builtin'):
    """Return an SSL adapter class for the given name.

    Args:
        name (str): key into :py:data:`ssl_adapters` — either an adapter
            class or a dotted-path string to be imported lazily

    :raises AttributeError: if the module lacks the named attribute
    """
    adapter = ssl_adapters[name.lower()]
    if isinstance(adapter, six.string_types):
        # Lazy import: split the dotted path into module and attribute.
        last_dot = adapter.rfind('.')
        attr_name = adapter[last_dot + 1:]
        mod_path = adapter[:last_dot]

        try:
            mod = sys.modules[mod_path]
            if mod is None:
                raise KeyError()
        except KeyError:
            # The last [''] is important.
            mod = __import__(mod_path, globals(), locals(), [''])

        # Let an AttributeError propagate outward.
        try:
            adapter = getattr(mod, attr_name)
        except AttributeError:
            raise AttributeError(
                "'%s' object has no attribute '%s'"
                % (mod_path, attr_name),
            )

    return adapter
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/server.pyi b/monEnvTP/lib/python3.8/site-packages/cheroot/server.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..864adff4ef75947807c67773e455ee0cd1aff380
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/server.pyi
@@ -0,0 +1,172 @@
+from typing import Any
+
+class HeaderReader:
+ def __call__(self, rfile, hdict: Any | None = ...): ...
+
+class DropUnderscoreHeaderReader(HeaderReader): ...
+
+class SizeCheckWrapper:
+ rfile: Any
+ maxlen: Any
+ bytes_read: int
+ def __init__(self, rfile, maxlen) -> None: ...
+ def read(self, size: Any | None = ...): ...
+ def readline(self, size: Any | None = ...): ...
+ def readlines(self, sizehint: int = ...): ...
+ def close(self) -> None: ...
+ def __iter__(self): ...
+ def __next__(self): ...
+ next: Any
+
+class KnownLengthRFile:
+ rfile: Any
+ remaining: Any
+ def __init__(self, rfile, content_length) -> None: ...
+ def read(self, size: Any | None = ...): ...
+ def readline(self, size: Any | None = ...): ...
+ def readlines(self, sizehint: int = ...): ...
+ def close(self) -> None: ...
+ def __iter__(self): ...
+ def __next__(self): ...
+ next: Any
+
+class ChunkedRFile:
+ rfile: Any
+ maxlen: Any
+ bytes_read: int
+ buffer: Any
+ bufsize: Any
+ closed: bool
+ def __init__(self, rfile, maxlen, bufsize: int = ...) -> None: ...
+ def read(self, size: Any | None = ...): ...
+ def readline(self, size: Any | None = ...): ...
+ def readlines(self, sizehint: int = ...): ...
+ def read_trailer_lines(self) -> None: ...
+ def close(self) -> None: ...
+
+class HTTPRequest:
+ server: Any
+ conn: Any
+ inheaders: Any
+ outheaders: Any
+ ready: bool
+ close_connection: bool
+ chunked_write: bool
+ header_reader: Any
+ started_request: bool
+ scheme: bytes
+ response_protocol: str
+ status: str
+ sent_headers: bool
+ chunked_read: bool
+ proxy_mode: Any
+ strict_mode: Any
+ def __init__(self, server, conn, proxy_mode: bool = ..., strict_mode: bool = ...) -> None: ...
+ rfile: Any
+ def parse_request(self) -> None: ...
+ uri: Any
+ method: Any
+ authority: Any
+ path: Any
+ qs: Any
+ request_protocol: Any
+ def read_request_line(self): ...
+ def read_request_headers(self): ...
+ def respond(self) -> None: ...
+ def simple_response(self, status, msg: str = ...) -> None: ...
+ def ensure_headers_sent(self) -> None: ...
+ def write(self, chunk) -> None: ...
+ def send_headers(self) -> None: ...
+
+class HTTPConnection:
+ remote_addr: Any
+ remote_port: Any
+ ssl_env: Any
+ rbufsize: Any
+ wbufsize: Any
+ RequestHandlerClass: Any
+ peercreds_enabled: bool
+ peercreds_resolve_enabled: bool
+ last_used: Any
+ server: Any
+ socket: Any
+ rfile: Any
+ wfile: Any
+ requests_seen: int
+ def __init__(self, server, sock, makefile=...) -> None: ...
+ def communicate(self): ...
+ linger: bool
+ def close(self) -> None: ...
+ def get_peer_creds(self): ...
+ @property
+ def peer_pid(self): ...
+ @property
+ def peer_uid(self): ...
+ @property
+ def peer_gid(self): ...
+ def resolve_peer_creds(self): ...
+ @property
+ def peer_user(self): ...
+ @property
+ def peer_group(self): ...
+
+class HTTPServer:
+ gateway: Any
+ minthreads: Any
+ maxthreads: Any
+ server_name: Any
+ protocol: str
+ request_queue_size: int
+ shutdown_timeout: int
+ timeout: int
+ expiration_interval: float
+ version: Any
+ software: Any
+ ready: bool
+ max_request_header_size: int
+ max_request_body_size: int
+ nodelay: bool
+ ConnectionClass: Any
+ ssl_adapter: Any
+ peercreds_enabled: bool
+ peercreds_resolve_enabled: bool
+ keep_alive_conn_limit: int
+ requests: Any
+ def __init__(self, bind_addr, gateway, minthreads: int = ..., maxthreads: int = ..., server_name: Any | None = ..., peercreds_enabled: bool = ..., peercreds_resolve_enabled: bool = ...) -> None: ...
+ stats: Any
+ def clear_stats(self): ...
+ def runtime(self): ...
+ @property
+ def bind_addr(self): ...
+ @bind_addr.setter
+ def bind_addr(self, value) -> None: ...
+ def safe_start(self) -> None: ...
+ socket: Any
+ def prepare(self) -> None: ...
+ def serve(self) -> None: ...
+ def start(self) -> None: ...
+ @property
+ def can_add_keepalive_connection(self): ...
+ def put_conn(self, conn) -> None: ...
+ def error_log(self, msg: str = ..., level: int = ..., traceback: bool = ...) -> None: ...
+ def bind(self, family, type, proto: int = ...): ...
+ def bind_unix_socket(self, bind_addr): ...
+ @staticmethod
+ def prepare_socket(bind_addr, family, type, proto, nodelay, ssl_adapter): ...
+ @staticmethod
+ def bind_socket(socket_, bind_addr): ...
+ @staticmethod
+ def resolve_real_bind_addr(socket_): ...
+ def process_conn(self, conn) -> None: ...
+ @property
+ def interrupt(self): ...
+ @interrupt.setter
+ def interrupt(self, interrupt) -> None: ...
+ def stop(self) -> None: ...
+
+class Gateway:
+ req: Any
+ def __init__(self, req) -> None: ...
+ def respond(self) -> None: ...
+
+def get_ssl_adapter_class(name: str = ...): ...
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/ssl/__init__.py b/monEnvTP/lib/python3.8/site-packages/cheroot/ssl/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..d45fd7f17fb0e68c844c4ecc8a601733f21a6141
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/ssl/__init__.py
@@ -0,0 +1,52 @@
+"""Implementation of the SSL adapter base interface."""
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+from abc import ABCMeta, abstractmethod
+
+from six import add_metaclass
+
+
@add_metaclass(ABCMeta)
class Adapter:
    """Base class for SSL driver library adapters.

    Required methods:

        * ``wrap(sock) -> (wrapped socket, ssl environ dict)``
        * ``makefile(sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE) ->
          socket file object``
    """

    @abstractmethod
    def __init__(
            self, certificate, private_key, certificate_chain=None,
            ciphers=None,
    ):
        """Set up certificates, private key ciphers and reset context."""
        # Concrete adapters are expected to call this and then build
        # their own ``context`` from these stored settings.
        self.certificate = certificate
        self.private_key = private_key
        self.certificate_chain = certificate_chain
        self.ciphers = ciphers
        self.context = None

    @abstractmethod
    def bind(self, sock):
        """Wrap and return the given socket."""
        # Default implementation is a pass-through; subclasses may
        # replace the socket entirely.
        return sock

    @abstractmethod
    def wrap(self, sock):
        """Wrap and return the given socket, plus WSGI environ entries."""
        raise NotImplementedError  # pragma: no cover

    @abstractmethod
    def get_environ(self):
        """Return WSGI environ entries to be merged into each request."""
        raise NotImplementedError  # pragma: no cover

    @abstractmethod
    def makefile(self, sock, mode='r', bufsize=-1):
        """Return socket file object."""
        raise NotImplementedError  # pragma: no cover
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/ssl/__init__.pyi b/monEnvTP/lib/python3.8/site-packages/cheroot/ssl/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..a98076606cd75b04f711a1065e8d623a5945c97a
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/ssl/__init__.pyi
@@ -0,0 +1,19 @@
+from abc import abstractmethod
+from typing import Any
+
+class Adapter():
+ certificate: Any
+ private_key: Any
+ certificate_chain: Any
+ ciphers: Any
+ context: Any
+ @abstractmethod
+ def __init__(self, certificate, private_key, certificate_chain: Any | None = ..., ciphers: Any | None = ...): ...
+ @abstractmethod
+ def bind(self, sock): ...
+ @abstractmethod
+ def wrap(self, sock): ...
+ @abstractmethod
+ def get_environ(self): ...
+ @abstractmethod
+ def makefile(self, sock, mode: str = ..., bufsize: int = ...): ...
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/ssl/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cheroot/ssl/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..eb26eb40e5e4b59b74c6043d484a11428d6205b8
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cheroot/ssl/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/ssl/__pycache__/builtin.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cheroot/ssl/__pycache__/builtin.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d0c8cbbc6f20aa2afd1cc796dc0603081c2229b9
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cheroot/ssl/__pycache__/builtin.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/ssl/__pycache__/pyopenssl.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cheroot/ssl/__pycache__/pyopenssl.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..058218fb8e34642f7ea9597cea78939d4bdb1e42
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cheroot/ssl/__pycache__/pyopenssl.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/ssl/builtin.py b/monEnvTP/lib/python3.8/site-packages/cheroot/ssl/builtin.py
new file mode 100644
index 0000000000000000000000000000000000000000..ff987a7102b9496a6c26af093a4ab56626882067
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/ssl/builtin.py
@@ -0,0 +1,485 @@
+"""
+A library for integrating Python's builtin :py:mod:`ssl` library with Cheroot.
+
+The :py:mod:`ssl` module must be importable for SSL functionality.
+
+To use this module, set ``HTTPServer.ssl_adapter`` to an instance of
+``BuiltinSSLAdapter``.
+"""
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+import socket
+import sys
+import threading
+
+try:
+ import ssl
+except ImportError:
+ ssl = None
+
+try:
+ from _pyio import DEFAULT_BUFFER_SIZE
+except ImportError:
+ try:
+ from io import DEFAULT_BUFFER_SIZE
+ except ImportError:
+ DEFAULT_BUFFER_SIZE = -1
+
+import six
+
+from . import Adapter
+from .. import errors
+from .._compat import IS_ABOVE_OPENSSL10, suppress
+from ..makefile import StreamReader, StreamWriter
+from ..server import HTTPServer
+
+if six.PY2:
+ generic_socket_error = socket.error
+else:
+ generic_socket_error = OSError
+
+
+def _assert_ssl_exc_contains(exc, *msgs):
+ """Check whether SSL exception contains either of messages provided."""
+ if len(msgs) < 1:
+ raise TypeError(
+ '_assert_ssl_exc_contains() requires '
+ 'at least one message to be passed.',
+ )
+ err_msg_lower = str(exc).lower()
+ return any(m.lower() in err_msg_lower for m in msgs)
+
+
+def _loopback_for_cert_thread(context, server):
+ """Wrap a socket in ssl and perform the server-side handshake."""
+ # As we only care about parsing the certificate, the failure of
+ # which will cause an exception in ``_loopback_for_cert``,
+ # we can safely ignore connection and ssl related exceptions. Ref:
+ # https://github.com/cherrypy/cheroot/issues/302#issuecomment-662592030
+ with suppress(ssl.SSLError, OSError):
+ with context.wrap_socket(
+ server, do_handshake_on_connect=True, server_side=True,
+ ) as ssl_sock:
+ # in TLS 1.3 (Python 3.7+, OpenSSL 1.1.1+), the server
+ # sends the client session tickets that can be used to
+ # resume the TLS session on a new connection without
+ # performing the full handshake again. session tickets are
+ # sent as a post-handshake message at some _unspecified_
+ # time and thus a successful connection may be closed
+ # without the client having received the tickets.
+ # Unfortunately, on Windows (Python 3.8+), this is treated
+ # as an incomplete handshake on the server side and a
+ # ``ConnectionAbortedError`` is raised.
+ # TLS 1.3 support is still incomplete in Python 3.8;
+ # there is no way for the client to wait for tickets.
+ # While not necessary for retrieving the parsed certificate,
+ # we send a tiny bit of data over the connection in an
+ # attempt to give the server a chance to send the session
+ # tickets and close the connection cleanly.
+ # Note that, as this is essentially a race condition,
+ # the error may still occur ocasionally.
+ ssl_sock.send(b'0000')
+
+
+def _loopback_for_cert(certificate, private_key, certificate_chain):
+ """Create a loopback connection to parse a cert with a private key."""
+ context = ssl.create_default_context(cafile=certificate_chain)
+ context.load_cert_chain(certificate, private_key)
+ context.check_hostname = False
+ context.verify_mode = ssl.CERT_NONE
+
+ # Python 3+ Unix, Python 3.5+ Windows
+ client, server = socket.socketpair()
+ try:
+ # `wrap_socket` will block until the ssl handshake is complete.
+ # it must be called on both ends at the same time -> thread
+ # openssl will cache the peer's cert during a successful handshake
+ # and return it via `getpeercert` even after the socket is closed.
+ # when `close` is called, the SSL shutdown notice will be sent
+ # and then python will wait to receive the corollary shutdown.
+ thread = threading.Thread(
+ target=_loopback_for_cert_thread, args=(context, server),
+ )
+ try:
+ thread.start()
+ with context.wrap_socket(
+ client, do_handshake_on_connect=True,
+ server_side=False,
+ ) as ssl_sock:
+ ssl_sock.recv(4)
+ return ssl_sock.getpeercert()
+ finally:
+ thread.join()
+ finally:
+ client.close()
+ server.close()
+
+
+def _parse_cert(certificate, private_key, certificate_chain):
+ """Parse a certificate."""
+ # loopback_for_cert uses socket.socketpair which was only
+ # introduced in Python 3.0 for *nix and 3.5 for Windows
+ # and requires OS support (AttributeError, OSError)
+ # it also requires a private key either in its own file
+ # or combined with the cert (SSLError)
+ with suppress(AttributeError, ssl.SSLError, OSError):
+ return _loopback_for_cert(certificate, private_key, certificate_chain)
+
+ # KLUDGE: using an undocumented, private, test method to parse a cert
+ # unfortunately, it is the only built-in way without a connection
+ # as a private, undocumented method, it may change at any time
+ # so be tolerant of *any* possible errors it may raise
+ with suppress(Exception):
+ return ssl._ssl._test_decode_cert(certificate)
+
+ return {}
+
+
+def _sni_callback(sock, sni, context):
+ """Handle the SNI callback to tag the socket with the SNI."""
+ sock.sni = sni
+ # return None to allow the TLS negotiation to continue
+
+
+class BuiltinSSLAdapter(Adapter):
+ """Wrapper for integrating Python's builtin :py:mod:`ssl` with Cheroot."""
+
+ certificate = None
+ """The file name of the server SSL certificate."""
+
+ private_key = None
+ """The file name of the server's private key file."""
+
+ certificate_chain = None
+ """The file name of the certificate chain file."""
+
+ ciphers = None
+ """The ciphers list of SSL."""
+
+ # from mod_ssl/pkg.sslmod/ssl_engine_vars.c ssl_var_lookup_ssl_cert
+ CERT_KEY_TO_ENV = {
+ 'version': 'M_VERSION',
+ 'serialNumber': 'M_SERIAL',
+ 'notBefore': 'V_START',
+ 'notAfter': 'V_END',
+ 'subject': 'S_DN',
+ 'issuer': 'I_DN',
+ 'subjectAltName': 'SAN',
+ # not parsed by the Python standard library
+ # - A_SIG
+ # - A_KEY
+ # not provided by mod_ssl
+ # - OCSP
+ # - caIssuers
+ # - crlDistributionPoints
+ }
+
+ # from mod_ssl/pkg.sslmod/ssl_engine_vars.c ssl_var_lookup_ssl_cert_dn_rec
+ CERT_KEY_TO_LDAP_CODE = {
+ 'countryName': 'C',
+ 'stateOrProvinceName': 'ST',
+ # NOTE: mod_ssl also provides 'stateOrProvinceName' as 'SP'
+ # for compatibility with SSLeay
+ 'localityName': 'L',
+ 'organizationName': 'O',
+ 'organizationalUnitName': 'OU',
+ 'commonName': 'CN',
+ 'title': 'T',
+ 'initials': 'I',
+ 'givenName': 'G',
+ 'surname': 'S',
+ 'description': 'D',
+ 'userid': 'UID',
+ 'emailAddress': 'Email',
+ # not provided by mod_ssl
+ # - dnQualifier: DNQ
+ # - domainComponent: DC
+ # - postalCode: PC
+ # - streetAddress: STREET
+ # - serialNumber
+ # - generationQualifier
+ # - pseudonym
+ # - jurisdictionCountryName
+ # - jurisdictionLocalityName
+ # - jurisdictionStateOrProvince
+ # - businessCategory
+ }
+
+ def __init__(
+ self, certificate, private_key, certificate_chain=None,
+ ciphers=None,
+ ):
+ """Set up context in addition to base class properties if available."""
+ if ssl is None:
+ raise ImportError('You must install the ssl module to use HTTPS.')
+
+ super(BuiltinSSLAdapter, self).__init__(
+ certificate, private_key, certificate_chain, ciphers,
+ )
+
+ self.context = ssl.create_default_context(
+ purpose=ssl.Purpose.CLIENT_AUTH,
+ cafile=certificate_chain,
+ )
+ self.context.load_cert_chain(certificate, private_key)
+ if self.ciphers is not None:
+ self.context.set_ciphers(ciphers)
+
+ self._server_env = self._make_env_cert_dict(
+ 'SSL_SERVER',
+ _parse_cert(certificate, private_key, self.certificate_chain),
+ )
+ if not self._server_env:
+ return
+ cert = None
+ with open(certificate, mode='rt') as f:
+ cert = f.read()
+
+ # strip off any keys by only taking the first certificate
+ cert_start = cert.find(ssl.PEM_HEADER)
+ if cert_start == -1:
+ return
+ cert_end = cert.find(ssl.PEM_FOOTER, cert_start)
+ if cert_end == -1:
+ return
+ cert_end += len(ssl.PEM_FOOTER)
+ self._server_env['SSL_SERVER_CERT'] = cert[cert_start:cert_end]
+
+ @property
+ def context(self):
+ """:py:class:`~ssl.SSLContext` that will be used to wrap sockets."""
+ return self._context
+
+ @context.setter
+ def context(self, context):
+ """Set the ssl ``context`` to use."""
+ self._context = context
+ # Python 3.7+
+ # if a context is provided via `cherrypy.config.update` then
+ # `self.context` will be set after `__init__`
+ # use a property to intercept it to add an SNI callback
+ # but don't override the user's callback
+ # TODO: chain callbacks
+ with suppress(AttributeError):
+ if ssl.HAS_SNI and context.sni_callback is None:
+ context.sni_callback = _sni_callback
+
+ def bind(self, sock):
+ """Wrap and return the given socket."""
+ return super(BuiltinSSLAdapter, self).bind(sock)
+
+ def wrap(self, sock):
+ """Wrap and return the given socket, plus WSGI environ entries."""
+ EMPTY_RESULT = None, {}
+ try:
+ s = self.context.wrap_socket(
+ sock, do_handshake_on_connect=True, server_side=True,
+ )
+ except ssl.SSLError as ex:
+ if ex.errno == ssl.SSL_ERROR_EOF:
+ # This is almost certainly due to the cherrypy engine
+ # 'pinging' the socket to assert it's connectable;
+ # the 'ping' isn't SSL.
+ return EMPTY_RESULT
+ elif ex.errno == ssl.SSL_ERROR_SSL:
+ if _assert_ssl_exc_contains(ex, 'http request'):
+ # The client is speaking HTTP to an HTTPS server.
+ raise errors.NoSSLError
+
+ # Check if it's one of the known errors
+ # Errors that are caught by PyOpenSSL, but thrown by
+ # built-in ssl
+ _block_errors = (
+ 'unknown protocol', 'unknown ca', 'unknown_ca',
+ 'unknown error',
+ 'https proxy request', 'inappropriate fallback',
+ 'wrong version number',
+ 'no shared cipher', 'certificate unknown',
+ 'ccs received early',
+ 'certificate verify failed', # client cert w/o trusted CA
+ 'version too low', # caused by SSL3 connections
+ 'unsupported protocol', # caused by TLS1 connections
+ )
+ if _assert_ssl_exc_contains(ex, *_block_errors):
+ # Accepted error, let's pass
+ return EMPTY_RESULT
+ elif _assert_ssl_exc_contains(ex, 'handshake operation timed out'):
+ # This error is thrown by builtin SSL after a timeout
+ # when client is speaking HTTP to an HTTPS server.
+ # The connection can safely be dropped.
+ return EMPTY_RESULT
+ raise
+ except generic_socket_error as exc:
+ """It is unclear why exactly this happens.
+
+ It's reproducible only with openssl>1.0 and stdlib
+ :py:mod:`ssl` wrapper.
+ In CherryPy it's triggered by Checker plugin, which connects
+ to the app listening to the socket port in TLS mode via plain
+ HTTP during startup (from the same process).
+
+
+ Ref: https://github.com/cherrypy/cherrypy/issues/1618
+ """
+ is_error0 = exc.args == (0, 'Error')
+
+ if is_error0 and IS_ABOVE_OPENSSL10:
+ return EMPTY_RESULT
+ raise
+ return s, self.get_environ(s)
+
+ def get_environ(self, sock):
+ """Create WSGI environ entries to be merged into each request."""
+ cipher = sock.cipher()
+ ssl_environ = {
+ 'wsgi.url_scheme': 'https',
+ 'HTTPS': 'on',
+ 'SSL_PROTOCOL': cipher[1],
+ 'SSL_CIPHER': cipher[0],
+ 'SSL_CIPHER_EXPORT': '',
+ 'SSL_CIPHER_USEKEYSIZE': cipher[2],
+ 'SSL_VERSION_INTERFACE': '%s Python/%s' % (
+ HTTPServer.version, sys.version,
+ ),
+ 'SSL_VERSION_LIBRARY': ssl.OPENSSL_VERSION,
+ 'SSL_CLIENT_VERIFY': 'NONE',
+ # 'NONE' - client did not provide a cert (overriden below)
+ }
+
+ # Python 3.3+
+ with suppress(AttributeError):
+ compression = sock.compression()
+ if compression is not None:
+ ssl_environ['SSL_COMPRESS_METHOD'] = compression
+
+ # Python 3.6+
+ with suppress(AttributeError):
+ ssl_environ['SSL_SESSION_ID'] = sock.session.id.hex()
+ with suppress(AttributeError):
+ target_cipher = cipher[:2]
+ for cip in sock.context.get_ciphers():
+ if target_cipher == (cip['name'], cip['protocol']):
+ ssl_environ['SSL_CIPHER_ALGKEYSIZE'] = cip['alg_bits']
+ break
+
+ # Python 3.7+ sni_callback
+ with suppress(AttributeError):
+ ssl_environ['SSL_TLS_SNI'] = sock.sni
+
+ if self.context and self.context.verify_mode != ssl.CERT_NONE:
+ client_cert = sock.getpeercert()
+ if client_cert:
+ # builtin ssl **ALWAYS** validates client certificates
+ # and terminates the connection on failure
+ ssl_environ['SSL_CLIENT_VERIFY'] = 'SUCCESS'
+ ssl_environ.update(
+ self._make_env_cert_dict('SSL_CLIENT', client_cert),
+ )
+ ssl_environ['SSL_CLIENT_CERT'] = ssl.DER_cert_to_PEM_cert(
+ sock.getpeercert(binary_form=True),
+ ).strip()
+
+ ssl_environ.update(self._server_env)
+
+ # not supplied by the Python standard library (as of 3.8)
+ # - SSL_SESSION_RESUMED
+ # - SSL_SECURE_RENEG
+ # - SSL_CLIENT_CERT_CHAIN_n
+ # - SRP_USER
+ # - SRP_USERINFO
+
+ return ssl_environ
+
+ def _make_env_cert_dict(self, env_prefix, parsed_cert):
+ """Return a dict of WSGI environment variables for a certificate.
+
+ E.g. SSL_CLIENT_M_VERSION, SSL_CLIENT_M_SERIAL, etc.
+ See https://httpd.apache.org/docs/2.4/mod/mod_ssl.html#envvars.
+ """
+ if not parsed_cert:
+ return {}
+
+ env = {}
+ for cert_key, env_var in self.CERT_KEY_TO_ENV.items():
+ key = '%s_%s' % (env_prefix, env_var)
+ value = parsed_cert.get(cert_key)
+ if env_var == 'SAN':
+ env.update(self._make_env_san_dict(key, value))
+ elif env_var.endswith('_DN'):
+ env.update(self._make_env_dn_dict(key, value))
+ else:
+ env[key] = str(value)
+
+ # mod_ssl 2.1+; Python 3.2+
+ # number of days until the certificate expires
+ if 'notBefore' in parsed_cert:
+ remain = ssl.cert_time_to_seconds(parsed_cert['notAfter'])
+ remain -= ssl.cert_time_to_seconds(parsed_cert['notBefore'])
+ remain /= 60 * 60 * 24
+ env['%s_V_REMAIN' % (env_prefix,)] = str(int(remain))
+
+ return env
+
+ def _make_env_san_dict(self, env_prefix, cert_value):
+ """Return a dict of WSGI environment variables for a certificate DN.
+
+ E.g. SSL_CLIENT_SAN_Email_0, SSL_CLIENT_SAN_DNS_0, etc.
+ See SSL_CLIENT_SAN_* at
+ https://httpd.apache.org/docs/2.4/mod/mod_ssl.html#envvars.
+ """
+ if not cert_value:
+ return {}
+
+ env = {}
+ dns_count = 0
+ email_count = 0
+ for attr_name, val in cert_value:
+ if attr_name == 'DNS':
+ env['%s_DNS_%i' % (env_prefix, dns_count)] = val
+ dns_count += 1
+ elif attr_name == 'Email':
+ env['%s_Email_%i' % (env_prefix, email_count)] = val
+ email_count += 1
+
+ # other mod_ssl SAN vars:
+ # - SAN_OTHER_msUPN_n
+ return env
+
+ def _make_env_dn_dict(self, env_prefix, cert_value):
+ """Return a dict of WSGI environment variables for a certificate DN.
+
+ E.g. SSL_CLIENT_S_DN_CN, SSL_CLIENT_S_DN_C, etc.
+ See SSL_CLIENT_S_DN_x509 at
+ https://httpd.apache.org/docs/2.4/mod/mod_ssl.html#envvars.
+ """
+ if not cert_value:
+ return {}
+
+ dn = []
+ dn_attrs = {}
+ for rdn in cert_value:
+ for attr_name, val in rdn:
+ attr_code = self.CERT_KEY_TO_LDAP_CODE.get(attr_name)
+ dn.append('%s=%s' % (attr_code or attr_name, val))
+ if not attr_code:
+ continue
+ dn_attrs.setdefault(attr_code, [])
+ dn_attrs[attr_code].append(val)
+
+ env = {
+ env_prefix: ','.join(dn),
+ }
+ for attr_code, values in dn_attrs.items():
+ env['%s_%s' % (env_prefix, attr_code)] = ','.join(values)
+ if len(values) == 1:
+ continue
+ for i, val in enumerate(values):
+ env['%s_%s_%i' % (env_prefix, attr_code, i)] = val
+ return env
+
+ def makefile(self, sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE):
+ """Return socket file object."""
+ cls = StreamReader if 'r' in mode else StreamWriter
+ return cls(sock, mode, bufsize)
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/ssl/builtin.pyi b/monEnvTP/lib/python3.8/site-packages/cheroot/ssl/builtin.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..fdc656e03de57ac7f036b011cbb8efd00d1c7f7a
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/ssl/builtin.pyi
@@ -0,0 +1,18 @@
+from typing import Any
+from . import Adapter
+
+generic_socket_error: OSError
+DEFAULT_BUFFER_SIZE: int
+
+class BuiltinSSLAdapter(Adapter):
+ CERT_KEY_TO_ENV: Any
+ CERT_KEY_TO_LDAP_CODE: Any
+ def __init__(self, certificate, private_key, certificate_chain: Any | None = ..., ciphers: Any | None = ...) -> None: ...
+ @property
+ def context(self): ...
+ @context.setter
+ def context(self, context) -> None: ...
+ def bind(self, sock): ...
+ def wrap(self, sock): ...
+ def get_environ(self): ...
+ def makefile(self, sock, mode: str = ..., bufsize: int = ...): ...
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/ssl/pyopenssl.py b/monEnvTP/lib/python3.8/site-packages/cheroot/ssl/pyopenssl.py
new file mode 100644
index 0000000000000000000000000000000000000000..adc9a1bacba03406488b61aaace3d427b39ad648
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/ssl/pyopenssl.py
@@ -0,0 +1,382 @@
+"""
+A library for integrating :doc:`pyOpenSSL <pyopenssl:index>` with Cheroot.
+
+The :py:mod:`OpenSSL <pyopenssl:OpenSSL>` module must be importable
+for SSL/TLS/HTTPS functionality.
+You can obtain it from `here <https://github.com/pyca/pyopenssl>`_.
+
+To use this module, set :py:attr:`HTTPServer.ssl_adapter
+<cheroot.server.HTTPServer.ssl_adapter>` to an instance of
+:py:class:`ssl.Adapter <cheroot.ssl.Adapter>`.
+There are two ways to use :abbr:`TLS (Transport-Level Security)`:
+
+Method One
+----------
+
+ * :py:attr:`ssl_adapter.context
+ <cheroot.ssl.pyopenssl.pyOpenSSLAdapter.context>`: an instance of
+ :py:class:`SSL.Context <pyopenssl:OpenSSL.SSL.Context>`.
+
+If this is not None, it is assumed to be an :py:class:`SSL.Context
+<pyopenssl:OpenSSL.SSL.Context>` instance, and will be passed to
+:py:class:`SSL.Connection <pyopenssl:OpenSSL.SSL.Connection>` on bind().
+The developer is responsible for forming a valid :py:class:`Context
+<pyopenssl:OpenSSL.SSL.Context>` object. This
+approach is to be preferred for more flexibility, e.g. if the cert and
+key are streams instead of files, or need decryption, or
+:py:data:`SSL.SSLv3_METHOD <pyopenssl:OpenSSL.SSL.SSLv3_METHOD>`
+is desired instead of the default :py:data:`SSL.SSLv23_METHOD
+<pyopenssl:OpenSSL.SSL.SSLv3_METHOD>`, etc. Consult
+the :doc:`pyOpenSSL <pyopenssl:api/ssl>` documentation for
+complete options.
+
+Method Two (shortcut)
+---------------------
+
+ * :py:attr:`ssl_adapter.certificate
+ <cheroot.ssl.pyopenssl.pyOpenSSLAdapter.certificate>`: the file name
+ of the server's TLS certificate.
+ * :py:attr:`ssl_adapter.private_key
+ <cheroot.ssl.pyopenssl.pyOpenSSLAdapter.private_key>`: the file name
+ of the server's private key file.
+
+Both are :py:data:`None` by default. If :py:attr:`ssl_adapter.context
+<cheroot.ssl.pyopenssl.pyOpenSSLAdapter.context>` is :py:data:`None`,
+but ``.private_key`` and ``.certificate`` are both given and valid, they
+will be read, and the context will be automatically created from them.
+
+.. spelling::
+
+ pyopenssl
+"""
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+import socket
+import sys
+import threading
+import time
+
+import six
+
+try:
+ import OpenSSL.version
+ from OpenSSL import SSL
+ from OpenSSL import crypto
+
+ try:
+ ssl_conn_type = SSL.Connection
+ except AttributeError:
+ ssl_conn_type = SSL.ConnectionType
+except ImportError:
+ SSL = None
+
+from . import Adapter
+from .. import errors, server as cheroot_server
+from ..makefile import StreamReader, StreamWriter
+
+
+class SSLFileobjectMixin:
+ """Base mixin for a TLS socket stream."""
+
+ ssl_timeout = 3
+ ssl_retry = .01
+
+ # FIXME:
+ def _safe_call(self, is_reader, call, *args, **kwargs): # noqa: C901
+ """Wrap the given call with TLS error-trapping.
+
+ is_reader: if False EOF errors will be raised. If True, EOF errors
+ will return "" (to emulate normal sockets).
+ """
+ start = time.time()
+ while True:
+ try:
+ return call(*args, **kwargs)
+ except SSL.WantReadError:
+ # Sleep and try again. This is dangerous, because it means
+ # the rest of the stack has no way of differentiating
+ # between a "new handshake" error and "client dropped".
+ # Note this isn't an endless loop: there's a timeout below.
+ # Ref: https://stackoverflow.com/a/5133568/595220
+ time.sleep(self.ssl_retry)
+ except SSL.WantWriteError:
+ time.sleep(self.ssl_retry)
+ except SSL.SysCallError as e:
+ if is_reader and e.args == (-1, 'Unexpected EOF'):
+ return b''
+
+ errnum = e.args[0]
+ if is_reader and errnum in errors.socket_errors_to_ignore:
+ return b''
+ raise socket.error(errnum)
+ except SSL.Error as e:
+ if is_reader and e.args == (-1, 'Unexpected EOF'):
+ return b''
+
+ thirdarg = None
+ try:
+ thirdarg = e.args[0][0][2]
+ except IndexError:
+ pass
+
+ if thirdarg == 'http request':
+ # The client is talking HTTP to an HTTPS server.
+ raise errors.NoSSLError()
+
+ raise errors.FatalSSLAlert(*e.args)
+
+ if time.time() - start > self.ssl_timeout:
+ raise socket.timeout('timed out')
+
+ def recv(self, size):
+ """Receive message of a size from the socket."""
+ return self._safe_call(
+ True,
+ super(SSLFileobjectMixin, self).recv,
+ size,
+ )
+
+ def readline(self, size=-1):
+ """Receive message of a size from the socket.
+
+ Matches the following interface:
+ https://docs.python.org/3/library/io.html#io.IOBase.readline
+ """
+ return self._safe_call(
+ True,
+ super(SSLFileobjectMixin, self).readline,
+ size,
+ )
+
+ def sendall(self, *args, **kwargs):
+ """Send whole message to the socket."""
+ return self._safe_call(
+ False,
+ super(SSLFileobjectMixin, self).sendall,
+ *args, **kwargs
+ )
+
+ def send(self, *args, **kwargs):
+ """Send some part of message to the socket."""
+ return self._safe_call(
+ False,
+ super(SSLFileobjectMixin, self).send,
+ *args, **kwargs
+ )
+
+
+class SSLFileobjectStreamReader(SSLFileobjectMixin, StreamReader):
+ """SSL file object attached to a socket object."""
+
+
+class SSLFileobjectStreamWriter(SSLFileobjectMixin, StreamWriter):
+ """SSL file object attached to a socket object."""
+
+
+class SSLConnectionProxyMeta:
+ """Metaclass for generating a bunch of proxy methods."""
+
+ def __new__(mcl, name, bases, nmspc):
+ """Attach a list of proxy methods to a new class."""
+ proxy_methods = (
+ 'get_context', 'pending', 'send', 'write', 'recv', 'read',
+ 'renegotiate', 'bind', 'listen', 'connect', 'accept',
+ 'setblocking', 'fileno', 'close', 'get_cipher_list',
+ 'getpeername', 'getsockname', 'getsockopt', 'setsockopt',
+ 'makefile', 'get_app_data', 'set_app_data', 'state_string',
+ 'sock_shutdown', 'get_peer_certificate', 'want_read',
+ 'want_write', 'set_connect_state', 'set_accept_state',
+ 'connect_ex', 'sendall', 'settimeout', 'gettimeout',
+ 'shutdown',
+ )
+ proxy_methods_no_args = (
+ 'shutdown',
+ )
+
+ proxy_props = (
+ 'family',
+ )
+
+ def lock_decorator(method):
+ """Create a proxy method for a new class."""
+ def proxy_wrapper(self, *args):
+ self._lock.acquire()
+ try:
+ new_args = (
+ args[:] if method not in proxy_methods_no_args else []
+ )
+ return getattr(self._ssl_conn, method)(*new_args)
+ finally:
+ self._lock.release()
+ return proxy_wrapper
+ for m in proxy_methods:
+ nmspc[m] = lock_decorator(m)
+ nmspc[m].__name__ = m
+
+ def make_property(property_):
+ """Create a proxy method for a new class."""
+ def proxy_prop_wrapper(self):
+ return getattr(self._ssl_conn, property_)
+ proxy_prop_wrapper.__name__ = property_
+ return property(proxy_prop_wrapper)
+ for p in proxy_props:
+ nmspc[p] = make_property(p)
+
+ # Doesn't work via super() for some reason.
+ # Falling back to type() instead:
+ return type(name, bases, nmspc)
+
+
+@six.add_metaclass(SSLConnectionProxyMeta)
+class SSLConnection:
+ r"""A thread-safe wrapper for an ``SSL.Connection``.
+
+ :param tuple args: the arguments to create the wrapped \
+ :py:class:`SSL.Connection(*args) \
+ <pyopenssl:OpenSSL.SSL.Connection>`
+ """
+
+ def __init__(self, *args):
+ """Initialize SSLConnection instance."""
+ self._ssl_conn = SSL.Connection(*args)
+ self._lock = threading.RLock()
+
+
+class pyOpenSSLAdapter(Adapter):
+ """A wrapper for integrating pyOpenSSL with Cheroot."""
+
+ certificate = None
+ """The file name of the server's TLS certificate."""
+
+ private_key = None
+ """The file name of the server's private key file."""
+
+ certificate_chain = None
+ """Optional. The file name of CA's intermediate certificate bundle.
+
+ This is needed for cheaper "chained root" TLS certificates,
+ and should be left as :py:data:`None` if not required."""
+
+ context = None
+ """
+ An instance of :py:class:`SSL.Context <pyopenssl:OpenSSL.SSL.Context>`.
+ """
+
+ ciphers = None
+ """The ciphers list of TLS."""
+
+ def __init__(
+ self, certificate, private_key, certificate_chain=None,
+ ciphers=None,
+ ):
+ """Initialize OpenSSL Adapter instance."""
+ if SSL is None:
+ raise ImportError('You must install pyOpenSSL to use HTTPS.')
+
+ super(pyOpenSSLAdapter, self).__init__(
+ certificate, private_key, certificate_chain, ciphers,
+ )
+
+ self._environ = None
+
+ def bind(self, sock):
+ """Wrap and return the given socket."""
+ if self.context is None:
+ self.context = self.get_context()
+ conn = SSLConnection(self.context, sock)
+ self._environ = self.get_environ()
+ return conn
+
+ def wrap(self, sock):
+ """Wrap and return the given socket, plus WSGI environ entries."""
+ # pyOpenSSL doesn't perform the handshake until the first read/write
+ # forcing the handshake to complete tends to result in the connection
+ # closing so we can't reliably access protocol/client cert for the env
+ return sock, self._environ.copy()
+
+ def get_context(self):
+ """Return an ``SSL.Context`` from self attributes.
+
+ Ref: :py:class:`SSL.Context <pyopenssl:OpenSSL.SSL.Context>`
+ """
+ # See https://code.activestate.com/recipes/442473/
+ c = SSL.Context(SSL.SSLv23_METHOD)
+ c.use_privatekey_file(self.private_key)
+ if self.certificate_chain:
+ c.load_verify_locations(self.certificate_chain)
+ c.use_certificate_file(self.certificate)
+ return c
+
+ def get_environ(self):
+ """Return WSGI environ entries to be merged into each request."""
+ ssl_environ = {
+ 'wsgi.url_scheme': 'https',
+ 'HTTPS': 'on',
+ 'SSL_VERSION_INTERFACE': '%s %s/%s Python/%s' % (
+ cheroot_server.HTTPServer.version,
+ OpenSSL.version.__title__, OpenSSL.version.__version__,
+ sys.version,
+ ),
+ 'SSL_VERSION_LIBRARY': SSL.SSLeay_version(
+ SSL.SSLEAY_VERSION,
+ ).decode(),
+ }
+
+ if self.certificate:
+ # Server certificate attributes
+ with open(self.certificate, 'rb') as cert_file:
+ cert = crypto.load_certificate(
+ crypto.FILETYPE_PEM, cert_file.read(),
+ )
+
+ ssl_environ.update({
+ 'SSL_SERVER_M_VERSION': cert.get_version(),
+ 'SSL_SERVER_M_SERIAL': cert.get_serial_number(),
+ # 'SSL_SERVER_V_START':
+ # Validity of server's certificate (start time),
+ # 'SSL_SERVER_V_END':
+ # Validity of server's certificate (end time),
+ })
+
+ for prefix, dn in [
+ ('I', cert.get_issuer()),
+ ('S', cert.get_subject()),
+ ]:
+ # X509Name objects don't seem to have a way to get the
+ # complete DN string. Use str() and slice it instead,
+ # because str(dn) == "<X509Name object '/C=US/ST=...'>"
+ dnstr = str(dn)[18:-2]
+
+ wsgikey = 'SSL_SERVER_%s_DN' % prefix
+ ssl_environ[wsgikey] = dnstr
+
+ # The DN should be of the form: /k1=v1/k2=v2, but we must allow
+ # for any value to contain slashes itself (in a URL).
+ while dnstr:
+ pos = dnstr.rfind('=')
+ dnstr, value = dnstr[:pos], dnstr[pos + 1:]
+ pos = dnstr.rfind('/')
+ dnstr, key = dnstr[:pos], dnstr[pos + 1:]
+ if key and value:
+ wsgikey = 'SSL_SERVER_%s_DN_%s' % (prefix, key)
+ ssl_environ[wsgikey] = value
+
+ return ssl_environ
+
+ def makefile(self, sock, mode='r', bufsize=-1):
+ """Return socket file object."""
+ cls = (
+ SSLFileobjectStreamReader
+ if 'r' in mode else
+ SSLFileobjectStreamWriter
+ )
+ if SSL and isinstance(sock, ssl_conn_type):
+ wrapped_socket = cls(sock, mode, bufsize)
+ wrapped_socket.ssl_timeout = sock.gettimeout()
+ return wrapped_socket
+ # This is from past:
+ # TODO: figure out what it's meant for
+ else:
+ return cheroot_server.CP_fileobject(sock, mode, bufsize)
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/ssl/pyopenssl.pyi b/monEnvTP/lib/python3.8/site-packages/cheroot/ssl/pyopenssl.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..d5b93471ab894952c70b57d773969c52c2fa7b62
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/ssl/pyopenssl.pyi
@@ -0,0 +1,30 @@
+from . import Adapter
+from ..makefile import StreamReader, StreamWriter
+from OpenSSL import SSL
+from typing import Any
+
+ssl_conn_type: SSL.Connection
+
+class SSLFileobjectMixin:
+ ssl_timeout: int
+ ssl_retry: float
+ def recv(self, size): ...
+ def readline(self, size: int = ...): ...
+ def sendall(self, *args, **kwargs): ...
+ def send(self, *args, **kwargs): ...
+
+class SSLFileobjectStreamReader(SSLFileobjectMixin, StreamReader): ... # type:ignore
+class SSLFileobjectStreamWriter(SSLFileobjectMixin, StreamWriter): ... # type:ignore
+
+class SSLConnectionProxyMeta:
+ def __new__(mcl, name, bases, nmspc): ...
+
+class SSLConnection():
+ def __init__(self, *args) -> None: ...
+
+class pyOpenSSLAdapter(Adapter):
+ def __init__(self, certificate, private_key, certificate_chain: Any | None = ..., ciphers: Any | None = ...) -> None: ...
+ def bind(self, sock): ...
+ def wrap(self, sock): ...
+ def get_environ(self): ...
+ def makefile(self, sock, mode: str = ..., bufsize: int = ...): ...
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/test/__init__.py b/monEnvTP/lib/python3.8/site-packages/cheroot/test/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e2a7b348e7b57c820a8824cf85fd4071822d9eac
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/test/__init__.py
@@ -0,0 +1 @@
+"""Cheroot test suite."""
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c882f394815b339bc781dac93cab09c8aa14269f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/_pytest_plugin.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/_pytest_plugin.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6ac66a6b1ee1e228ab33b1e965e63fc9ff173367
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/_pytest_plugin.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/conftest.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/conftest.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2800ca4db11d754cf2fa6bea9e4671e3b97293c2
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/conftest.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/helper.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/helper.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0d333d42ad489565a9e86eb189c458d9bebf83ba
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/helper.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/test__compat.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/test__compat.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..899c7bfe0dd8246af11956b1acd141deb4251aa3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/test__compat.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/test_cli.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/test_cli.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..76423f034184a40858dd43fa66b2c3d07b899569
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/test_cli.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/test_conn.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/test_conn.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..15e8905f0cb11449830ea57a7dc2cbb9297830d2
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/test_conn.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/test_core.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/test_core.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9119c1994129307be3425b10a41d9e49f8ea222f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/test_core.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/test_dispatch.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/test_dispatch.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..64664b89a4c317a95f9a89992d947232f681bfe4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/test_dispatch.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/test_errors.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/test_errors.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..234413b81ae7bfaea89eff93aaeb0763de558414
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/test_errors.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/test_makefile.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/test_makefile.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b3e9eabaca378885427d2f673b4b1976142c7f96
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/test_makefile.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/test_server.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/test_server.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4c70fc0ace8ec88da2e068d63f72b917feb80cf3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/test_server.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/test_ssl.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/test_ssl.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..664e2e8e9898a0736d9531c75af6d54da014a05e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/test_ssl.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/test_wsgi.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/test_wsgi.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c40580665554a5948c800fba84efa901f15cb203
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/test_wsgi.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/webtest.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/webtest.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7ad0079bf4aaeff558ef5c37d53a985769cc87d9
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cheroot/test/__pycache__/webtest.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/test/_pytest_plugin.py b/monEnvTP/lib/python3.8/site-packages/cheroot/test/_pytest_plugin.py
new file mode 100644
index 0000000000000000000000000000000000000000..012211df23dcf0f32a7a80390cd584db54ef177f
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/test/_pytest_plugin.py
@@ -0,0 +1,50 @@
+"""Local pytest plugin.
+
+Contains hooks, which are tightly bound to the Cheroot framework
+itself, useless for end-users' app testing.
+"""
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+import pytest
+
+
+pytest_version = tuple(map(int, pytest.__version__.split('.')))
+
+
+def pytest_load_initial_conftests(early_config, parser, args):
+ """Drop unfilterable warning ignores."""
+ if pytest_version < (6, 2, 0):
+ return
+
+ # pytest>=6.2.0 under Python 3.8:
+ # Refs:
+ # * https://docs.pytest.org/en/stable/usage.html#unraisable
+ # * https://github.com/pytest-dev/pytest/issues/5299
+ early_config._inicache['filterwarnings'].extend((
+ 'ignore:Exception in thread CP Server Thread-:'
+ 'pytest.PytestUnhandledThreadExceptionWarning:_pytest.threadexception',
+ 'ignore:Exception in thread Thread-:'
+ 'pytest.PytestUnhandledThreadExceptionWarning:_pytest.threadexception',
+ 'ignore:Exception ignored in. '
+ '<socket.socket fd=-1, family=AddressFamily.AF_INET, '
+ 'type=SocketKind.SOCK_STREAM, proto=.:'
+ 'pytest.PytestUnraisableExceptionWarning:_pytest.unraisableexception',
+ 'ignore:Exception ignored in. '
+ '<socket.socket fd=-1, family=AddressFamily.AF_INET6, '
+ 'type=SocketKind.SOCK_STREAM, proto=.:'
+ 'pytest.PytestUnraisableExceptionWarning:_pytest.unraisableexception',
+ 'ignore:Exception ignored in. '
+ '<socket.socket fd=-1, family=AF_INET, '
+ 'type=SocketKind.SOCK_STREAM, proto=.:'
+ 'pytest.PytestUnraisableExceptionWarning:_pytest.unraisableexception',
+ 'ignore:Exception ignored in. '
+ '<socket.socket fd=-1, family=AF_INET6, '
+ 'type=SocketKind.SOCK_STREAM, proto=.:'
+ 'pytest.PytestUnraisableExceptionWarning:_pytest.unraisableexception',
+ 'ignore:Exception ignored in. '
+ '<ssl.SSLSocket fd=-1, family=AddressFamily.AF_UNIX, '
+ 'type=SocketKind.SOCK_STREAM, proto=.:'
+ 'pytest.PytestUnraisableExceptionWarning:_pytest.unraisableexception',
+ ))
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/test/conftest.py b/monEnvTP/lib/python3.8/site-packages/cheroot/test/conftest.py
new file mode 100644
index 0000000000000000000000000000000000000000..e12f8a73c4f81bfe4a68d984cfcbd84bdb308344
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/test/conftest.py
@@ -0,0 +1,71 @@
+"""Pytest configuration module.
+
+Contains fixtures, which are tightly bound to the Cheroot framework
+itself, useless for end-users' app testing.
+"""
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type # pylint: disable=invalid-name
+
+import threading
+import time
+
+import pytest
+
+from ..server import Gateway, HTTPServer
+from ..testing import ( # noqa: F401 # pylint: disable=unused-import
+ native_server, wsgi_server,
+)
+from ..testing import get_server_client
+
+
+@pytest.fixture
+# pylint: disable=redefined-outer-name
+def wsgi_server_client(wsgi_server): # noqa: F811
+ """Create a test client out of given WSGI server."""
+ return get_server_client(wsgi_server)
+
+
+@pytest.fixture
+# pylint: disable=redefined-outer-name
+def native_server_client(native_server): # noqa: F811
+ """Create a test client out of given HTTP server."""
+ return get_server_client(native_server)
+
+
+@pytest.fixture
+def http_server():
+ """Provision a server creator as a fixture."""
+ def start_srv():
+ bind_addr = yield
+ if bind_addr is None:
+ return
+ httpserver = make_http_server(bind_addr)
+ yield httpserver
+ yield httpserver
+
+ srv_creator = iter(start_srv())
+ next(srv_creator) # pylint: disable=stop-iteration-return
+ yield srv_creator
+ try:
+ while True:
+ httpserver = next(srv_creator)
+ if httpserver is not None:
+ httpserver.stop()
+ except StopIteration:
+ pass
+
+
+def make_http_server(bind_addr):
+ """Create and start an HTTP server bound to ``bind_addr``."""
+ httpserver = HTTPServer(
+ bind_addr=bind_addr,
+ gateway=Gateway,
+ )
+
+ threading.Thread(target=httpserver.safe_start).start()
+
+ while not httpserver.ready:
+ time.sleep(0.1)
+
+ return httpserver
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/test/helper.py b/monEnvTP/lib/python3.8/site-packages/cheroot/test/helper.py
new file mode 100644
index 0000000000000000000000000000000000000000..bdf2975593426a4650cf349a605267370681a28e
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/test/helper.py
@@ -0,0 +1,174 @@
+"""A library of helper functions for the Cheroot test suite."""
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+import datetime
+import logging
+import os
+import sys
+import time
+import threading
+import types
+
+from six.moves import http_client
+
+import six
+
+import cheroot.server
+import cheroot.wsgi
+
+from cheroot.test import webtest
+
+log = logging.getLogger(__name__)
+thisdir = os.path.abspath(os.path.dirname(__file__))
+
+
+config = {
+ 'bind_addr': ('127.0.0.1', 54583),
+ 'server': 'wsgi',
+ 'wsgi_app': None,
+}
+
+
+class CherootWebCase(webtest.WebCase):
+ """Helper class for a web app test suite."""
+
+ script_name = ''
+ scheme = 'http'
+
+ available_servers = {
+ 'wsgi': cheroot.wsgi.Server,
+ 'native': cheroot.server.HTTPServer,
+ }
+
+ @classmethod
+ def setup_class(cls):
+ """Create and run one HTTP server per class."""
+ conf = config.copy()
+ conf.update(getattr(cls, 'config', {}))
+
+ s_class = conf.pop('server', 'wsgi')
+ server_factory = cls.available_servers.get(s_class)
+ if server_factory is None:
+ raise RuntimeError('Unknown server in config: %s' % conf['server'])
+ cls.httpserver = server_factory(**conf)
+
+ cls.HOST, cls.PORT = cls.httpserver.bind_addr
+ if cls.httpserver.ssl_adapter is None:
+ ssl = ''
+ cls.scheme = 'http'
+ else:
+ ssl = ' (ssl)'
+ cls.HTTP_CONN = http_client.HTTPSConnection
+ cls.scheme = 'https'
+
+ v = sys.version.split()[0]
+ log.info('Python version used to run this test script: %s' % v)
+ log.info('Cheroot version: %s' % cheroot.__version__)
+ log.info('HTTP server version: %s%s' % (cls.httpserver.protocol, ssl))
+ log.info('PID: %s' % os.getpid())
+
+ if hasattr(cls, 'setup_server'):
+ # Clear the wsgi server so that
+ # it can be updated with the new root
+ cls.setup_server()
+ cls.start()
+
+ @classmethod
+ def teardown_class(cls):
+ """Cleanup HTTP server."""
+ if hasattr(cls, 'setup_server'):
+ cls.stop()
+
+ @classmethod
+ def start(cls):
+ """Load and start the HTTP server."""
+ threading.Thread(target=cls.httpserver.safe_start).start()
+ while not cls.httpserver.ready:
+ time.sleep(0.1)
+
+ @classmethod
+ def stop(cls):
+ """Terminate HTTP server."""
+ cls.httpserver.stop()
+ td = getattr(cls, 'teardown', None)
+ if td:
+ td()
+
+ date_tolerance = 2
+
+ def assertEqualDates(self, dt1, dt2, seconds=None):
+ """Assert ``abs(dt1 - dt2)`` is within ``Y`` seconds."""
+ if seconds is None:
+ seconds = self.date_tolerance
+
+ if dt1 > dt2:
+ diff = dt1 - dt2
+ else:
+ diff = dt2 - dt1
+ if not diff < datetime.timedelta(seconds=seconds):
+ raise AssertionError(
+ '%r and %r are not within %r seconds.' %
+ (dt1, dt2, seconds),
+ )
+
+
+class Request:
+ """HTTP request container."""
+
+ def __init__(self, environ):
+ """Initialize HTTP request."""
+ self.environ = environ
+
+
+class Response:
+ """HTTP response container."""
+
+ def __init__(self):
+ """Initialize HTTP response."""
+ self.status = '200 OK'
+ self.headers = {'Content-Type': 'text/html'}
+ self.body = None
+
+ def output(self):
+ """Generate iterable response body object."""
+ if self.body is None:
+ return []
+ elif isinstance(self.body, six.text_type):
+ return [self.body.encode('iso-8859-1')]
+ elif isinstance(self.body, six.binary_type):
+ return [self.body]
+ else:
+ return [x.encode('iso-8859-1') for x in self.body]
+
+
+class Controller:
+ """WSGI app for tests."""
+
+ def __call__(self, environ, start_response):
+ """WSGI request handler."""
+ req, resp = Request(environ), Response()
+ try:
+ # Python 3 supports unicode attribute names
+ # Python 2 encodes them
+ handler = self.handlers[environ['PATH_INFO']]
+ except KeyError:
+ resp.status = '404 Not Found'
+ else:
+ output = handler(req, resp)
+ if (
+ output is not None
+ and not any(
+ resp.status.startswith(status_code)
+ for status_code in ('204', '304')
+ )
+ ):
+ resp.body = output
+ try:
+ resp.headers.setdefault('Content-Length', str(len(output)))
+ except TypeError:
+ if not isinstance(output, types.GeneratorType):
+ raise
+ start_response(resp.status, resp.headers.items())
+ return resp.output()
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/test/test__compat.py b/monEnvTP/lib/python3.8/site-packages/cheroot/test/test__compat.py
new file mode 100644
index 0000000000000000000000000000000000000000..35c6280eb6715cae42f2a78fff8089266e73dd92
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/test/test__compat.py
@@ -0,0 +1,66 @@
+# -*- coding: utf-8 -*-
+"""Test suite for cross-python compatibility helpers."""
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+import pytest
+import six
+
+from cheroot._compat import extract_bytes, memoryview, ntob, ntou, bton
+
+
+@pytest.mark.parametrize(
+ ('func', 'inp', 'out'),
+ (
+ (ntob, 'bar', b'bar'),
+ (ntou, 'bar', u'bar'),
+ (bton, b'bar', 'bar'),
+ ),
+)
+def test_compat_functions_positive(func, inp, out):
+ """Check that compatibility functions work with correct input."""
+ assert func(inp, encoding='utf-8') == out
+
+
+@pytest.mark.parametrize(
+ 'func',
+ (
+ ntob,
+ ntou,
+ ),
+)
+def test_compat_functions_negative_nonnative(func):
+ """Check that compatibility functions fail loudly for incorrect input."""
+ non_native_test_str = u'bar' if six.PY2 else b'bar'
+ with pytest.raises(TypeError):
+ func(non_native_test_str, encoding='utf-8')
+
+
+def test_ntou_escape():
+ """Check that ``ntou`` supports escape-encoding under Python 2."""
+ expected = u'hišřії'
+ actual = ntou('hi\u0161\u0159\u0456\u0457', encoding='escape')
+ assert actual == expected
+
+
+@pytest.mark.parametrize(
+ ('input_argument', 'expected_result'),
+ (
+ (b'qwerty', b'qwerty'),
+ (memoryview(b'asdfgh'), b'asdfgh'),
+ ),
+)
+def test_extract_bytes(input_argument, expected_result):
+ """Check that legitimate inputs produce bytes."""
+ assert extract_bytes(input_argument) == expected_result
+
+
+def test_extract_bytes_invalid():
+ """Ensure that invalid input causes exception to be raised."""
+ with pytest.raises(
+ ValueError,
+ match=r'^extract_bytes\(\) only accepts bytes '
+ 'and memoryview/buffer$',
+ ):
+ extract_bytes(u'some юнікод їїї')
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/test/test_cli.py b/monEnvTP/lib/python3.8/site-packages/cheroot/test/test_cli.py
new file mode 100644
index 0000000000000000000000000000000000000000..91edc023f6df4977feefdb11cd4fb0836457173e
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/test/test_cli.py
@@ -0,0 +1,98 @@
+"""Tests to verify the command line interface.
+
+.. spelling::
+
+ cli
+"""
+# -*- coding: utf-8 -*-
+# vim: set fileencoding=utf-8 :
+import sys
+
+import six
+import pytest
+
+from cheroot.cli import (
+ Application,
+ parse_wsgi_bind_addr,
+)
+
+
+@pytest.mark.parametrize(
+ ('raw_bind_addr', 'expected_bind_addr'),
+ (
+ # tcp/ip
+ ('192.168.1.1:80', ('192.168.1.1', 80)),
+ # ipv6 ips has to be enclosed in brakets when specified in url form
+ ('[::1]:8000', ('::1', 8000)),
+ ('localhost:5000', ('localhost', 5000)),
+ # this is a valid input, but foo gets discarted
+ ('foo@bar:5000', ('bar', 5000)),
+ ('foo', ('foo', None)),
+ ('123456789', ('123456789', None)),
+ # unix sockets
+ ('/tmp/cheroot.sock', '/tmp/cheroot.sock'),
+ ('/tmp/some-random-file-name', '/tmp/some-random-file-name'),
+ # abstract sockets
+ ('@cheroot', '\x00cheroot'),
+ ),
+)
+def test_parse_wsgi_bind_addr(raw_bind_addr, expected_bind_addr):
+ """Check the parsing of the --bind option.
+
+ Verify some of the supported addresses and the expected return value.
+ """
+ assert parse_wsgi_bind_addr(raw_bind_addr) == expected_bind_addr
+
+
+@pytest.fixture
+def wsgi_app(monkeypatch):
+ """Return a WSGI app stub."""
+ class WSGIAppMock:
+ """Mock of a wsgi module."""
+
+ def application(self):
+ """Empty application method.
+
+ Default method to be called when no specific callable
+ is defined in the wsgi application identifier.
+
+ It has an empty body because we are expecting to verify that
+ the same method is return no the actual execution of it.
+ """
+
+ def main(self):
+ """Empty custom method (callable) inside the mocked WSGI app.
+
+ It has an empty body because we are expecting to verify that
+ the same method is return no the actual execution of it.
+ """
+ app = WSGIAppMock()
+ # patch sys.modules, to include the an instance of WSGIAppMock
+ # under a specific namespace
+ if six.PY2:
+ # python2 requires the previous namespaces to be part of sys.modules
+ # (e.g. for 'a.b.c' we need to insert 'a', 'a.b' and 'a.b.c')
+ # otherwise it fails, we're setting the same instance on each level,
+ # we don't really care about those, just the last one.
+ monkeypatch.setitem(sys.modules, 'mypkg', app)
+ monkeypatch.setitem(sys.modules, 'mypkg.wsgi', app)
+ return app
+
+
+@pytest.mark.parametrize(
+ ('app_name', 'app_method'),
+ (
+ (None, 'application'),
+ ('application', 'application'),
+ ('main', 'main'),
+ ),
+)
+# pylint: disable=invalid-name
+def test_Aplication_resolve(app_name, app_method, wsgi_app):
+ """Check the wsgi application name conversion."""
+ if app_name is None:
+ wsgi_app_spec = 'mypkg.wsgi'
+ else:
+ wsgi_app_spec = 'mypkg.wsgi:{app_name}'.format(**locals())
+ expected_app = getattr(wsgi_app, app_method)
+ assert Application.resolve(wsgi_app_spec).wsgi_app == expected_app
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/test/test_conn.py b/monEnvTP/lib/python3.8/site-packages/cheroot/test/test_conn.py
new file mode 100644
index 0000000000000000000000000000000000000000..da7adcacc73ffd7dfae530aad0981984918384a5
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/test/test_conn.py
@@ -0,0 +1,1303 @@
+"""Tests for TCP connection handling, including proper and timely close."""
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+import errno
+import socket
+import time
+import logging
+import traceback as traceback_
+from collections import namedtuple
+
+from six.moves import range, http_client, urllib
+
+import six
+import pytest
+from jaraco.text import trim, unwrap
+
+from cheroot.test import helper, webtest
+from cheroot._compat import IS_CI, IS_MACOS, IS_PYPY, IS_WINDOWS
+import cheroot.server
+
+
+IS_SLOW_ENV = IS_MACOS or IS_WINDOWS
+
+
+timeout = 1
+pov = 'pPeErRsSiIsStTeEnNcCeE oOfF vViIsSiIoOnN'
+
+
+class Controller(helper.Controller):
+ """Controller for serving WSGI apps."""
+
+ def hello(req, resp):
+ """Render Hello world."""
+ return 'Hello, world!'
+
+ def pov(req, resp):
+ """Render ``pov`` value."""
+ return pov
+
+ def stream(req, resp):
+ """Render streaming response."""
+ if 'set_cl' in req.environ['QUERY_STRING']:
+ resp.headers['Content-Length'] = str(10)
+
+ def content():
+ for x in range(10):
+ yield str(x)
+
+ return content()
+
+ def upload(req, resp):
+ """Process file upload and render thank."""
+ if not req.environ['REQUEST_METHOD'] == 'POST':
+ raise AssertionError(
+ "'POST' != request.method %r" %
+ req.environ['REQUEST_METHOD'],
+ )
+ return "thanks for '%s'" % req.environ['wsgi.input'].read()
+
+ def custom_204(req, resp):
+ """Render response with status 204."""
+ resp.status = '204'
+ return 'Code = 204'
+
+ def custom_304(req, resp):
+ """Render response with status 304."""
+ resp.status = '304'
+ return 'Code = 304'
+
+ def err_before_read(req, resp):
+ """Render response with status 500."""
+ resp.status = '500 Internal Server Error'
+ return 'ok'
+
+ def one_megabyte_of_a(req, resp):
+ """Render 1MB response."""
+ return ['a' * 1024] * 1024
+
+ def wrong_cl_buffered(req, resp):
+ """Render buffered response with invalid length value."""
+ resp.headers['Content-Length'] = '5'
+ return 'I have too many bytes'
+
+ def wrong_cl_unbuffered(req, resp):
+ """Render unbuffered response with invalid length value."""
+ resp.headers['Content-Length'] = '5'
+ return ['I too', ' have too many bytes']
+
+ def _munge(string):
+ """Encode PATH_INFO correctly depending on Python version.
+
+ WSGI 1.0 is a mess around unicode. Create endpoints
+ that match the PATH_INFO that it produces.
+ """
+ if six.PY2:
+ return string
+ return string.encode('utf-8').decode('latin-1')
+
+ handlers = {
+ '/hello': hello,
+ '/pov': pov,
+ '/page1': pov,
+ '/page2': pov,
+ '/page3': pov,
+ '/stream': stream,
+ '/upload': upload,
+ '/custom/204': custom_204,
+ '/custom/304': custom_304,
+ '/err_before_read': err_before_read,
+ '/one_megabyte_of_a': one_megabyte_of_a,
+ '/wrong_cl_buffered': wrong_cl_buffered,
+ '/wrong_cl_unbuffered': wrong_cl_unbuffered,
+ }
+
+
+class ErrorLogMonitor:
+ """Mock class to access the server error_log calls made by the server."""
+
+ ErrorLogCall = namedtuple('ErrorLogCall', ['msg', 'level', 'traceback'])
+
+ def __init__(self):
+ """Initialize the server error log monitor/interceptor.
+
+ If you need to ignore a particular error message use the property
+ ``ignored_msgs`` by appending to the list the expected error messages.
+ """
+ self.calls = []
+ # to be used the the teardown validation
+ self.ignored_msgs = []
+
+ def __call__(self, msg='', level=logging.INFO, traceback=False):
+ """Intercept the call to the server error_log method."""
+ if traceback:
+ tblines = traceback_.format_exc()
+ else:
+ tblines = ''
+ self.calls.append(ErrorLogMonitor.ErrorLogCall(msg, level, tblines))
+
+
+@pytest.fixture
+def raw_testing_server(wsgi_server_client):
+ """Attach a WSGI app to the given server and preconfigure it."""
+ app = Controller()
+
+ def _timeout(req, resp):
+ return str(wsgi_server.timeout)
+ app.handlers['/timeout'] = _timeout
+ wsgi_server = wsgi_server_client.server_instance
+ wsgi_server.wsgi_app = app
+ wsgi_server.max_request_body_size = 1001
+ wsgi_server.timeout = timeout
+ wsgi_server.server_client = wsgi_server_client
+ wsgi_server.keep_alive_conn_limit = 2
+
+ return wsgi_server
+
+
+@pytest.fixture
+def testing_server(raw_testing_server, monkeypatch):
+ """Modify the "raw" base server to monitor the error_log messages.
+
+ If you need to ignore a particular error message use the property
+ ``testing_server.error_log.ignored_msgs`` by appending to the list
+ the expected error messages.
+ """
+ # patch the error_log calls of the server instance
+ monkeypatch.setattr(raw_testing_server, 'error_log', ErrorLogMonitor())
+
+ yield raw_testing_server
+
+ # Teardown verification, in case that the server logged an
+ # error that wasn't notified to the client or we just made a mistake.
+ # pylint: disable=possibly-unused-variable
+ for c_msg, c_level, c_traceback in raw_testing_server.error_log.calls:
+ if c_level <= logging.WARNING:
+ continue
+
+ assert c_msg in raw_testing_server.error_log.ignored_msgs, (
+ 'Found error in the error log: '
+ "message = '{c_msg}', level = '{c_level}'\n"
+ '{c_traceback}'.format(**locals()),
+ )
+
+
+@pytest.fixture
+def test_client(testing_server):
+ """Get and return a test client out of the given server."""
+ return testing_server.server_client
+
+
+def header_exists(header_name, headers):
+ """Check that a header is present."""
+ return header_name.lower() in (k.lower() for (k, _) in headers)
+
+
+def header_has_value(header_name, header_value, headers):
+ """Check that a header with a given value is present."""
+ return header_name.lower() in (
+ k.lower() for (k, v) in headers
+ if v == header_value
+ )
+
+
+def test_HTTP11_persistent_connections(test_client):
+ """Test persistent HTTP/1.1 connections."""
+ # Initialize a persistent HTTP connection
+ http_connection = test_client.get_connection()
+ http_connection.auto_open = False
+ http_connection.connect()
+
+ # Make the first request and assert there's no "Connection: close".
+ status_line, actual_headers, actual_resp_body = test_client.get(
+ '/pov', http_conn=http_connection,
+ )
+ actual_status = int(status_line[:3])
+ assert actual_status == 200
+ assert status_line[4:] == 'OK'
+ assert actual_resp_body == pov.encode()
+ assert not header_exists('Connection', actual_headers)
+
+ # Make another request on the same connection.
+ status_line, actual_headers, actual_resp_body = test_client.get(
+ '/page1', http_conn=http_connection,
+ )
+ actual_status = int(status_line[:3])
+ assert actual_status == 200
+ assert status_line[4:] == 'OK'
+ assert actual_resp_body == pov.encode()
+ assert not header_exists('Connection', actual_headers)
+
+ # Test client-side close.
+ status_line, actual_headers, actual_resp_body = test_client.get(
+ '/page2', http_conn=http_connection,
+ headers=[('Connection', 'close')],
+ )
+ actual_status = int(status_line[:3])
+ assert actual_status == 200
+ assert status_line[4:] == 'OK'
+ assert actual_resp_body == pov.encode()
+ assert header_has_value('Connection', 'close', actual_headers)
+
+ # Make another request on the same connection, which should error.
+ with pytest.raises(http_client.NotConnected):
+ test_client.get('/pov', http_conn=http_connection)
+
+
+@pytest.mark.parametrize(
+ 'set_cl',
+ (
+ False, # Without Content-Length
+ True, # With Content-Length
+ ),
+)
+def test_streaming_11(test_client, set_cl):
+ """Test serving of streaming responses with HTTP/1.1 protocol."""
+ # Initialize a persistent HTTP connection
+ http_connection = test_client.get_connection()
+ http_connection.auto_open = False
+ http_connection.connect()
+
+ # Make the first request and assert there's no "Connection: close".
+ status_line, actual_headers, actual_resp_body = test_client.get(
+ '/pov', http_conn=http_connection,
+ )
+ actual_status = int(status_line[:3])
+ assert actual_status == 200
+ assert status_line[4:] == 'OK'
+ assert actual_resp_body == pov.encode()
+ assert not header_exists('Connection', actual_headers)
+
+ # Make another, streamed request on the same connection.
+ if set_cl:
+ # When a Content-Length is provided, the content should stream
+ # without closing the connection.
+ status_line, actual_headers, actual_resp_body = test_client.get(
+ '/stream?set_cl=Yes', http_conn=http_connection,
+ )
+ assert header_exists('Content-Length', actual_headers)
+ assert not header_has_value('Connection', 'close', actual_headers)
+ assert not header_exists('Transfer-Encoding', actual_headers)
+
+ assert actual_status == 200
+ assert status_line[4:] == 'OK'
+ assert actual_resp_body == b'0123456789'
+ else:
+ # When no Content-Length response header is provided,
+ # streamed output will either close the connection, or use
+ # chunked encoding, to determine transfer-length.
+ status_line, actual_headers, actual_resp_body = test_client.get(
+ '/stream', http_conn=http_connection,
+ )
+ assert not header_exists('Content-Length', actual_headers)
+ assert actual_status == 200
+ assert status_line[4:] == 'OK'
+ assert actual_resp_body == b'0123456789'
+
+ chunked_response = False
+ for k, v in actual_headers:
+ if k.lower() == 'transfer-encoding':
+ if str(v) == 'chunked':
+ chunked_response = True
+
+ if chunked_response:
+ assert not header_has_value('Connection', 'close', actual_headers)
+ else:
+ assert header_has_value('Connection', 'close', actual_headers)
+
+ # Make another request on the same connection, which should
+ # error.
+ with pytest.raises(http_client.NotConnected):
+ test_client.get('/pov', http_conn=http_connection)
+
+ # Try HEAD.
+ # See https://www.bitbucket.org/cherrypy/cherrypy/issue/864.
+ # TODO: figure out how can this be possible on an closed connection
+ # (chunked_response case)
+ status_line, actual_headers, actual_resp_body = test_client.head(
+ '/stream', http_conn=http_connection,
+ )
+ assert actual_status == 200
+ assert status_line[4:] == 'OK'
+ assert actual_resp_body == b''
+ assert not header_exists('Transfer-Encoding', actual_headers)
+
+
@pytest.mark.parametrize(
    'set_cl',
    (
        False,  # Without Content-Length
        True,  # With Content-Length
    ),
)
def test_streaming_10(test_client, set_cl):
    """Test serving of streaming responses with HTTP/1.0 protocol.

    With a ``Content-Length`` the server keeps the connection alive;
    without one it must close the connection after streaming the body.
    """
    original_server_protocol = test_client.server_instance.protocol
    test_client.server_instance.protocol = 'HTTP/1.0'

    # Restore the server protocol even when an assertion below fails,
    # so a failure here cannot leak HTTP/1.0 into later tests that
    # share the same server instance.
    try:
        # Initialize a persistent HTTP connection
        http_connection = test_client.get_connection()
        http_connection.auto_open = False
        http_connection.connect()

        # Make the first request and assert Keep-Alive.
        status_line, actual_headers, actual_resp_body = test_client.get(
            '/pov', http_conn=http_connection,
            headers=[('Connection', 'Keep-Alive')],
            protocol='HTTP/1.0',
        )
        actual_status = int(status_line[:3])
        assert actual_status == 200
        assert status_line[4:] == 'OK'
        assert actual_resp_body == pov.encode()
        assert header_has_value('Connection', 'Keep-Alive', actual_headers)

        # Make another, streamed request on the same connection.
        if set_cl:
            # When a Content-Length is provided, the content should
            # stream without closing the connection.
            status_line, actual_headers, actual_resp_body = test_client.get(
                '/stream?set_cl=Yes', http_conn=http_connection,
                headers=[('Connection', 'Keep-Alive')],
                protocol='HTTP/1.0',
            )
            actual_status = int(status_line[:3])
            assert actual_status == 200
            assert status_line[4:] == 'OK'
            assert actual_resp_body == b'0123456789'

            assert header_exists('Content-Length', actual_headers)
            assert header_has_value('Connection', 'Keep-Alive', actual_headers)
            assert not header_exists('Transfer-Encoding', actual_headers)
        else:
            # When a Content-Length is not provided,
            # the server should close the connection.
            status_line, actual_headers, actual_resp_body = test_client.get(
                '/stream', http_conn=http_connection,
                headers=[('Connection', 'Keep-Alive')],
                protocol='HTTP/1.0',
            )
            actual_status = int(status_line[:3])
            assert actual_status == 200
            assert status_line[4:] == 'OK'
            assert actual_resp_body == b'0123456789'

            assert not header_exists('Content-Length', actual_headers)
            assert not header_has_value(
                'Connection', 'Keep-Alive', actual_headers,
            )
            assert not header_exists('Transfer-Encoding', actual_headers)

        # Make another request on the same connection, which should error.
        with pytest.raises(http_client.NotConnected):
            test_client.get(
                '/pov', http_conn=http_connection,
                protocol='HTTP/1.0',
            )
    finally:
        test_client.server_instance.protocol = original_server_protocol
+
+
@pytest.mark.parametrize(
    'http_server_protocol',
    (
        'HTTP/1.0',
        pytest.param(
            'HTTP/1.1',
            marks=pytest.mark.xfail(
                IS_PYPY and IS_CI,
                reason='Fails under PyPy in CI for unknown reason',
                strict=False,
            ),
        ),
    ),
)
def test_keepalive(test_client, http_server_protocol):
    """Test Keep-Alive enabled connections."""
    original_server_protocol = test_client.server_instance.protocol
    test_client.server_instance.protocol = http_server_protocol

    http_client_protocol = 'HTTP/1.0'

    # Restore the server protocol even when an assertion below fails,
    # so that this test cannot poison subsequent tests sharing the
    # same server instance.
    try:
        # Initialize a persistent HTTP connection
        http_connection = test_client.get_connection()
        http_connection.auto_open = False
        http_connection.connect()

        # Test a normal HTTP/1.0 request: connections are not
        # persistent by default, so no Connection header is expected.
        status_line, actual_headers, actual_resp_body = test_client.get(
            '/page2',
            protocol=http_client_protocol,
        )
        actual_status = int(status_line[:3])
        assert actual_status == 200
        assert status_line[4:] == 'OK'
        assert actual_resp_body == pov.encode()
        assert not header_exists('Connection', actual_headers)

        # Test a keep-alive HTTP/1.0 request: the explicit
        # "Connection: Keep-Alive" must be echoed back together with
        # the server's advertised keep-alive timeout.
        status_line, actual_headers, actual_resp_body = test_client.get(
            '/page3', headers=[('Connection', 'Keep-Alive')],
            http_conn=http_connection, protocol=http_client_protocol,
        )
        actual_status = int(status_line[:3])
        assert actual_status == 200
        assert status_line[4:] == 'OK'
        assert actual_resp_body == pov.encode()
        assert header_has_value('Connection', 'Keep-Alive', actual_headers)
        assert header_has_value(
            'Keep-Alive',
            'timeout={test_client.server_instance.timeout}'.format(**locals()),
            actual_headers,
        )

        # Without the request header the keep-alive response headers
        # must disappear again.
        status_line, actual_headers, actual_resp_body = test_client.get(
            '/page3', http_conn=http_connection,
            protocol=http_client_protocol,
        )
        actual_status = int(status_line[:3])
        assert actual_status == 200
        assert status_line[4:] == 'OK'
        assert actual_resp_body == pov.encode()
        assert not header_exists('Connection', actual_headers)
        assert not header_exists('Keep-Alive', actual_headers)
    finally:
        test_client.server_instance.protocol = original_server_protocol
+
+
def test_keepalive_conn_management(test_client):
    """Test management of Keep-Alive connections."""
    # Save the server's own value instead of relying on the module-level
    # ``timeout`` global for restoration (the old code restored from the
    # global, silently coupling this test to module state).
    original_timeout = test_client.server_instance.timeout
    test_client.server_instance.timeout = 2

    def connection():
        # Initialize a persistent HTTP connection
        http_connection = test_client.get_connection()
        http_connection.auto_open = False
        http_connection.connect()
        return http_connection

    def request(conn, keepalive=True):
        # Issue a keep-alive request and assert the expected
        # presence/absence of keep-alive response headers.
        status_line, actual_headers, actual_resp_body = test_client.get(
            '/page3', headers=[('Connection', 'Keep-Alive')],
            http_conn=conn, protocol='HTTP/1.0',
        )
        actual_status = int(status_line[:3])
        assert actual_status == 200
        assert status_line[4:] == 'OK'
        assert actual_resp_body == pov.encode()
        if keepalive:
            assert header_has_value('Connection', 'Keep-Alive', actual_headers)
            assert header_has_value(
                'Keep-Alive',
                'timeout={test_client.server_instance.timeout}'.
                format(**locals()),
                actual_headers,
            )
        else:
            assert not header_exists('Connection', actual_headers)
            assert not header_exists('Keep-Alive', actual_headers)

    def check_server_idle_conn_count(count, timeout=1.0):
        # Poll (with a short sleep rather than busy-spinning) until the
        # server tracks the expected number of connections.
        deadline = time.time() + timeout
        while True:
            n = test_client.server_instance._connections._num_connections
            if n == count:
                return
            assert time.time() <= deadline, (
                'idle conn count mismatch, wanted {count}, got {n}'.
                format(**locals()),
            )
            time.sleep(0.01)

    disconnect_errors = (
        http_client.BadStatusLine,
        http_client.CannotSendRequest,
        http_client.NotConnected,
    )

    # Restore the original timeout even if an assertion fails mid-way.
    try:
        # Make a new connection.
        c1 = connection()
        request(c1)
        check_server_idle_conn_count(1)

        # Make a second one.
        c2 = connection()
        request(c2)
        check_server_idle_conn_count(2)

        # Reusing the first connection should still work.
        request(c1)
        check_server_idle_conn_count(2)

        # Creating a new connection should still work, but we should
        # have run out of available connections to keep alive, so the
        # server should tell us to close.
        c3 = connection()
        request(c3, keepalive=False)
        check_server_idle_conn_count(2)

        # Show that the third connection was closed.
        with pytest.raises(disconnect_errors):
            request(c3)
        check_server_idle_conn_count(2)

        # Wait for some of our timeout.
        time.sleep(1.2)

        # Refresh the second connection.
        request(c2)
        check_server_idle_conn_count(2)

        # Wait for the remainder of our timeout, plus one tick.
        time.sleep(1.2)
        check_server_idle_conn_count(1)

        # First connection should now be expired.
        with pytest.raises(disconnect_errors):
            request(c1)
        check_server_idle_conn_count(1)

        # But the second one should still be valid.
        request(c2)
        check_server_idle_conn_count(1)
    finally:
        # Restore original timeout.
        test_client.server_instance.timeout = original_timeout
+
+
@pytest.mark.parametrize(
    ('simulated_exception', 'error_number', 'exception_leaks'),
    (
        pytest.param(
            socket.error, errno.ECONNRESET, False,
            id='socket.error(ECONNRESET)',
        ),
        pytest.param(
            socket.error, errno.EPIPE, False,
            id='socket.error(EPIPE)',
        ),
        pytest.param(
            socket.error, errno.ENOTCONN, False,
            id='simulated socket.error(ENOTCONN)',
        ),
        pytest.param(
            None,  # <-- don't raise an artificial exception
            errno.ENOTCONN, False,
            id='real socket.error(ENOTCONN)',
            marks=pytest.mark.xfail(
                IS_WINDOWS,
                reason='Now reproducible this way on Windows',
            ),
        ),
        pytest.param(
            socket.error, errno.ESHUTDOWN, False,
            id='socket.error(ESHUTDOWN)',
        ),
        pytest.param(RuntimeError, 666, True, id='RuntimeError(666)'),
        pytest.param(socket.error, -1, True, id='socket.error(-1)'),
    ) + (
        () if six.PY2 else (
            pytest.param(
                ConnectionResetError, errno.ECONNRESET, False,
                id='ConnectionResetError(ECONNRESET)',
            ),
            pytest.param(
                BrokenPipeError, errno.EPIPE, False,
                id='BrokenPipeError(EPIPE)',
            ),
            pytest.param(
                BrokenPipeError, errno.ESHUTDOWN, False,
                id='BrokenPipeError(ESHUTDOWN)',
            ),
        )
    ),
)
def test_broken_connection_during_tcp_fin(
    error_number, exception_leaks,
    mocker, monkeypatch,
    simulated_exception, test_client,
):
    """Test there's no traceback on broken connection during close.

    It artificially causes :py:data:`~errno.ECONNRESET` /
    :py:data:`~errno.EPIPE` / :py:data:`~errno.ESHUTDOWN` /
    :py:data:`~errno.ENOTCONN` as well as unrelated :py:exc:`RuntimeError`
    and :py:exc:`socket.error(-1) <socket.error>` on the server socket when
    :py:meth:`socket.shutdown() <socket.socket.shutdown>` is called. It's
    triggered by closing the client socket before the server had a chance
    to respond.

    The expectation is that only :py:exc:`RuntimeError` and a
    :py:exc:`socket.error` with an unusual error code would leak.

    With the :py:data:`None`-parameter, a real non-simulated
    :py:exc:`OSError(107, 'Transport endpoint is not connected')
    <OSError>` happens.
    """
    # ``None`` means: inject nothing and let the real shutdown error occur.
    exc_instance = (
        None if simulated_exception is None
        else simulated_exception(error_number, 'Simulated socket error')
    )
    # Keep a reference to the real implementation so the wrapper below
    # can delegate to it after instrumenting the socket.
    old_close_kernel_socket = (
        test_client.server_instance.
        ConnectionClass._close_kernel_socket
    )

    def _close_kernel_socket(self):
        # Wrap the connection's socket in a Mock so that ``shutdown()``
        # calls can be observed (and, below, optionally sabotaged).
        monkeypatch.setattr(  # `socket.shutdown` is read-only otherwise
            self, 'socket',
            mocker.mock_module.Mock(wraps=self.socket),
        )
        if exc_instance is not None:
            # Force ``shutdown()`` to raise the simulated exception.
            monkeypatch.setattr(
                self.socket, 'shutdown',
                mocker.mock_module.Mock(side_effect=exc_instance),
            )
        # Spy records the exception (if any) raised out of ``shutdown()``.
        _close_kernel_socket.fin_spy = mocker.spy(self.socket, 'shutdown')

        # Record on the function object whether the original close let
        # the exception propagate ("leak") or swallowed it; the polling
        # loop below waits for this attribute to appear.
        try:
            old_close_kernel_socket(self)
        except simulated_exception:
            _close_kernel_socket.exception_leaked = True
        else:
            _close_kernel_socket.exception_leaked = False

    monkeypatch.setattr(
        test_client.server_instance.ConnectionClass,
        '_close_kernel_socket',
        _close_kernel_socket,
    )

    # Send a partial request and drop the client socket so the server
    # performs its TCP FIN while the peer is already gone.
    conn = test_client.get_connection()
    conn.auto_open = False
    conn.connect()
    conn.send(b'GET /hello HTTP/1.1')
    conn.send(('Host: %s' % conn.host).encode('ascii'))
    conn.close()

    # Let the server attempt TCP shutdown:
    for _ in range(10 * (2 if IS_SLOW_ENV else 1)):
        time.sleep(0.1)
        if hasattr(_close_kernel_socket, 'exception_leaked'):
            break

    if exc_instance is not None:  # simulated by us
        assert _close_kernel_socket.fin_spy.spy_exception is exc_instance
    else:  # real
        assert isinstance(
            _close_kernel_socket.fin_spy.spy_exception, socket.error,
        )
        assert _close_kernel_socket.fin_spy.spy_exception.errno == error_number

    assert _close_kernel_socket.exception_leaked is exception_leaks
+
+
@pytest.mark.parametrize(
    'timeout_before_headers',
    (
        True,
        False,
    ),
)
def test_HTTP11_Timeout(test_client, timeout_before_headers):
    """Check timeout without sending any data.

    The server will close the connection with a 408.
    """
    connection = test_client.get_connection()
    connection.auto_open = False
    connection.connect()

    if not timeout_before_headers:
        # Connect and send only a partial request (no terminating CRLF).
        connection.send(b'GET /hello HTTP/1.1')
        connection.send(('Host: %s' % connection.host).encode('ascii'))
    # else: Connect but send nothing at all.

    # Sleep past the server's socket timeout.
    time.sleep(timeout * 2)

    # By now the server must already have responded with 408.
    resp = connection.response_class(connection.sock, method='GET')
    resp.begin()
    assert resp.status == 408
    connection.close()
+
+
def test_HTTP11_Timeout_after_request(test_client):
    """Check timeout after at least one request has succeeded.

    The server should close the connection without 408.

    Exercises four phases on raw sockets: a normal request, a reuse of
    the same socket, a request after the idle timeout has elapsed
    (expected to fail), and finally a fresh connection that times out
    in the middle of the request headers.
    """
    fail_msg = "Writing to timed out socket didn't fail as it should have: %s"

    # Make an initial request
    conn = test_client.get_connection()
    conn.putrequest('GET', '/timeout?t=%s' % timeout, skip_host=True)
    conn.putheader('Host', conn.host)
    conn.endheaders()
    response = conn.response_class(conn.sock, method='GET')
    response.begin()
    assert response.status == 200
    actual_body = response.read()
    expected_body = str(timeout).encode()
    assert actual_body == expected_body

    # Make a second request on the same socket.
    # NB: ``_output``/``_send_output`` are private http.client helpers
    # used here to push a raw request onto the already-open socket,
    # bypassing the connection-state machine.
    conn._output(b'GET /hello HTTP/1.1')
    conn._output(('Host: %s' % conn.host).encode('ascii'))
    conn._send_output()
    response = conn.response_class(conn.sock, method='GET')
    response.begin()
    assert response.status == 200
    actual_body = response.read()
    expected_body = b'Hello, world!'
    assert actual_body == expected_body

    # Wait for our socket timeout
    time.sleep(timeout * 2)

    # Make another request on the same socket, which should error
    conn._output(b'GET /hello HTTP/1.1')
    conn._output(('Host: %s' % conn.host).encode('ascii'))
    conn._send_output()
    response = conn.response_class(conn.sock, method='GET')
    try:
        response.begin()
    except (socket.error, http_client.BadStatusLine):
        # Expected: the server already dropped the timed-out connection.
        pass
    except Exception as ex:
        pytest.fail(fail_msg % ex)
    else:
        # A 408 response (rather than a dropped socket) is also accepted.
        if response.status != 408:
            pytest.fail(fail_msg % response.read())

    conn.close()

    # Make another request on a new socket, which should work
    conn = test_client.get_connection()
    conn.putrequest('GET', '/pov', skip_host=True)
    conn.putheader('Host', conn.host)
    conn.endheaders()
    response = conn.response_class(conn.sock, method='GET')
    response.begin()
    assert response.status == 200
    actual_body = response.read()
    expected_body = pov.encode()
    assert actual_body == expected_body

    # Make another request on the same socket,
    # but timeout on the headers
    conn.send(b'GET /hello HTTP/1.1')
    # Wait for our socket timeout
    time.sleep(timeout * 2)
    response = conn.response_class(conn.sock, method='GET')
    try:
        response.begin()
    except (socket.error, http_client.BadStatusLine):
        # Expected: mid-headers timeout closes the connection.
        pass
    except Exception as ex:
        pytest.fail(fail_msg % ex)
    else:
        if response.status != 408:
            pytest.fail(fail_msg % response.read())

    conn.close()

    # Retry the request on a new connection, which should work
    conn = test_client.get_connection()
    conn.putrequest('GET', '/pov', skip_host=True)
    conn.putheader('Host', conn.host)
    conn.endheaders()
    response = conn.response_class(conn.sock, method='GET')
    response.begin()
    assert response.status == 200
    actual_body = response.read()
    expected_body = pov.encode()
    assert actual_body == expected_body
    conn.close()
+
+
def test_HTTP11_pipelining(test_client):
    """Test HTTP/1.1 pipelining.

    :py:mod:`http.client` doesn't support this directly, so requests
    are pushed onto the socket with the private ``_output`` /
    ``_send_output`` helpers while responses are read one behind.
    """
    conn = test_client.get_connection()

    # Put request 1
    conn.putrequest('GET', '/hello', skip_host=True)
    conn.putheader('Host', conn.host)
    conn.endheaders()

    for trial in range(5):
        # Put next request — sent before the previous response is read,
        # which is what makes this pipelining.
        conn._output(
            ('GET /hello?%s HTTP/1.1' % trial).encode('iso-8859-1'),
        )
        conn._output(('Host: %s' % conn.host).encode('ascii'))
        conn._send_output()

        # Retrieve previous response
        response = conn.response_class(conn.sock, method='GET')
        # there is a bug in python3 regarding the buffering of
        # ``conn.sock``. Until that bug get's fixed we will
        # monkey patch the ``response`` instance.
        # https://bugs.python.org/issue23377
        if not six.PY2:
            # Unbuffered file object avoids the buffered reader
            # swallowing bytes that belong to the next response.
            response.fp = conn.sock.makefile('rb', 0)
        response.begin()
        body = response.read(13)
        assert response.status == 200
        assert body == b'Hello, world!'

    # Retrieve final response
    response = conn.response_class(conn.sock, method='GET')
    response.begin()
    body = response.read()
    assert response.status == 200
    assert body == b'Hello, world!'

    conn.close()
+
+
def test_100_Continue(test_client):
    """Test 100-continue header processing."""
    conn = test_client.get_connection()

    # Try a page without an Expect request header first.
    # Note that http.client's response.begin automatically ignores
    # 100 Continue responses, so we must manually check for it.
    conn.putrequest('POST', '/upload', skip_host=True)
    conn.putheader('Host', conn.host)
    conn.putheader('Content-Type', 'text/plain')
    conn.putheader('Content-Length', '4')
    conn.endheaders()
    conn.send(b"d'oh")
    response = conn.response_class(conn.sock, method='POST')
    # ``_read_status`` is a private http.client helper; it exposes the
    # raw status so an interim 100 response would be visible here.
    _version, status, _reason = response._read_status()
    assert status != 100
    conn.close()

    # Now try a page with an Expect header...
    conn.connect()
    conn.putrequest('POST', '/upload', skip_host=True)
    conn.putheader('Host', conn.host)
    conn.putheader('Content-Type', 'text/plain')
    conn.putheader('Content-Length', '17')
    conn.putheader('Expect', '100-continue')
    conn.endheaders()
    response = conn.response_class(conn.sock, method='POST')

    # ...assert and then skip the 100 response
    version, status, reason = response._read_status()
    assert status == 100
    # The interim response must consist of the status line plus a blank
    # line only — any header line before the blank line is a failure.
    while True:
        line = response.fp.readline().strip()
        if line:
            pytest.fail(
                '100 Continue should not output any headers. Got %r' %
                line,
            )
        else:
            break

    # ...send the body
    body = b'I am a small file'
    conn.send(body)

    # ...get the final response
    response.begin()
    status_line, _actual_headers, actual_resp_body = webtest.shb(response)
    actual_status = int(status_line[:3])
    assert actual_status == 200
    expected_resp_body = ("thanks for '%s'" % body).encode()
    assert actual_resp_body == expected_resp_body
    conn.close()
+
+
@pytest.mark.parametrize(
    'max_request_body_size',
    (
        0,
        1001,
    ),
)
def test_readall_or_close(test_client, max_request_body_size):
    """Test a max_request_body_size of 0 (the default) and 1001."""
    old_max = test_client.server_instance.max_request_body_size

    test_client.server_instance.max_request_body_size = max_request_body_size

    # Restore the server limit even if an assertion fails, so later
    # tests sharing this server see the original configuration.
    try:
        conn = test_client.get_connection()

        # Get a POST page with an error
        conn.putrequest('POST', '/err_before_read', skip_host=True)
        conn.putheader('Host', conn.host)
        conn.putheader('Content-Type', 'text/plain')
        conn.putheader('Content-Length', '1000')
        conn.putheader('Expect', '100-continue')
        conn.endheaders()
        response = conn.response_class(conn.sock, method='POST')

        # ...assert and then skip the 100 response
        _version, status, _reason = response._read_status()
        assert status == 100
        skip = True
        while skip:
            skip = response.fp.readline().strip()

        # ...send the body
        conn.send(b'x' * 1000)

        # ...get the final response
        response.begin()
        status_line, _actual_headers, actual_resp_body = webtest.shb(response)
        actual_status = int(status_line[:3])
        assert actual_status == 500

        # Now try a working page with an Expect header...
        conn._output(b'POST /upload HTTP/1.1')
        conn._output(('Host: %s' % conn.host).encode('ascii'))
        conn._output(b'Content-Type: text/plain')
        conn._output(b'Content-Length: 17')
        conn._output(b'Expect: 100-continue')
        conn._send_output()
        response = conn.response_class(conn.sock, method='POST')

        # ...assert and then skip the 100 response
        version, status, reason = response._read_status()
        assert status == 100
        skip = True
        while skip:
            skip = response.fp.readline().strip()

        # ...send the body
        body = b'I am a small file'
        conn.send(body)

        # ...get the final response
        response.begin()
        status_line, actual_headers, actual_resp_body = webtest.shb(response)
        actual_status = int(status_line[:3])
        assert actual_status == 200
        expected_resp_body = ("thanks for '%s'" % body).encode()
        assert actual_resp_body == expected_resp_body
        conn.close()
    finally:
        test_client.server_instance.max_request_body_size = old_max
+
+
def test_No_Message_Body(test_client):
    """Test HTTP queries with an empty response body."""
    # One persistent connection is shared by every request below.
    connection = test_client.get_connection()
    connection.auto_open = False
    connection.connect()

    # First request: a regular page; no "Connection: close" expected.
    status_line, headers, resp_body = test_client.get(
        '/pov', http_conn=connection,
    )
    assert int(status_line[:3]) == 200
    assert status_line[4:] == 'OK'
    assert resp_body == pov.encode()
    assert not header_exists('Connection', headers)

    # 204 and 304 responses must carry neither a body nor a
    # Content-Length header, and must keep the connection open.
    for code in (204, 304):
        status_line, headers, resp_body = test_client.get(
            '/custom/%d' % code, http_conn=connection,
        )
        assert int(status_line[:3]) == code
        assert not header_exists('Content-Length', headers)
        assert resp_body == b''
        assert not header_exists('Connection', headers)
+
+
@pytest.mark.xfail(
    reason=unwrap(
        trim("""
        Headers from earlier request leak into the request
        line for a subsequent request, resulting in 400
        instead of 413. See cherrypy/cheroot#69 for details.
        """),
    ),
)
def test_Chunked_Encoding(test_client):
    """Test HTTP uploads with chunked transfer-encoding."""
    # Initialize a persistent HTTP connection
    conn = test_client.get_connection()

    # Try a normal chunked request (with extensions).
    # The body consists of two chunks ("xx\r\nxxxx" and "yyyyy"), a
    # chunk extension (";key=value"), and a "Content-Type" trailer.
    body = (
        b'8;key=value\r\nxx\r\nxxxx\r\n5\r\nyyyyy\r\n0\r\n'
        b'Content-Type: application/json\r\n'
        b'\r\n'
    )
    conn.putrequest('POST', '/upload', skip_host=True)
    conn.putheader('Host', conn.host)
    conn.putheader('Transfer-Encoding', 'chunked')
    conn.putheader('Trailer', 'Content-Type')
    # Note that this is somewhat malformed:
    # we shouldn't be sending Content-Length.
    # RFC 2616 says the server should ignore it.
    conn.putheader('Content-Length', '3')
    conn.endheaders()
    conn.send(body)
    response = conn.getresponse()
    status_line, _actual_headers, actual_resp_body = webtest.shb(response)
    actual_status = int(status_line[:3])
    assert actual_status == 200
    assert status_line[4:] == 'OK'
    expected_resp_body = ("thanks for '%s'" % b'xx\r\nxxxxyyyyy').encode()
    assert actual_resp_body == expected_resp_body

    # Try a chunked request that exceeds server.max_request_body_size.
    # Note that the delimiters and trailer are included.
    body = b'\r\n'.join((b'3e3', b'x' * 995, b'0', b'', b''))
    conn.putrequest('POST', '/upload', skip_host=True)
    conn.putheader('Host', conn.host)
    conn.putheader('Transfer-Encoding', 'chunked')
    conn.putheader('Content-Type', 'text/plain')
    # Chunked requests don't need a content-length
    # conn.putheader("Content-Length", len(body))
    conn.endheaders()
    conn.send(body)
    response = conn.getresponse()
    status_line, actual_headers, actual_resp_body = webtest.shb(response)
    actual_status = int(status_line[:3])
    assert actual_status == 413
    conn.close()
+
+
def test_Content_Length_in(test_client):
    """Try a non-chunked request where Content-Length exceeds limit.

    (server.max_request_body_size).
    Assert error before body send.
    """
    connection = test_client.get_connection()

    connection.putrequest('POST', '/upload', skip_host=True)
    for header_name, header_value in (
        ('Host', connection.host),
        ('Content-Type', 'text/plain'),
        ('Content-Length', '9999'),
    ):
        connection.putheader(header_name, header_value)
    connection.endheaders()

    # The server must reject the request before any body is sent.
    resp = connection.getresponse()
    status_line, _headers, resp_body = webtest.shb(resp)
    assert int(status_line[:3]) == 413
    assert resp_body == (
        b'The entity sent with the request exceeds '
        b'the maximum allowed bytes.'
    )
    connection.close()
+
+
def test_Content_Length_not_int(test_client):
    """Test that malicious Content-Length header returns 400."""
    request_headers = [
        ('Content-Type', 'text/plain'),
        ('Content-Length', 'not-an-integer'),
    ]
    status_line, _headers, resp_body = test_client.post(
        '/upload', headers=request_headers,
    )

    assert int(status_line[:3]) == 400
    assert resp_body == b'Malformed Content-Length Header.'
+
+
@pytest.mark.parametrize(
    ('uri', 'expected_resp_status', 'expected_resp_body'),
    (
        (
            '/wrong_cl_buffered', 500,
            (
                b'The requested resource returned more bytes than the '
                b'declared Content-Length.'
            ),
        ),
        ('/wrong_cl_unbuffered', 200, b'I too'),
    ),
)
def test_Content_Length_out(
    test_client,
    uri, expected_resp_status, expected_resp_body,
):
    """Test response with Content-Length less than the response body.

    (non-chunked response)

    The buffered endpoint yields a 500 before anything is sent; the
    unbuffered one has already streamed a truncated body, so a 200
    with the truncated content is expected instead.
    """
    conn = test_client.get_connection()
    conn.putrequest('GET', uri, skip_host=True)
    conn.putheader('Host', conn.host)
    conn.endheaders()

    response = conn.getresponse()
    status_line, _actual_headers, actual_resp_body = webtest.shb(response)
    actual_status = int(status_line[:3])

    assert actual_status == expected_resp_status
    assert actual_resp_body == expected_resp_body

    conn.close()

    # the server logs the exception that we had verified from the
    # client perspective. Tell the error_log verification that
    # it can ignore that message.
    test_client.server_instance.error_log.ignored_msgs.extend((
        # Python 3.7+:
        "ValueError('Response body exceeds the declared Content-Length.')",
        # Python 2.7-3.6 (macOS?):
        "ValueError('Response body exceeds the declared Content-Length.',)",
    ))
+
+
@pytest.mark.xfail(
    reason='Sometimes this test fails due to low timeout. '
           'Ref: https://github.com/cherrypy/cherrypy/issues/598',
)
def test_598(test_client):
    """Test serving large file with a read timeout in place."""
    # Initialize a persistent HTTP connection
    conn = test_client.get_connection()
    remote_data_conn = urllib.request.urlopen(
        '%s://%s:%s/one_megabyte_of_a'
        % ('http', conn.host, conn.port),
    )
    # Read a small prefix, then stall for most of the server timeout
    # to verify the server keeps serving the remainder afterwards.
    buf = remote_data_conn.read(512)
    time.sleep(timeout * 0.6)
    remaining = (1024 * 1024) - 512
    while remaining:
        data = remote_data_conn.read(remaining)
        if not data:
            break
        buf += data
        remaining -= len(data)

    # The full megabyte of 'a' bytes must have arrived intact.
    assert len(buf) == 1024 * 1024
    assert buf == b'a' * 1024 * 1024
    assert remaining == 0
    remote_data_conn.close()
+
+
@pytest.mark.parametrize(
    'invalid_terminator',
    (
        b'\n\n',
        b'\r\n\n',
    ),
)
def test_No_CRLF(test_client, invalid_terminator):
    """Test HTTP queries with no valid CRLF terminators."""
    connection = test_client.get_connection()

    # Send a request line terminated by something other than CRLF.
    malformed_request = b''.join(
        (b'GET /hello HTTP/1.1', invalid_terminator),
    )
    connection.send(malformed_request)

    resp = connection.response_class(connection.sock, method='GET')
    resp.begin()
    assert resp.read() == b'HTTP requires CRLF terminators'
    connection.close()
+
+
class FaultySelect:
    """Mock class to insert errors in the selector.select method."""

    def __init__(self, original_select):
        """Wrap the real ``selector.select`` callable."""
        self.original_select = original_select
        self.request_served = False
        self.os_error_triggered = False

    def __call__(self, timeout):
        """Delegate to the real ``select``, failing once armed."""
        if not self.request_served:
            return self.original_select(timeout)

        # Armed: simulate the selector blowing up.
        self.os_error_triggered = True
        raise OSError('Error while selecting the client socket.')
+
+
class FaultyGetMap:
    """Mock class to insert errors in the selector.get_map method."""

    def __init__(self, original_get_map):
        """Wrap the real ``selector.get_map`` callable."""
        self.original_get_map = original_get_map
        self.sabotage_conn = False
        self.conn_closed = False

    def __call__(self):
        """Return the real map, sabotaging HTTP connections once armed."""
        if self.sabotage_conn and not self.conn_closed:
            for _, (_, _, _, conn) in self.original_get_map().items():
                if isinstance(conn, cheroot.server.HTTPConnection):
                    # Close the socket to cause OSError down the line.
                    conn.close()
                    self.conn_closed = True

        return self.original_get_map()
+
+
def test_invalid_selected_connection(test_client, monkeypatch):
    """Test the error handling segment of HTTP connection selection.

    See :py:meth:`cheroot.connections.ConnectionManager.get_conn`.
    """
    # patch the select method so it raises OSError once a request
    # has been served (see FaultySelect.__call__)
    faux_select = FaultySelect(
        test_client.server_instance._connections._selector.select,
    )
    monkeypatch.setattr(
        test_client.server_instance._connections._selector,
        'select',
        faux_select,
    )

    # patch the get_map method so it closes the kept-alive HTTP
    # connection's socket once armed (see FaultyGetMap.__call__)
    faux_get_map = FaultyGetMap(
        test_client.server_instance._connections._selector._selector.get_map,
    )

    monkeypatch.setattr(
        test_client.server_instance._connections._selector._selector,
        'get_map',
        faux_get_map,
    )

    # request a page with connection keep-alive to make sure
    # we'll have a connection to be modified.
    resp_status, _resp_headers, _resp_body = test_client.request(
        '/page1', headers=[('Connection', 'Keep-Alive')],
    )

    assert resp_status == '200 OK'
    # trigger the internal errors
    faux_get_map.sabotage_conn = faux_select.request_served = True
    # give time to make sure the error gets handled
    time.sleep(test_client.server_instance.expiration_interval * 2)
    # both mocks must have fired: the selector raised, and the
    # sabotaged connection was closed without crashing the server.
    assert faux_select.os_error_triggered
    assert faux_get_map.conn_closed
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/test/test_core.py b/monEnvTP/lib/python3.8/site-packages/cheroot/test/test_core.py
new file mode 100644
index 0000000000000000000000000000000000000000..685fe971ec29ee6c0f215cc8a00ad11769ec3d83
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/test/test_core.py
@@ -0,0 +1,454 @@
+"""Tests for managing HTTP issues (malformed requests, etc)."""
+# -*- coding: utf-8 -*-
+# vim: set fileencoding=utf-8 :
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+import errno
+import socket
+
+import pytest
+import six
+from six.moves import urllib
+
+from cheroot.test import helper
+
+
+HTTP_BAD_REQUEST = 400
+HTTP_LENGTH_REQUIRED = 411
+HTTP_NOT_FOUND = 404
+HTTP_REQUEST_ENTITY_TOO_LARGE = 413
+HTTP_OK = 200
+HTTP_VERSION_NOT_SUPPORTED = 505
+
+
+class HelloController(helper.Controller):
+ """Controller for serving WSGI apps."""
+
+ def hello(req, resp):
+ """Render Hello world."""
+ return 'Hello world!'
+
+ def body_required(req, resp):
+ """Render Hello world or set 411."""
+ if req.environ.get('Content-Length', None) is None:
+ resp.status = '411 Length Required'
+ return
+ return 'Hello world!'
+
+ def query_string(req, resp):
+ """Render QUERY_STRING value."""
+ return req.environ.get('QUERY_STRING', '')
+
+ def asterisk(req, resp):
+ """Render request method value."""
+ # pylint: disable=possibly-unused-variable
+ method = req.environ.get('REQUEST_METHOD', 'NO METHOD FOUND')
+ tmpl = 'Got asterisk URI path with {method} method'
+ return tmpl.format(**locals())
+
+ def _munge(string):
+ """Encode PATH_INFO correctly depending on Python version.
+
+ WSGI 1.0 is a mess around unicode. Create endpoints
+ that match the PATH_INFO that it produces.
+ """
+ if six.PY2:
+ return string
+ return string.encode('utf-8').decode('latin-1')
+
+ handlers = {
+ '/hello': hello,
+ '/no_body': hello,
+ '/body_required': body_required,
+ '/query_string': query_string,
+ _munge('/привіт'): hello,
+ _munge('/Юххууу'): hello,
+ '/\xa0Ðblah key 0 900 4 data': hello,
+ '/*': asterisk,
+ }
+
+
+def _get_http_response(connection, method='GET'):
+ c = connection
+ kwargs = {'strict': c.strict} if hasattr(c, 'strict') else {}
+ # Python 3.2 removed the 'strict' feature, saying:
+ # "http.client now always assumes HTTP/1.x compliant servers."
+ return c.response_class(c.sock, method=method, **kwargs)
+
+
+@pytest.fixture
+def testing_server(wsgi_server_client):
+ """Attach a WSGI app to the given server and preconfigure it."""
+ wsgi_server = wsgi_server_client.server_instance
+ wsgi_server.wsgi_app = HelloController()
+ wsgi_server.max_request_body_size = 30000000
+ wsgi_server.server_client = wsgi_server_client
+ return wsgi_server
+
+
+@pytest.fixture
+def test_client(testing_server):
+ """Get and return a test client out of the given server."""
+ return testing_server.server_client
+
+
+@pytest.fixture
+def testing_server_with_defaults(wsgi_server_client):
+ """Attach a WSGI app to the given server and preconfigure it."""
+ wsgi_server = wsgi_server_client.server_instance
+ wsgi_server.wsgi_app = HelloController()
+ wsgi_server.server_client = wsgi_server_client
+ return wsgi_server
+
+
+@pytest.fixture
+def test_client_with_defaults(testing_server_with_defaults):
+ """Get and return a test client out of the given server."""
+ return testing_server_with_defaults.server_client
+
+
+def test_http_connect_request(test_client):
+ """Check that CONNECT query results in Method Not Allowed status."""
+ status_line = test_client.connect('/anything')[0]
+ actual_status = int(status_line[:3])
+ assert actual_status == 405
+
+
+def test_normal_request(test_client):
+ """Check that normal GET query succeeds."""
+ status_line, _, actual_resp_body = test_client.get('/hello')
+ actual_status = int(status_line[:3])
+ assert actual_status == HTTP_OK
+ assert actual_resp_body == b'Hello world!'
+
+
+def test_query_string_request(test_client):
+ """Check that GET param is parsed well."""
+ status_line, _, actual_resp_body = test_client.get(
+ '/query_string?test=True',
+ )
+ actual_status = int(status_line[:3])
+ assert actual_status == HTTP_OK
+ assert actual_resp_body == b'test=True'
+
+
+@pytest.mark.parametrize(
+ 'uri',
+ (
+ '/hello', # plain
+ '/query_string?test=True', # query
+ '/{0}?{1}={2}'.format( # quoted unicode
+ *map(urllib.parse.quote, ('Юххууу', 'ї', 'йо'))
+ ),
+ ),
+)
+def test_parse_acceptable_uri(test_client, uri):
+ """Check that server responds with OK to valid GET queries."""
+ status_line = test_client.get(uri)[0]
+ actual_status = int(status_line[:3])
+ assert actual_status == HTTP_OK
+
+
+@pytest.mark.xfail(six.PY2, reason='Fails on Python 2')
+def test_parse_uri_unsafe_uri(test_client):
+ """Test that malicious URI does not allow HTTP injection.
+
+ This effectively checks that sending GET request with URL
+
+ /%A0%D0blah%20key%200%20900%204%20data
+
+ is not converted into
+
+ GET /
+ blah key 0 900 4 data
+ HTTP/1.1
+
+ which would be a security issue otherwise.
+ """
+ c = test_client.get_connection()
+ resource = '/\xa0Ðblah key 0 900 4 data'.encode('latin-1')
+ quoted = urllib.parse.quote(resource)
+ assert quoted == '/%A0%D0blah%20key%200%20900%204%20data'
+ request = 'GET {quoted} HTTP/1.1'.format(**locals())
+ c._output(request.encode('utf-8'))
+ c._send_output()
+ response = _get_http_response(c, method='GET')
+ response.begin()
+ assert response.status == HTTP_OK
+ assert response.read(12) == b'Hello world!'
+ c.close()
+
+
+def test_parse_uri_invalid_uri(test_client):
+ """Check that server responds with Bad Request to invalid GET queries.
+
+ Invalid request line test case: it should only contain US-ASCII.
+ """
+ c = test_client.get_connection()
+ c._output(u'GET /йопта! HTTP/1.1'.encode('utf-8'))
+ c._send_output()
+ response = _get_http_response(c, method='GET')
+ response.begin()
+ assert response.status == HTTP_BAD_REQUEST
+ assert response.read(21) == b'Malformed Request-URI'
+ c.close()
+
+
+@pytest.mark.parametrize(
+ 'uri',
+ (
+ 'hello', # ascii
+ 'привіт', # non-ascii
+ ),
+)
+def test_parse_no_leading_slash_invalid(test_client, uri):
+ """Check that server responds with Bad Request to invalid GET queries.
+
+ Invalid request line test case: it should have leading slash (be absolute).
+ """
+ status_line, _, actual_resp_body = test_client.get(
+ urllib.parse.quote(uri),
+ )
+ actual_status = int(status_line[:3])
+ assert actual_status == HTTP_BAD_REQUEST
+ assert b'starting with a slash' in actual_resp_body
+
+
+def test_parse_uri_absolute_uri(test_client):
+ """Check that server responds with Bad Request to Absolute URI.
+
+ Only proxy servers should allow this.
+ """
+ status_line, _, actual_resp_body = test_client.get('http://google.com/')
+ actual_status = int(status_line[:3])
+ assert actual_status == HTTP_BAD_REQUEST
+ expected_body = b'Absolute URI not allowed if server is not a proxy.'
+ assert actual_resp_body == expected_body
+
+
+def test_parse_uri_asterisk_uri(test_client):
+ """Check that server responds with OK to OPTIONS with "*" Absolute URI."""
+ status_line, _, actual_resp_body = test_client.options('*')
+ actual_status = int(status_line[:3])
+ assert actual_status == HTTP_OK
+ expected_body = b'Got asterisk URI path with OPTIONS method'
+ assert actual_resp_body == expected_body
+
+
+def test_parse_uri_fragment_uri(test_client):
+ """Check that server responds with Bad Request to URI with fragment."""
+ status_line, _, actual_resp_body = test_client.get(
+ '/hello?test=something#fake',
+ )
+ actual_status = int(status_line[:3])
+ assert actual_status == HTTP_BAD_REQUEST
+ expected_body = b'Illegal #fragment in Request-URI.'
+ assert actual_resp_body == expected_body
+
+
+def test_no_content_length(test_client):
+ """Test POST query with an empty body being successful."""
+ # "The presence of a message-body in a request is signaled by the
+ # inclusion of a Content-Length or Transfer-Encoding header field in
+ # the request's message-headers."
+ #
+ # Send a message with neither header and no body.
+ c = test_client.get_connection()
+ c.request('POST', '/no_body')
+ response = c.getresponse()
+ actual_resp_body = response.read()
+ actual_status = response.status
+ assert actual_status == HTTP_OK
+ assert actual_resp_body == b'Hello world!'
+
+
+def test_content_length_required(test_client):
+ """Test POST query with body failing because of missing Content-Length."""
+ # Now send a message that has no Content-Length, but does send a body.
+ # Verify that CP times out the socket and responds
+ # with 411 Length Required.
+
+ c = test_client.get_connection()
+ c.request('POST', '/body_required')
+ response = c.getresponse()
+ response.read()
+
+ actual_status = response.status
+ assert actual_status == HTTP_LENGTH_REQUIRED
+
+
+@pytest.mark.xfail(
+ reason='https://github.com/cherrypy/cheroot/issues/106',
+ strict=False, # sometimes it passes
+)
+def test_large_request(test_client_with_defaults):
+ """Test GET query with maliciously large Content-Length."""
+ # If the server's max_request_body_size is not set (i.e. is set to 0)
+ # then this will result in an `OverflowError: Python int too large to
+ # convert to C ssize_t` in the server.
+ # We expect that this should instead return that the request is too
+ # large.
+ c = test_client_with_defaults.get_connection()
+ c.putrequest('GET', '/hello')
+ c.putheader('Content-Length', str(2**64))
+ c.endheaders()
+
+ response = c.getresponse()
+ actual_status = response.status
+
+ assert actual_status == HTTP_REQUEST_ENTITY_TOO_LARGE
+
+
+@pytest.mark.parametrize(
+ ('request_line', 'status_code', 'expected_body'),
+ (
+ (
+ b'GET /', # missing proto
+ HTTP_BAD_REQUEST, b'Malformed Request-Line',
+ ),
+ (
+ b'GET / HTTPS/1.1', # invalid proto
+ HTTP_BAD_REQUEST, b'Malformed Request-Line: bad protocol',
+ ),
+ (
+ b'GET / HTTP/1', # invalid version
+ HTTP_BAD_REQUEST, b'Malformed Request-Line: bad version',
+ ),
+ (
+ b'GET / HTTP/2.15', # invalid ver
+ HTTP_VERSION_NOT_SUPPORTED, b'Cannot fulfill request',
+ ),
+ ),
+)
+def test_malformed_request_line(
+ test_client, request_line,
+ status_code, expected_body,
+):
+ """Test missing or invalid HTTP version in Request-Line."""
+ c = test_client.get_connection()
+ c._output(request_line)
+ c._send_output()
+ response = _get_http_response(c, method='GET')
+ response.begin()
+ assert response.status == status_code
+ assert response.read(len(expected_body)) == expected_body
+ c.close()
+
+
+def test_malformed_http_method(test_client):
+ """Test non-uppercase HTTP method."""
+ c = test_client.get_connection()
+ c.putrequest('GeT', '/malformed_method_case')
+ c.putheader('Content-Type', 'text/plain')
+ c.endheaders()
+
+ response = c.getresponse()
+ actual_status = response.status
+ assert actual_status == HTTP_BAD_REQUEST
+ actual_resp_body = response.read(21)
+ assert actual_resp_body == b'Malformed method name'
+
+
+def test_malformed_header(test_client):
+ """Check that broken HTTP header results in Bad Request."""
+ c = test_client.get_connection()
+ c.putrequest('GET', '/')
+ c.putheader('Content-Type', 'text/plain')
+ # See https://www.bitbucket.org/cherrypy/cherrypy/issue/941
+ c._output(b'Re, 1.2.3.4#015#012')
+ c.endheaders()
+
+ response = c.getresponse()
+ actual_status = response.status
+ assert actual_status == HTTP_BAD_REQUEST
+ actual_resp_body = response.read(20)
+ assert actual_resp_body == b'Illegal header line.'
+
+
+def test_request_line_split_issue_1220(test_client):
+ """Check that HTTP request line of exactly 256 chars length is OK."""
+ Request_URI = (
+ '/hello?'
+ 'intervenant-entreprise-evenement_classaction='
+ 'evenement-mailremerciements'
+ '&_path=intervenant-entreprise-evenement'
+ '&intervenant-entreprise-evenement_action-id=19404'
+ '&intervenant-entreprise-evenement_id=19404'
+ '&intervenant-entreprise_id=28092'
+ )
+ assert len('GET %s HTTP/1.1\r\n' % Request_URI) == 256
+
+ actual_resp_body = test_client.get(Request_URI)[2]
+ assert actual_resp_body == b'Hello world!'
+
+
+def test_garbage_in(test_client):
+ """Test that server sends an error for garbage received over TCP."""
+ # Connect without SSL regardless of server.scheme
+
+ c = test_client.get_connection()
+ c._output(b'gjkgjklsgjklsgjkljklsg')
+ c._send_output()
+ response = c.response_class(c.sock, method='GET')
+ try:
+ response.begin()
+ actual_status = response.status
+ assert actual_status == HTTP_BAD_REQUEST
+ actual_resp_body = response.read(22)
+ assert actual_resp_body == b'Malformed Request-Line'
+ c.close()
+ except socket.error as ex:
+ # "Connection reset by peer" is also acceptable.
+ if ex.errno != errno.ECONNRESET:
+ raise
+
+
+class CloseController:
+ """Controller for testing the close callback."""
+
+ def __call__(self, environ, start_response):
+ """Get the req to know header sent status."""
+ self.req = start_response.__self__.req
+ resp = CloseResponse(self.close)
+ start_response(resp.status, resp.headers.items())
+ return resp
+
+ def close(self):
+ """Close, writing hello."""
+ self.req.write(b'hello')
+
+
+class CloseResponse:
+ """Dummy empty response to trigger the no body status."""
+
+ def __init__(self, close):
+ """Use some defaults to ensure we have a header."""
+ self.status = '200 OK'
+ self.headers = {'Content-Type': 'text/html'}
+ self.close = close
+
+ def __getitem__(self, index):
+ """Ensure we don't have a body."""
+ raise IndexError()
+
+ def output(self):
+ """Return self to hook the close method."""
+ return self
+
+
+@pytest.fixture
+def testing_server_close(wsgi_server_client):
+ """Attach a WSGI app to the given server and preconfigure it."""
+ wsgi_server = wsgi_server_client.server_instance
+ wsgi_server.wsgi_app = CloseController()
+ wsgi_server.max_request_body_size = 30000000
+ wsgi_server.server_client = wsgi_server_client
+ return wsgi_server
+
+
+def test_send_header_before_closing(testing_server_close):
+ """Test we are actually sending the headers before calling 'close'."""
+ _, _, resp_body = testing_server_close.server_client.get('/')
+ assert resp_body == b'hello'
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/test/test_dispatch.py b/monEnvTP/lib/python3.8/site-packages/cheroot/test/test_dispatch.py
new file mode 100644
index 0000000000000000000000000000000000000000..9974fdabc134e8a7fbfa8b30141f9187ea7d1c1f
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/test/test_dispatch.py
@@ -0,0 +1,55 @@
+"""Tests for the HTTP server."""
+# -*- coding: utf-8 -*-
+# vim: set fileencoding=utf-8 :
+
+from __future__ import absolute_import, division, print_function
+
+from cheroot.wsgi import PathInfoDispatcher
+
+
+def wsgi_invoke(app, environ):
+ """Serve 1 request from a WSGI application."""
+ response = {}
+
+ def start_response(status, headers):
+ response.update({
+ 'status': status,
+ 'headers': headers,
+ })
+
+ response['body'] = b''.join(
+ app(environ, start_response),
+ )
+
+ return response
+
+
+def test_dispatch_no_script_name():
+ """Dispatch despite lack of ``SCRIPT_NAME`` in environ."""
+ # Bare bones WSGI hello world app (from PEP 333).
+ def app(environ, start_response):
+ start_response(
+ '200 OK', [
+ ('Content-Type', 'text/plain; charset=utf-8'),
+ ],
+ )
+ return [u'Hello, world!'.encode('utf-8')]
+
+ # Build a dispatch table.
+ d = PathInfoDispatcher([
+ ('/', app),
+ ])
+
+ # Dispatch a request without `SCRIPT_NAME`.
+ response = wsgi_invoke(
+ d, {
+ 'PATH_INFO': '/foo',
+ },
+ )
+ assert response == {
+ 'status': '200 OK',
+ 'headers': [
+ ('Content-Type', 'text/plain; charset=utf-8'),
+ ],
+ 'body': b'Hello, world!',
+ }
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/test/test_errors.py b/monEnvTP/lib/python3.8/site-packages/cheroot/test/test_errors.py
new file mode 100644
index 0000000000000000000000000000000000000000..a3eee67b9d76c3960836bb57fe3b99a89740d43b
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/test/test_errors.py
@@ -0,0 +1,30 @@
+"""Test suite for ``cheroot.errors``."""
+
+import pytest
+
+from cheroot import errors
+
+from .._compat import IS_LINUX, IS_MACOS, IS_WINDOWS # noqa: WPS130
+
+
+@pytest.mark.parametrize(
+ ('err_names', 'err_nums'),
+ (
+ (('', 'some-nonsense-name'), []),
+ (
+ (
+ 'EPROTOTYPE', 'EAGAIN', 'EWOULDBLOCK',
+ 'WSAEWOULDBLOCK', 'EPIPE',
+ ),
+ (91, 11, 32) if IS_LINUX else
+ (32, 35, 41) if IS_MACOS else
+ (32, 10041, 11, 10035) if IS_WINDOWS else
+ (),
+ ),
+ ),
+)
+def test_plat_specific_errors(err_names, err_nums):
+ """Test that ``plat_specific_errors`` gets correct error numbers list."""
+ actual_err_nums = errors.plat_specific_errors(*err_names)
+ assert len(actual_err_nums) == len(err_nums)
+ assert sorted(actual_err_nums) == sorted(err_nums)
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/test/test_makefile.py b/monEnvTP/lib/python3.8/site-packages/cheroot/test/test_makefile.py
new file mode 100644
index 0000000000000000000000000000000000000000..cdded07e87092401793d765eb8e19e745766d94b
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/test/test_makefile.py
@@ -0,0 +1,52 @@
+"""Tests for :py:mod:`cheroot.makefile`."""
+
+from cheroot import makefile
+
+
+__metaclass__ = type
+
+
+class MockSocket:
+ """A mock socket."""
+
+ def __init__(self):
+ """Initialize :py:class:`MockSocket`."""
+ self.messages = []
+
+ def recv_into(self, buf):
+ """Simulate ``recv_into`` for Python 3."""
+ if not self.messages:
+ return 0
+ msg = self.messages.pop(0)
+ for index, byte in enumerate(msg):
+ buf[index] = byte
+ return len(msg)
+
+ def recv(self, size):
+ """Simulate ``recv`` for Python 2."""
+ try:
+ return self.messages.pop(0)
+ except IndexError:
+ return ''
+
+ def send(self, val):
+ """Simulate a send."""
+ return len(val)
+
+
+def test_bytes_read():
+ """Reader should capture bytes read."""
+ sock = MockSocket()
+ sock.messages.append(b'foo')
+ rfile = makefile.MakeFile(sock, 'r')
+ rfile.read()
+ assert rfile.bytes_read == 3
+
+
+def test_bytes_written():
+ """Writer should capture bytes written."""
+ sock = MockSocket()
+ sock.messages.append(b'foo')
+ wfile = makefile.MakeFile(sock, 'w')
+ wfile.write(b'bar')
+ assert wfile.bytes_written == 3
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/test/test_server.py b/monEnvTP/lib/python3.8/site-packages/cheroot/test/test_server.py
new file mode 100644
index 0000000000000000000000000000000000000000..8305c78c87ccdc8ff36b4496299cbe8808382959
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/test/test_server.py
@@ -0,0 +1,431 @@
+"""Tests for the HTTP server."""
+# -*- coding: utf-8 -*-
+# vim: set fileencoding=utf-8 :
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+import os
+import socket
+import tempfile
+import threading
+import uuid
+
+import pytest
+import requests
+import requests_unixsocket
+import six
+
+from pypytools.gc.custom import DefaultGc
+from six.moves import queue, urllib
+
+from .._compat import bton, ntob
+from .._compat import IS_LINUX, IS_MACOS, IS_WINDOWS, SYS_PLATFORM
+from ..server import IS_UID_GID_RESOLVABLE, Gateway, HTTPServer
+from ..testing import (
+ ANY_INTERFACE_IPV4,
+ ANY_INTERFACE_IPV6,
+ EPHEMERAL_PORT,
+)
+
+
+IS_SLOW_ENV = IS_MACOS or IS_WINDOWS
+
+
+unix_only_sock_test = pytest.mark.skipif(
+ not hasattr(socket, 'AF_UNIX'),
+ reason='UNIX domain sockets are only available under UNIX-based OS',
+)
+
+
+non_macos_sock_test = pytest.mark.skipif(
+ IS_MACOS,
+ reason='Peercreds lookup does not work under macOS/BSD currently.',
+)
+
+
+@pytest.fixture(params=('abstract', 'file'))
+def unix_sock_file(request):
+ """Check that bound UNIX socket address is stored in server."""
+ name = 'unix_{request.param}_sock'.format(**locals())
+ return request.getfixturevalue(name)
+
+
+@pytest.fixture
+def unix_abstract_sock():
+ """Return an abstract UNIX socket address."""
+ if not IS_LINUX:
+ pytest.skip(
+ '{os} does not support an abstract '
+ 'socket namespace'.format(os=SYS_PLATFORM),
+ )
+ return b''.join((
+ b'\x00cheroot-test-socket',
+ ntob(str(uuid.uuid4())),
+ )).decode()
+
+
+@pytest.fixture
+def unix_file_sock():
+ """Yield a unix file socket."""
+ tmp_sock_fh, tmp_sock_fname = tempfile.mkstemp()
+
+ yield tmp_sock_fname
+
+ os.close(tmp_sock_fh)
+ os.unlink(tmp_sock_fname)
+
+
+def test_prepare_makes_server_ready():
+ """Check that prepare() makes the server ready, and stop() clears it."""
+ httpserver = HTTPServer(
+ bind_addr=(ANY_INTERFACE_IPV4, EPHEMERAL_PORT),
+ gateway=Gateway,
+ )
+
+ assert not httpserver.ready
+ assert not httpserver.requests._threads
+
+ httpserver.prepare()
+
+ assert httpserver.ready
+ assert httpserver.requests._threads
+ for thr in httpserver.requests._threads:
+ assert thr.ready
+
+ httpserver.stop()
+
+ assert not httpserver.requests._threads
+ assert not httpserver.ready
+
+
+def test_stop_interrupts_serve():
+ """Check that stop() interrupts running of serve()."""
+ httpserver = HTTPServer(
+ bind_addr=(ANY_INTERFACE_IPV4, EPHEMERAL_PORT),
+ gateway=Gateway,
+ )
+
+ httpserver.prepare()
+ serve_thread = threading.Thread(target=httpserver.serve)
+ serve_thread.start()
+
+ serve_thread.join(0.5)
+ assert serve_thread.is_alive()
+
+ httpserver.stop()
+
+ serve_thread.join(0.5)
+ assert not serve_thread.is_alive()
+
+
+@pytest.mark.parametrize(
+ 'exc_cls',
+ (
+ IOError,
+ KeyboardInterrupt,
+ OSError,
+ RuntimeError,
+ ),
+)
+def test_server_interrupt(exc_cls):
+ """Check that assigning interrupt stops the server."""
+ interrupt_msg = 'should catch {uuid!s}'.format(uuid=uuid.uuid4())
+ raise_marker_sentinel = object()
+
+ httpserver = HTTPServer(
+ bind_addr=(ANY_INTERFACE_IPV4, EPHEMERAL_PORT),
+ gateway=Gateway,
+ )
+
+ result_q = queue.Queue()
+
+ def serve_thread():
+ # ensure we catch the exception on the serve() thread
+ try:
+ httpserver.serve()
+ except exc_cls as e:
+ if str(e) == interrupt_msg:
+ result_q.put(raise_marker_sentinel)
+
+ httpserver.prepare()
+ serve_thread = threading.Thread(target=serve_thread)
+ serve_thread.start()
+
+ serve_thread.join(0.5)
+ assert serve_thread.is_alive()
+
+ # this exception is raised on the serve() thread,
+ # not in the calling context.
+ httpserver.interrupt = exc_cls(interrupt_msg)
+
+ serve_thread.join(0.5)
+ assert not serve_thread.is_alive()
+ assert result_q.get_nowait() is raise_marker_sentinel
+
+
+def test_serving_is_false_and_stop_returns_after_ctrlc():
+ """Check that stop() interrupts running of serve()."""
+ httpserver = HTTPServer(
+ bind_addr=(ANY_INTERFACE_IPV4, EPHEMERAL_PORT),
+ gateway=Gateway,
+ )
+
+ httpserver.prepare()
+
+ # Simulate a Ctrl-C on the first call to `run`.
+ def raise_keyboard_interrupt(*args, **kwargs):
+ raise KeyboardInterrupt()
+
+ httpserver._connections._selector.select = raise_keyboard_interrupt
+
+ serve_thread = threading.Thread(target=httpserver.serve)
+ serve_thread.start()
+
+ # The thread should exit right away due to the interrupt.
+ serve_thread.join(
+ httpserver.expiration_interval * (4 if IS_SLOW_ENV else 2),
+ )
+ assert not serve_thread.is_alive()
+
+ assert not httpserver._connections._serving
+ httpserver.stop()
+
+
+@pytest.mark.parametrize(
+ 'ip_addr',
+ (
+ ANY_INTERFACE_IPV4,
+ ANY_INTERFACE_IPV6,
+ ),
+)
+def test_bind_addr_inet(http_server, ip_addr):
+ """Check that bound IP address is stored in server."""
+ httpserver = http_server.send((ip_addr, EPHEMERAL_PORT))
+
+ assert httpserver.bind_addr[0] == ip_addr
+ assert httpserver.bind_addr[1] != EPHEMERAL_PORT
+
+
+@unix_only_sock_test
+def test_bind_addr_unix(http_server, unix_sock_file):
+ """Check that bound UNIX socket address is stored in server."""
+ httpserver = http_server.send(unix_sock_file)
+
+ assert httpserver.bind_addr == unix_sock_file
+
+
+@unix_only_sock_test
+def test_bind_addr_unix_abstract(http_server, unix_abstract_sock):
+ """Check that bound UNIX abstract socket address is stored in server."""
+ httpserver = http_server.send(unix_abstract_sock)
+
+ assert httpserver.bind_addr == unix_abstract_sock
+
+
+PEERCRED_IDS_URI = '/peer_creds/ids'
+PEERCRED_TEXTS_URI = '/peer_creds/texts'
+
+
+class _TestGateway(Gateway):
+ def respond(self):
+ req = self.req
+ conn = req.conn
+ req_uri = bton(req.uri)
+ if req_uri == PEERCRED_IDS_URI:
+ peer_creds = conn.peer_pid, conn.peer_uid, conn.peer_gid
+ self.send_payload('|'.join(map(str, peer_creds)))
+ return
+ elif req_uri == PEERCRED_TEXTS_URI:
+ self.send_payload('!'.join((conn.peer_user, conn.peer_group)))
+ return
+ return super(_TestGateway, self).respond()
+
+ def send_payload(self, payload):
+ req = self.req
+ req.status = b'200 OK'
+ req.ensure_headers_sent()
+ req.write(ntob(payload))
+
+
+@pytest.fixture
+def peercreds_enabled_server(http_server, unix_sock_file):
+ """Construct a test server with ``peercreds_enabled``."""
+ httpserver = http_server.send(unix_sock_file)
+ httpserver.gateway = _TestGateway
+ httpserver.peercreds_enabled = True
+ return httpserver
+
+
+@unix_only_sock_test
+@non_macos_sock_test
+def test_peercreds_unix_sock(peercreds_enabled_server):
+ """Check that ``PEERCRED`` lookup works when enabled."""
+ httpserver = peercreds_enabled_server
+ bind_addr = httpserver.bind_addr
+
+ if isinstance(bind_addr, six.binary_type):
+ bind_addr = bind_addr.decode()
+
+ # pylint: disable=possibly-unused-variable
+ quoted = urllib.parse.quote(bind_addr, safe='')
+ unix_base_uri = 'http+unix://{quoted}'.format(**locals())
+
+ expected_peercreds = os.getpid(), os.getuid(), os.getgid()
+ expected_peercreds = '|'.join(map(str, expected_peercreds))
+
+ with requests_unixsocket.monkeypatch():
+ peercreds_resp = requests.get(unix_base_uri + PEERCRED_IDS_URI)
+ peercreds_resp.raise_for_status()
+ assert peercreds_resp.text == expected_peercreds
+
+ peercreds_text_resp = requests.get(unix_base_uri + PEERCRED_TEXTS_URI)
+ assert peercreds_text_resp.status_code == 500
+
+
+@pytest.mark.skipif(
+ not IS_UID_GID_RESOLVABLE,
+ reason='Modules `grp` and `pwd` are not available '
+ 'under the current platform',
+)
+@unix_only_sock_test
+@non_macos_sock_test
+def test_peercreds_unix_sock_with_lookup(peercreds_enabled_server):
+ """Check that ``PEERCRED`` resolution works when enabled."""
+ httpserver = peercreds_enabled_server
+ httpserver.peercreds_resolve_enabled = True
+
+ bind_addr = httpserver.bind_addr
+
+ if isinstance(bind_addr, six.binary_type):
+ bind_addr = bind_addr.decode()
+
+ # pylint: disable=possibly-unused-variable
+ quoted = urllib.parse.quote(bind_addr, safe='')
+ unix_base_uri = 'http+unix://{quoted}'.format(**locals())
+
+ import grp
+ import pwd
+ expected_textcreds = (
+ pwd.getpwuid(os.getuid()).pw_name,
+ grp.getgrgid(os.getgid()).gr_name,
+ )
+ expected_textcreds = '!'.join(map(str, expected_textcreds))
+ with requests_unixsocket.monkeypatch():
+ peercreds_text_resp = requests.get(unix_base_uri + PEERCRED_TEXTS_URI)
+ peercreds_text_resp.raise_for_status()
+ assert peercreds_text_resp.text == expected_textcreds
+
+
+@pytest.mark.skipif(
+ IS_WINDOWS,
+ reason='This regression test is for a Linux bug, '
+ 'and the resource module is not available on Windows',
+)
+@pytest.mark.parametrize(
+ 'resource_limit',
+ (
+ 1024,
+ 2048,
+ ),
+ indirect=('resource_limit',),
+)
+@pytest.mark.usefixtures('many_open_sockets')
+def test_high_number_of_file_descriptors(native_server_client, resource_limit):
+ """Test the server does not crash with a high file-descriptor value.
+
+ This test shouldn't cause a server crash when trying to access
+ file-descriptor higher than 1024.
+
+ The earlier implementation used to rely on ``select()`` syscall that
+ doesn't support file descriptors with numbers higher than 1024.
+ """
+ # We want to force the server to use a file-descriptor with
+ # a number above resource_limit
+
+ # Patch the method that processes
+ _old_process_conn = native_server_client.server_instance.process_conn
+
+ def native_process_conn(conn):
+ native_process_conn.filenos.add(conn.socket.fileno())
+ return _old_process_conn(conn)
+ native_process_conn.filenos = set()
+ native_server_client.server_instance.process_conn = native_process_conn
+
+ # Trigger a crash if select() is used in the implementation
+ native_server_client.connect('/')
+
+ # Ensure that at least one connection got accepted, otherwise the
+ # follow-up check wouldn't make sense
+ assert len(native_process_conn.filenos) > 0
+
+ # Check at least one of the sockets created are above the target number
+ assert any(fn >= resource_limit for fn in native_process_conn.filenos)
+
+
+if not IS_WINDOWS:
+ test_high_number_of_file_descriptors = pytest.mark.forked(
+ test_high_number_of_file_descriptors,
+ )
+
+
+@pytest.fixture
+def _garbage_bin():
+ """Disable garbage collection when this fixture is in use."""
+ with DefaultGc().nogc():
+ yield
+
+
+@pytest.fixture
+def resource_limit(request):
+ """Set the resource limit two times bigger then requested."""
+ resource = pytest.importorskip(
+ 'resource',
+ reason='The "resource" module is Unix-specific',
+ )
+
+ # Get current resource limits to restore them later
+ soft_limit, hard_limit = resource.getrlimit(resource.RLIMIT_NOFILE)
+
+ # We have to increase the nofile limit above 1024
+ # Otherwise we see a 'Too many files open' error, instead of
+ # an error due to the file descriptor number being too high
+ resource.setrlimit(
+ resource.RLIMIT_NOFILE,
+ (request.param * 2, hard_limit),
+ )
+
+ try: # noqa: WPS501
+ yield request.param
+ finally:
+ # Reset the resource limit back to the original soft limit
+ resource.setrlimit(resource.RLIMIT_NOFILE, (soft_limit, hard_limit))
+
+
+@pytest.fixture
+def many_open_sockets(request, resource_limit):
+ """Allocate a lot of file descriptors by opening dummy sockets."""
+ # NOTE: `@pytest.mark.usefixtures` doesn't work on fixtures which
+ # NOTE: forces us to invoke this one dynamically to avoid having an
+ # NOTE: unused argument.
+ request.getfixturevalue('_garbage_bin')
+
+ # Hoard a lot of file descriptors by opening and storing a lot of sockets
+ test_sockets = []
+ # Open a lot of file descriptors, so the next one the server
+ # opens is a high number
+ try:
+ for _ in range(resource_limit):
+ sock = socket.socket()
+ test_sockets.append(sock)
+ # If we reach a high enough number, we don't need to open more
+ if sock.fileno() >= resource_limit:
+ break
+ # Check we opened enough descriptors to reach a high number
+ the_highest_fileno = test_sockets[-1].fileno()
+ assert the_highest_fileno >= resource_limit
+ yield the_highest_fileno
+ finally:
+ # Close our open resources
+ for test_socket in test_sockets:
+ test_socket.close()
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/test/test_ssl.py b/monEnvTP/lib/python3.8/site-packages/cheroot/test/test_ssl.py
new file mode 100644
index 0000000000000000000000000000000000000000..8da330dfa0d046874c1e94fd5352550765863290
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/test/test_ssl.py
@@ -0,0 +1,763 @@
+"""Tests for TLS support."""
+# -*- coding: utf-8 -*-
+# vim: set fileencoding=utf-8 :
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+import functools
+import json
+import os
+import ssl
+import subprocess
+import sys
+import threading
+import time
+import traceback
+
+import OpenSSL.SSL
+import pytest
+import requests
+import six
+import trustme
+
+from .._compat import bton, ntob, ntou
+from .._compat import IS_ABOVE_OPENSSL10, IS_CI, IS_PYPY
+from .._compat import IS_LINUX, IS_MACOS, IS_WINDOWS
+from ..server import HTTPServer, get_ssl_adapter_class
+from ..testing import (
+ ANY_INTERFACE_IPV4,
+ ANY_INTERFACE_IPV6,
+ EPHEMERAL_PORT,
+ # get_server_client,
+ _get_conn_data,
+ _probe_ipv6_sock,
+)
+from ..wsgi import Gateway_10
+
+
+IS_GITHUB_ACTIONS_WORKFLOW = bool(os.getenv('GITHUB_WORKFLOW'))
+IS_WIN2016 = (
+ IS_WINDOWS
+ # pylint: disable=unsupported-membership-test
+ and b'Microsoft Windows Server 2016 Datacenter' in subprocess.check_output(
+ ('systeminfo',),
+ )
+)
+IS_LIBRESSL_BACKEND = ssl.OPENSSL_VERSION.startswith('LibreSSL')
+IS_PYOPENSSL_SSL_VERSION_1_0 = (
+ OpenSSL.SSL.SSLeay_version(OpenSSL.SSL.SSLEAY_VERSION).
+ startswith(b'OpenSSL 1.0.')
+)
+PY27 = sys.version_info[:2] == (2, 7)
+PY34 = sys.version_info[:2] == (3, 4)
+PY3 = not six.PY2
+PY310_PLUS = sys.version_info[:2] >= (3, 10)
+
+
+_stdlib_to_openssl_verify = {
+ ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE,
+ ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER,
+ ssl.CERT_REQUIRED:
+ OpenSSL.SSL.VERIFY_PEER + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
+}
+
+
+fails_under_py3 = pytest.mark.xfail(
+ not six.PY2,
+ reason='Fails under Python 3+',
+)
+
+
+fails_under_py3_in_pypy = pytest.mark.xfail(
+ not six.PY2 and IS_PYPY,
+ reason='Fails under PyPy3',
+)
+
+
+missing_ipv6 = pytest.mark.skipif(
+ not _probe_ipv6_sock('::1'),
+ reason=''
+ 'IPv6 is disabled '
+ '(for example, under Travis CI '
+ 'which runs under GCE supporting only IPv4)',
+)
+
+
+class HelloWorldGateway(Gateway_10):
+ """Gateway responding with Hello World to root URI."""
+
+ def respond(self):
+ """Respond with dummy content via HTTP."""
+ req = self.req
+ req_uri = bton(req.uri)
+ if req_uri == '/':
+ req.status = b'200 OK'
+ req.ensure_headers_sent()
+ req.write(b'Hello world!')
+ return
+ if req_uri == '/env':
+ req.status = b'200 OK'
+ req.ensure_headers_sent()
+ env = self.get_environ()
+ # drop files so that it can be json dumped
+ env.pop('wsgi.errors')
+ env.pop('wsgi.input')
+ print(env)
+ req.write(json.dumps(env).encode('utf-8'))
+ return
+ return super(HelloWorldGateway, self).respond()
+
+
+def make_tls_http_server(bind_addr, ssl_adapter, request):
+ """Create and start an HTTP server bound to ``bind_addr``."""
+ httpserver = HTTPServer(
+ bind_addr=bind_addr,
+ gateway=HelloWorldGateway,
+ )
+ # httpserver.gateway = HelloWorldGateway
+ httpserver.ssl_adapter = ssl_adapter
+
+ threading.Thread(target=httpserver.safe_start).start()
+
+ while not httpserver.ready:
+ time.sleep(0.1)
+
+ request.addfinalizer(httpserver.stop)
+
+ return httpserver
+
+
+@pytest.fixture
+def tls_http_server(request):
+ """Provision a server creator as a fixture."""
+ return functools.partial(make_tls_http_server, request=request)
+
+
+@pytest.fixture
+def ca():
+ """Provide a certificate authority via fixture."""
+ return trustme.CA()
+
+
+@pytest.fixture
+def tls_ca_certificate_pem_path(ca):
+ """Provide a certificate authority certificate file via fixture."""
+ with ca.cert_pem.tempfile() as ca_cert_pem:
+ yield ca_cert_pem
+
+
+@pytest.fixture
+def tls_certificate(ca):
+ """Provide a leaf certificate via fixture."""
+ interface, _host, _port = _get_conn_data(ANY_INTERFACE_IPV4)
+ return ca.issue_cert(ntou(interface))
+
+
+@pytest.fixture
+def tls_certificate_chain_pem_path(tls_certificate):
+ """Provide a certificate chain PEM file path via fixture."""
+ with tls_certificate.private_key_and_cert_chain_pem.tempfile() as cert_pem:
+ yield cert_pem
+
+
+@pytest.fixture
+def tls_certificate_private_key_pem_path(tls_certificate):
+ """Provide a certificate private key PEM file path via fixture."""
+ with tls_certificate.private_key_pem.tempfile() as cert_key_pem:
+ yield cert_key_pem
+
+
+def _thread_except_hook(exceptions, args):
+ """Append uncaught exception ``args`` in threads to ``exceptions``."""
+ if issubclass(args.exc_type, SystemExit):
+ return
+ # cannot store the exception, it references the thread's stack
+ exceptions.append((
+ args.exc_type,
+ str(args.exc_value),
+ ''.join(
+ traceback.format_exception(
+ args.exc_type, args.exc_value, args.exc_traceback,
+ ),
+ ),
+ ))
+
+
+@pytest.fixture
+def thread_exceptions():
+ """Provide a list of uncaught exceptions from threads via a fixture.
+
+ Only catches exceptions on Python 3.8+.
+ The list contains: ``(type, str(value), str(traceback))``
+ """
+ exceptions = []
+ # Python 3.8+
+ orig_hook = getattr(threading, 'excepthook', None)
+ if orig_hook is not None:
+ threading.excepthook = functools.partial(
+ _thread_except_hook, exceptions,
+ )
+ try:
+ yield exceptions
+ finally:
+ if orig_hook is not None:
+ threading.excepthook = orig_hook
+
+
+@pytest.mark.parametrize(
+ 'adapter_type',
+ (
+ 'builtin',
+ 'pyopenssl',
+ ),
+)
+def test_ssl_adapters(
+ tls_http_server, adapter_type,
+ tls_certificate,
+ tls_certificate_chain_pem_path,
+ tls_certificate_private_key_pem_path,
+ tls_ca_certificate_pem_path,
+):
+ """Test ability to connect to server via HTTPS using adapters."""
+ interface, _host, port = _get_conn_data(ANY_INTERFACE_IPV4)
+ tls_adapter_cls = get_ssl_adapter_class(name=adapter_type)
+ tls_adapter = tls_adapter_cls(
+ tls_certificate_chain_pem_path, tls_certificate_private_key_pem_path,
+ )
+ if adapter_type == 'pyopenssl':
+ tls_adapter.context = tls_adapter.get_context()
+
+ tls_certificate.configure_cert(tls_adapter.context)
+
+ tlshttpserver = tls_http_server((interface, port), tls_adapter)
+
+ # testclient = get_server_client(tlshttpserver)
+ # testclient.get('/')
+
+ interface, _host, port = _get_conn_data(
+ tlshttpserver.bind_addr,
+ )
+
+ resp = requests.get(
+ 'https://{host!s}:{port!s}/'.format(host=interface, port=port),
+ verify=tls_ca_certificate_pem_path,
+ )
+
+ assert resp.status_code == 200
+ assert resp.text == 'Hello world!'
+
+
+@pytest.mark.parametrize( # noqa: C901 # FIXME
+ 'adapter_type',
+ (
+ 'builtin',
+ 'pyopenssl',
+ ),
+)
+@pytest.mark.parametrize(
+ ('is_trusted_cert', 'tls_client_identity'),
+ (
+ (True, 'localhost'), (True, '127.0.0.1'),
+ (True, '*.localhost'), (True, 'not_localhost'),
+ (False, 'localhost'),
+ ),
+)
+@pytest.mark.parametrize(
+ 'tls_verify_mode',
+ (
+ ssl.CERT_NONE, # server shouldn't validate client cert
+ ssl.CERT_OPTIONAL, # same as CERT_REQUIRED in client mode, don't use
+ ssl.CERT_REQUIRED, # server should validate if client cert CA is OK
+ ),
+)
+@pytest.mark.xfail(
+ IS_PYPY and IS_CI,
+ reason='Fails under PyPy in CI for unknown reason',
+ strict=False,
+)
+def test_tls_client_auth( # noqa: C901 # FIXME
+ # FIXME: remove twisted logic, separate tests
+ mocker,
+ tls_http_server, adapter_type,
+ ca,
+ tls_certificate,
+ tls_certificate_chain_pem_path,
+ tls_certificate_private_key_pem_path,
+ tls_ca_certificate_pem_path,
+ is_trusted_cert, tls_client_identity,
+ tls_verify_mode,
+):
+ """Verify that client TLS certificate auth works correctly."""
+ test_cert_rejection = (
+ tls_verify_mode != ssl.CERT_NONE
+ and not is_trusted_cert
+ )
+ interface, _host, port = _get_conn_data(ANY_INTERFACE_IPV4)
+
+ client_cert_root_ca = ca if is_trusted_cert else trustme.CA()
+ with mocker.mock_module.patch(
+ 'idna.core.ulabel',
+ return_value=ntob(tls_client_identity),
+ ):
+ client_cert = client_cert_root_ca.issue_cert(
+ ntou(tls_client_identity),
+ )
+ del client_cert_root_ca
+
+ with client_cert.private_key_and_cert_chain_pem.tempfile() as cl_pem:
+ tls_adapter_cls = get_ssl_adapter_class(name=adapter_type)
+ tls_adapter = tls_adapter_cls(
+ tls_certificate_chain_pem_path,
+ tls_certificate_private_key_pem_path,
+ )
+ if adapter_type == 'pyopenssl':
+ tls_adapter.context = tls_adapter.get_context()
+ tls_adapter.context.set_verify(
+ _stdlib_to_openssl_verify[tls_verify_mode],
+ lambda conn, cert, errno, depth, preverify_ok: preverify_ok,
+ )
+ else:
+ tls_adapter.context.verify_mode = tls_verify_mode
+
+ ca.configure_trust(tls_adapter.context)
+ tls_certificate.configure_cert(tls_adapter.context)
+
+ tlshttpserver = tls_http_server((interface, port), tls_adapter)
+
+ interface, _host, port = _get_conn_data(tlshttpserver.bind_addr)
+
+ make_https_request = functools.partial(
+ requests.get,
+ 'https://{host!s}:{port!s}/'.format(host=interface, port=port),
+
+ # Server TLS certificate verification:
+ verify=tls_ca_certificate_pem_path,
+
+ # Client TLS certificate verification:
+ cert=cl_pem,
+ )
+
+ if not test_cert_rejection:
+ resp = make_https_request()
+ is_req_successful = resp.status_code == 200
+ if (
+ not is_req_successful
+ and IS_PYOPENSSL_SSL_VERSION_1_0
+ and adapter_type == 'builtin'
+ and tls_verify_mode == ssl.CERT_REQUIRED
+ and tls_client_identity == 'localhost'
+ and is_trusted_cert
+ ) or PY34:
+ pytest.xfail(
+ 'OpenSSL 1.0 has problems with verifying client certs',
+ )
+ assert is_req_successful
+ assert resp.text == 'Hello world!'
+ return
+
+ # xfail some flaky tests
+ # https://github.com/cherrypy/cheroot/issues/237
+ issue_237 = (
+ IS_MACOS
+ and adapter_type == 'builtin'
+ and tls_verify_mode != ssl.CERT_NONE
+ )
+ if issue_237:
+ pytest.xfail('Test sometimes fails')
+
+ expected_ssl_errors = (
+ requests.exceptions.SSLError,
+ OpenSSL.SSL.Error,
+ ) if PY34 else (
+ requests.exceptions.SSLError,
+ )
+ if IS_WINDOWS or IS_GITHUB_ACTIONS_WORKFLOW:
+ expected_ssl_errors += requests.exceptions.ConnectionError,
+ with pytest.raises(expected_ssl_errors) as ssl_err:
+ make_https_request()
+
+ if PY34 and isinstance(ssl_err, OpenSSL.SSL.Error):
+ pytest.xfail(
+ 'OpenSSL behaves wierdly under Python 3.4 '
+ 'because of an outdated urllib3',
+ )
+
+ try:
+ err_text = ssl_err.value.args[0].reason.args[0].args[0]
+ except AttributeError:
+ if PY34:
+ pytest.xfail('OpenSSL behaves wierdly under Python 3.4')
+ elif IS_WINDOWS or IS_GITHUB_ACTIONS_WORKFLOW:
+ err_text = str(ssl_err.value)
+ else:
+ raise
+
+ if isinstance(err_text, int):
+ err_text = str(ssl_err.value)
+
+ expected_substrings = (
+ 'sslv3 alert bad certificate' if IS_LIBRESSL_BACKEND
+ else 'tlsv1 alert unknown ca',
+ )
+ if not six.PY2:
+ if IS_MACOS and IS_PYPY and adapter_type == 'pyopenssl':
+ expected_substrings = ('tlsv1 alert unknown ca',)
+ if (
+ tls_verify_mode in (
+ ssl.CERT_REQUIRED,
+ ssl.CERT_OPTIONAL,
+ )
+ and not is_trusted_cert
+ and tls_client_identity == 'localhost'
+ ):
+ expected_substrings += (
+ 'bad handshake: '
+ "SysCallError(10054, 'WSAECONNRESET')",
+ "('Connection aborted.', "
+ 'OSError("(10054, \'WSAECONNRESET\')"))',
+ "('Connection aborted.', "
+ 'OSError("(10054, \'WSAECONNRESET\')",))',
+ "('Connection aborted.', "
+ 'error("(10054, \'WSAECONNRESET\')",))',
+ "('Connection aborted.', "
+ 'ConnectionResetError(10054, '
+ "'An existing connection was forcibly closed "
+ "by the remote host', None, 10054, None))",
+ "('Connection aborted.', "
+ 'error(10054, '
+ "'An existing connection was forcibly closed "
+ "by the remote host'))",
+ ) if IS_WINDOWS else (
+ "('Connection aborted.', "
+ 'OSError("(104, \'ECONNRESET\')"))',
+ "('Connection aborted.', "
+ 'OSError("(104, \'ECONNRESET\')",))',
+ "('Connection aborted.', "
+ 'error("(104, \'ECONNRESET\')",))',
+ "('Connection aborted.', "
+ "ConnectionResetError(104, 'Connection reset by peer'))",
+ "('Connection aborted.', "
+ "error(104, 'Connection reset by peer'))",
+ ) if (
+ IS_GITHUB_ACTIONS_WORKFLOW
+ and IS_LINUX
+ ) else (
+ "('Connection aborted.', "
+ "BrokenPipeError(32, 'Broken pipe'))",
+ )
+
+ if PY310_PLUS:
+ # FIXME: Figure out what's happening and correct the problem
+ expected_substrings += (
+ 'SSLError(SSLEOFError(8, '
+ "'EOF occurred in violation of protocol (_ssl.c:",
+ )
+ if IS_GITHUB_ACTIONS_WORKFLOW and IS_WINDOWS and PY310_PLUS:
+ expected_substrings += (
+ "('Connection aborted.', "
+ 'RemoteDisconnected('
+ "'Remote end closed connection without response'))",
+ )
+
+ assert any(e in err_text for e in expected_substrings)
+
+
+@pytest.mark.parametrize( # noqa: C901 # FIXME
+ 'adapter_type',
+ (
+ pytest.param(
+ 'builtin',
+ marks=pytest.mark.xfail(
+ IS_GITHUB_ACTIONS_WORKFLOW and IS_MACOS and PY310_PLUS,
+ reason='Unclosed TLS resource warnings happen on macOS '
+ 'under Python 3.10',
+ strict=False,
+ ),
+ ),
+ 'pyopenssl',
+ ),
+)
+@pytest.mark.parametrize(
+ ('tls_verify_mode', 'use_client_cert'),
+ (
+ (ssl.CERT_NONE, False),
+ (ssl.CERT_NONE, True),
+ (ssl.CERT_OPTIONAL, False),
+ (ssl.CERT_OPTIONAL, True),
+ (ssl.CERT_REQUIRED, True),
+ ),
+)
+def test_ssl_env( # noqa: C901 # FIXME
+ thread_exceptions,
+ recwarn,
+ mocker,
+ tls_http_server, adapter_type,
+ ca, tls_verify_mode, tls_certificate,
+ tls_certificate_chain_pem_path,
+ tls_certificate_private_key_pem_path,
+ tls_ca_certificate_pem_path,
+ use_client_cert,
+):
+ """Test the SSL environment generated by the SSL adapters."""
+ interface, _host, port = _get_conn_data(ANY_INTERFACE_IPV4)
+
+ with mocker.mock_module.patch(
+ 'idna.core.ulabel',
+ return_value=ntob('127.0.0.1'),
+ ):
+ client_cert = ca.issue_cert(ntou('127.0.0.1'))
+
+ with client_cert.private_key_and_cert_chain_pem.tempfile() as cl_pem:
+ tls_adapter_cls = get_ssl_adapter_class(name=adapter_type)
+ tls_adapter = tls_adapter_cls(
+ tls_certificate_chain_pem_path,
+ tls_certificate_private_key_pem_path,
+ )
+ if adapter_type == 'pyopenssl':
+ tls_adapter.context = tls_adapter.get_context()
+ tls_adapter.context.set_verify(
+ _stdlib_to_openssl_verify[tls_verify_mode],
+ lambda conn, cert, errno, depth, preverify_ok: preverify_ok,
+ )
+ else:
+ tls_adapter.context.verify_mode = tls_verify_mode
+
+ ca.configure_trust(tls_adapter.context)
+ tls_certificate.configure_cert(tls_adapter.context)
+
+ tlswsgiserver = tls_http_server((interface, port), tls_adapter)
+
+ interface, _host, port = _get_conn_data(tlswsgiserver.bind_addr)
+
+ resp = requests.get(
+ 'https://' + interface + ':' + str(port) + '/env',
+ verify=tls_ca_certificate_pem_path,
+ cert=cl_pem if use_client_cert else None,
+ )
+ if PY34 and resp.status_code != 200:
+ pytest.xfail(
+ 'Python 3.4 has problems with verifying client certs',
+ )
+
+ env = json.loads(resp.content.decode('utf-8'))
+
+ # hard coded env
+ assert env['wsgi.url_scheme'] == 'https'
+ assert env['HTTPS'] == 'on'
+
+ # ensure these are present
+ for key in {'SSL_VERSION_INTERFACE', 'SSL_VERSION_LIBRARY'}:
+ assert key in env
+
+ # pyOpenSSL generates the env before the handshake completes
+ if adapter_type == 'pyopenssl':
+ return
+
+ for key in {'SSL_PROTOCOL', 'SSL_CIPHER'}:
+ assert key in env
+
+ # client certificate env
+ if tls_verify_mode == ssl.CERT_NONE or not use_client_cert:
+ assert env['SSL_CLIENT_VERIFY'] == 'NONE'
+ else:
+ assert env['SSL_CLIENT_VERIFY'] == 'SUCCESS'
+
+ with open(cl_pem, 'rt') as f:
+ assert env['SSL_CLIENT_CERT'] in f.read()
+
+ for key in {
+ 'SSL_CLIENT_M_VERSION', 'SSL_CLIENT_M_SERIAL',
+ 'SSL_CLIENT_I_DN', 'SSL_CLIENT_S_DN',
+ }:
+ assert key in env
+
+ # builtin ssl environment generation may use a loopback socket
+ # ensure no ResourceWarning was raised during the test
+ # NOTE: python 2.7 does not emit ResourceWarning for ssl sockets
+ if IS_PYPY:
+ # NOTE: PyPy doesn't have ResourceWarning
+ # Ref: https://doc.pypy.org/en/latest/cpython_differences.html
+ return
+ for warn in recwarn:
+ if not issubclass(warn.category, ResourceWarning):
+ continue
+
+ # the tests can sporadically generate resource warnings
+ # due to timing issues
+ # all of these sporadic warnings appear to be about socket.socket
+ # and have been observed to come from requests connection pool
+ msg = str(warn.message)
+ if 'socket.socket' in msg:
+ pytest.xfail(
+ '\n'.join((
+ 'Sometimes this test fails due to '
+ 'a socket.socket ResourceWarning:',
+ msg,
+ )),
+ )
+ pytest.fail(msg)
+
+ # to perform the ssl handshake over that loopback socket,
+ # the builtin ssl environment generation uses a thread
+ for _, _, trace in thread_exceptions:
+ print(trace, file=sys.stderr)
+ assert not thread_exceptions, ': '.join((
+ thread_exceptions[0][0].__name__,
+ thread_exceptions[0][1],
+ ))
+
+
+@pytest.mark.parametrize(
+ 'ip_addr',
+ (
+ ANY_INTERFACE_IPV4,
+ ANY_INTERFACE_IPV6,
+ ),
+)
+def test_https_over_http_error(http_server, ip_addr):
+ """Ensure that connecting over HTTPS to HTTP port is handled."""
+ httpserver = http_server.send((ip_addr, EPHEMERAL_PORT))
+ interface, _host, port = _get_conn_data(httpserver.bind_addr)
+ with pytest.raises(ssl.SSLError) as ssl_err:
+ six.moves.http_client.HTTPSConnection(
+ '{interface}:{port}'.format(
+ interface=interface,
+ port=port,
+ ),
+ ).request('GET', '/')
+ expected_substring = (
+ 'wrong version number' if IS_ABOVE_OPENSSL10
+ else 'unknown protocol'
+ )
+ assert expected_substring in ssl_err.value.args[-1]
+
+
+http_over_https_error_builtin_marks = []
+if IS_WINDOWS and six.PY2:
+ http_over_https_error_builtin_marks.append(
+ pytest.mark.flaky(reruns=5, reruns_delay=2),
+ )
+
+
+@pytest.mark.parametrize(
+ 'adapter_type',
+ (
+ pytest.param(
+ 'builtin',
+ marks=http_over_https_error_builtin_marks,
+ ),
+ 'pyopenssl',
+ ),
+)
+@pytest.mark.parametrize(
+ 'ip_addr',
+ (
+ ANY_INTERFACE_IPV4,
+ pytest.param(ANY_INTERFACE_IPV6, marks=missing_ipv6),
+ ),
+)
+def test_http_over_https_error(
+ tls_http_server, adapter_type,
+ ca, ip_addr,
+ tls_certificate,
+ tls_certificate_chain_pem_path,
+ tls_certificate_private_key_pem_path,
+):
+ """Ensure that connecting over HTTP to HTTPS port is handled."""
+ # disable some flaky tests
+ # https://github.com/cherrypy/cheroot/issues/225
+ issue_225 = (
+ IS_MACOS
+ and adapter_type == 'builtin'
+ )
+ if issue_225:
+ pytest.xfail('Test fails in Travis-CI')
+
+ tls_adapter_cls = get_ssl_adapter_class(name=adapter_type)
+ tls_adapter = tls_adapter_cls(
+ tls_certificate_chain_pem_path, tls_certificate_private_key_pem_path,
+ )
+ if adapter_type == 'pyopenssl':
+ tls_adapter.context = tls_adapter.get_context()
+
+ tls_certificate.configure_cert(tls_adapter.context)
+
+ interface, _host, port = _get_conn_data(ip_addr)
+ tlshttpserver = tls_http_server((interface, port), tls_adapter)
+
+ interface, _host, port = _get_conn_data(
+ tlshttpserver.bind_addr,
+ )
+
+ fqdn = interface
+ if ip_addr is ANY_INTERFACE_IPV6:
+ fqdn = '[{fqdn}]'.format(**locals())
+
+ expect_fallback_response_over_plain_http = (
+ (
+ adapter_type == 'pyopenssl'
+ and (IS_ABOVE_OPENSSL10 or not six.PY2)
+ )
+ or PY27
+ ) or (
+ IS_GITHUB_ACTIONS_WORKFLOW
+ and IS_WINDOWS
+ and six.PY2
+ and not IS_WIN2016
+ )
+ if (
+ IS_GITHUB_ACTIONS_WORKFLOW
+ and IS_WINDOWS
+ and six.PY2
+ and IS_WIN2016
+ and adapter_type == 'builtin'
+ and ip_addr is ANY_INTERFACE_IPV6
+ ):
+ expect_fallback_response_over_plain_http = True
+ if (
+ IS_GITHUB_ACTIONS_WORKFLOW
+ and IS_WINDOWS
+ and six.PY2
+ and not IS_WIN2016
+ and adapter_type == 'builtin'
+ and ip_addr is not ANY_INTERFACE_IPV6
+ ):
+ expect_fallback_response_over_plain_http = False
+ if expect_fallback_response_over_plain_http:
+ resp = requests.get(
+ 'http://{host!s}:{port!s}/'.format(host=fqdn, port=port),
+ )
+ assert resp.status_code == 400
+ assert resp.text == (
+ 'The client sent a plain HTTP request, '
+ 'but this server only speaks HTTPS on this port.'
+ )
+ return
+
+ with pytest.raises(requests.exceptions.ConnectionError) as ssl_err:
+ requests.get( # FIXME: make stdlib ssl behave like PyOpenSSL
+ 'http://{host!s}:{port!s}/'.format(host=fqdn, port=port),
+ )
+
+ if IS_LINUX:
+ expected_error_code, expected_error_text = (
+ 104, 'Connection reset by peer',
+ )
+ if IS_MACOS:
+ expected_error_code, expected_error_text = (
+ 54, 'Connection reset by peer',
+ )
+ if IS_WINDOWS:
+ expected_error_code, expected_error_text = (
+ 10054,
+ 'An existing connection was forcibly closed by the remote host',
+ )
+
+ underlying_error = ssl_err.value.args[0].args[-1]
+ err_text = str(underlying_error)
+ assert underlying_error.errno == expected_error_code, (
+ 'The underlying error is {underlying_error!r}'.
+ format(**locals())
+ )
+ assert expected_error_text in err_text
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/test/test_wsgi.py b/monEnvTP/lib/python3.8/site-packages/cheroot/test/test_wsgi.py
new file mode 100644
index 0000000000000000000000000000000000000000..91dfb71e7a5ed12e484fb6e5fc7763838172f3e5
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/test/test_wsgi.py
@@ -0,0 +1,83 @@
+"""Test wsgi."""
+
+from concurrent.futures.thread import ThreadPoolExecutor
+from traceback import print_tb
+
+import pytest
+import portend
+import requests
+from requests_toolbelt.sessions import BaseUrlSession as Session
+from jaraco.context import ExceptionTrap
+
+from cheroot import wsgi
+from cheroot._compat import IS_MACOS, IS_WINDOWS
+
+
+IS_SLOW_ENV = IS_MACOS or IS_WINDOWS
+
+
+@pytest.fixture
+def simple_wsgi_server():
+ """Fucking simple wsgi server fixture (duh)."""
+ port = portend.find_available_local_port()
+
+ def app(_environ, start_response):
+ status = '200 OK'
+ response_headers = [('Content-type', 'text/plain')]
+ start_response(status, response_headers)
+ return [b'Hello world!']
+
+ host = '::'
+ addr = host, port
+ server = wsgi.Server(addr, app, timeout=600 if IS_SLOW_ENV else 20)
+ # pylint: disable=possibly-unused-variable
+ url = 'http://localhost:{port}/'.format(**locals())
+ # pylint: disable=possibly-unused-variable
+ with server._run_in_thread() as thread:
+ yield locals()
+
+
+def test_connection_keepalive(simple_wsgi_server):
+ """Test the connection keepalive works (duh)."""
+ session = Session(base_url=simple_wsgi_server['url'])
+ pooled = requests.adapters.HTTPAdapter(
+ pool_connections=1, pool_maxsize=1000,
+ )
+ session.mount('http://', pooled)
+
+ def do_request():
+ with ExceptionTrap(requests.exceptions.ConnectionError) as trap:
+ resp = session.get('info')
+ resp.raise_for_status()
+ print_tb(trap.tb)
+ return bool(trap)
+
+ with ThreadPoolExecutor(max_workers=10 if IS_SLOW_ENV else 50) as pool:
+ tasks = [
+ pool.submit(do_request)
+ for n in range(250 if IS_SLOW_ENV else 1000)
+ ]
+ failures = sum(task.result() for task in tasks)
+
+ assert not failures
+
+
+def test_gateway_start_response_called_twice(monkeypatch):
+ """Verify that repeat calls of ``Gateway.start_response()`` fail."""
+ monkeypatch.setattr(wsgi.Gateway, 'get_environ', lambda self: {})
+ wsgi_gateway = wsgi.Gateway(None)
+ wsgi_gateway.started_response = True
+
+ err_msg = '^WSGI start_response called a second time with no exc_info.$'
+ with pytest.raises(RuntimeError, match=err_msg):
+ wsgi_gateway.start_response('200', (), None)
+
+
+def test_gateway_write_needs_start_response_called_before(monkeypatch):
+ """Check that calling ``Gateway.write()`` needs started response."""
+ monkeypatch.setattr(wsgi.Gateway, 'get_environ', lambda self: {})
+ wsgi_gateway = wsgi.Gateway(None)
+
+ err_msg = '^WSGI write called before start_response.$'
+ with pytest.raises(RuntimeError, match=err_msg):
+ wsgi_gateway.write(None) # The actual arg value is unimportant
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/test/webtest.py b/monEnvTP/lib/python3.8/site-packages/cheroot/test/webtest.py
new file mode 100644
index 0000000000000000000000000000000000000000..118014a6fc4d76686e38ab7434129efa336a7533
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/test/webtest.py
@@ -0,0 +1,613 @@
+"""Extensions to unittest for web frameworks.
+
+Use the :py:meth:`WebCase.getPage` method to request a page
+from your HTTP server.
+
+Framework Integration
+=====================
+If you have control over your server process, you can handle errors
+in the server-side of the HTTP conversation a bit better. You must run
+both the client (your :py:class:`WebCase` tests) and the server in the
+same process (but in separate threads, obviously).
+When an error occurs in the framework, call server_error. It will print
+the traceback to stdout, and keep any assertions you have from running
+(the assumption is that, if the server errors, the page output will not
+be of further significance to your tests).
+"""
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+import pprint
+import re
+import socket
+import sys
+import time
+import traceback
+import os
+import json
+import unittest # pylint: disable=deprecated-module,preferred-module
+import warnings
+import functools
+
+from six.moves import http_client, map, urllib_parse
+import six
+
+from more_itertools.more import always_iterable
+import jaraco.functools
+
+
+def interface(host):
+ """Return an IP address for a client connection given the server host.
+
+ If the server is listening on '0.0.0.0' (INADDR_ANY)
+ or '::' (IN6ADDR_ANY), this will return the proper localhost.
+ """
+ if host == '0.0.0.0':
+ # INADDR_ANY, which should respond on localhost.
+ return '127.0.0.1'
+ if host == '::':
+ # IN6ADDR_ANY, which should respond on localhost.
+ return '::1'
+ return host
+
+
+try:
+ # Jython support
+ if sys.platform[:4] == 'java':
+ def getchar():
+ """Get a key press."""
+ # Hopefully this is enough
+ return sys.stdin.read(1)
+ else:
+ # On Windows, msvcrt.getch reads a single char without output.
+ import msvcrt
+
+ def getchar():
+ """Get a key press."""
+ return msvcrt.getch()
+except ImportError:
+ # Unix getchr
+ import tty
+ import termios
+
+ def getchar():
+ """Get a key press."""
+ fd = sys.stdin.fileno()
+ old_settings = termios.tcgetattr(fd)
+ try:
+ tty.setraw(sys.stdin.fileno())
+ ch = sys.stdin.read(1)
+ finally:
+ termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
+ return ch
+
+
+# from jaraco.properties
+class NonDataProperty:
+ """Non-data property decorator."""
+
+ def __init__(self, fget):
+ """Initialize a non-data property."""
+ assert fget is not None, 'fget cannot be none'
+ assert callable(fget), 'fget must be callable'
+ self.fget = fget
+
+ def __get__(self, obj, objtype=None):
+ """Return a class property."""
+ if obj is None:
+ return self
+ return self.fget(obj)
+
+
+class WebCase(unittest.TestCase):
+ """Helper web test suite base."""
+
+ HOST = '127.0.0.1'
+ PORT = 8000
+ HTTP_CONN = http_client.HTTPConnection
+ PROTOCOL = 'HTTP/1.1'
+
+ scheme = 'http'
+ url = None
+ ssl_context = None
+
+ status = None
+ headers = None
+ body = None
+
+ encoding = 'utf-8'
+
+ time = None
+
+ @property
+ def _Conn(self):
+ """Return HTTPConnection or HTTPSConnection based on self.scheme.
+
+ * from :py:mod:`python:http.client`.
+ """
+ cls_name = '{scheme}Connection'.format(scheme=self.scheme.upper())
+ return getattr(http_client, cls_name)
+
+ def get_conn(self, auto_open=False):
+ """Return a connection to our HTTP server."""
+ conn = self._Conn(self.interface(), self.PORT)
+ # Automatically re-connect?
+ conn.auto_open = auto_open
+ conn.connect()
+ return conn
+
+ def set_persistent(self, on=True, auto_open=False):
+ """Make our HTTP_CONN persistent (or not).
+
+ If the 'on' argument is True (the default), then self.HTTP_CONN
+ will be set to an instance of HTTP(S)?Connection
+ to persist across requests.
+ As this class only allows for a single open connection, if
+ self already has an open connection, it will be closed.
+ """
+ try:
+ self.HTTP_CONN.close()
+ except (TypeError, AttributeError):
+ pass
+
+ self.HTTP_CONN = (
+ self.get_conn(auto_open=auto_open)
+ if on
+ else self._Conn
+ )
+
+ @property
+ def persistent(self):
+ """Presence of the persistent HTTP connection."""
+ return hasattr(self.HTTP_CONN, '__class__')
+
+ @persistent.setter
+ def persistent(self, on):
+ self.set_persistent(on)
+
+ def interface(self):
+ """Return an IP address for a client connection.
+
+ If the server is listening on '0.0.0.0' (INADDR_ANY)
+ or '::' (IN6ADDR_ANY), this will return the proper localhost.
+ """
+ return interface(self.HOST)
+
+ def getPage(
+ self, url, headers=None, method='GET', body=None,
+ protocol=None, raise_subcls=(),
+ ):
+ """Open the url with debugging support.
+
+ Return status, headers, body.
+
+ url should be the identifier passed to the server, typically a
+ server-absolute path and query string (sent between method and
+ protocol), and should only be an absolute URI if proxy support is
+ enabled in the server.
+
+ If the application under test generates absolute URIs, be sure
+ to wrap them first with :py:func:`strip_netloc`::
+
+ >>> class MyAppWebCase(WebCase):
+ ... def getPage(url, *args, **kwargs):
+ ... super(MyAppWebCase, self).getPage(
+ ... cheroot.test.webtest.strip_netloc(url),
+ ... *args, **kwargs
+ ... )
+
+ ``raise_subcls`` is passed through to :py:func:`openURL`.
+ """
+ ServerError.on = False
+
+ if isinstance(url, six.text_type):
+ url = url.encode('utf-8')
+ if isinstance(body, six.text_type):
+ body = body.encode('utf-8')
+
+ # for compatibility, support raise_subcls is None
+ raise_subcls = raise_subcls or ()
+
+ self.url = url
+ self.time = None
+ start = time.time()
+ result = openURL(
+ url, headers, method, body, self.HOST, self.PORT,
+ self.HTTP_CONN, protocol or self.PROTOCOL,
+ raise_subcls=raise_subcls,
+ ssl_context=self.ssl_context,
+ )
+ self.time = time.time() - start
+ self.status, self.headers, self.body = result
+
+ # Build a list of request cookies from the previous response cookies.
+ self.cookies = [
+ ('Cookie', v) for k, v in self.headers
+ if k.lower() == 'set-cookie'
+ ]
+
+ if ServerError.on:
+ raise ServerError()
+ return result
+
+ @NonDataProperty
+ def interactive(self):
+ """Determine whether tests are run in interactive mode.
+
+ Load interactivity setting from environment, where
+ the value can be numeric or a string like true or
+ False or 1 or 0.
+ """
+ env_str = os.environ.get('WEBTEST_INTERACTIVE', 'True')
+ is_interactive = bool(json.loads(env_str.lower()))
+ if is_interactive:
+ warnings.warn(
+ 'Interactive test failure interceptor support via '
+ 'WEBTEST_INTERACTIVE environment variable is deprecated.',
+ DeprecationWarning,
+ )
+ return is_interactive
+
+ console_height = 30
+
+ def _handlewebError(self, msg): # noqa: C901 # FIXME
+ print('')
+ print(' ERROR: %s' % msg)
+
+ if not self.interactive:
+ raise self.failureException(msg)
+
+ p = (
+ ' Show: '
+ '[B]ody [H]eaders [S]tatus [U]RL; '
+ '[I]gnore, [R]aise, or sys.e[X]it >> '
+ )
+ sys.stdout.write(p)
+ sys.stdout.flush()
+ while True:
+ i = getchar().upper()
+ if not isinstance(i, type('')):
+ i = i.decode('ascii')
+ if i not in 'BHSUIRX':
+ continue
+ print(i.upper()) # Also prints new line
+ if i == 'B':
+ for x, line in enumerate(self.body.splitlines()):
+ if (x + 1) % self.console_height == 0:
+ # The \r and comma should make the next line overwrite
+ sys.stdout.write('<-- More -->\r')
+ m = getchar().lower()
+ # Erase our "More" prompt
+ sys.stdout.write(' \r')
+ if m == 'q':
+ break
+ print(line)
+ elif i == 'H':
+ pprint.pprint(self.headers)
+ elif i == 'S':
+ print(self.status)
+ elif i == 'U':
+ print(self.url)
+ elif i == 'I':
+ # return without raising the normal exception
+ return
+ elif i == 'R':
+ raise self.failureException(msg)
+ elif i == 'X':
+ sys.exit()
+ sys.stdout.write(p)
+ sys.stdout.flush()
+
+ @property
+ def status_code(self): # noqa: D401; irrelevant for properties
+ """Integer HTTP status code."""
+ return int(self.status[:3])
+
+ def status_matches(self, expected):
+ """Check whether actual status matches expected."""
+ actual = (
+ self.status_code
+ if isinstance(expected, int) else
+ self.status
+ )
+ return expected == actual
+
+ def assertStatus(self, status, msg=None):
+ """Fail if self.status != status.
+
+ status may be integer code, exact string status, or
+ iterable of allowed possibilities.
+ """
+ if any(map(self.status_matches, always_iterable(status))):
+ return
+
+ tmpl = 'Status {self.status} does not match {status}'
+ msg = msg or tmpl.format(**locals())
+ self._handlewebError(msg)
+
+ def assertHeader(self, key, value=None, msg=None):
+ """Fail if (key, [value]) not in self.headers."""
+ lowkey = key.lower()
+ for k, v in self.headers:
+ if k.lower() == lowkey:
+ if value is None or str(value) == v:
+ return v
+
+ if msg is None:
+ if value is None:
+ msg = '%r not in headers' % key
+ else:
+ msg = '%r:%r not in headers' % (key, value)
+ self._handlewebError(msg)
+
+ def assertHeaderIn(self, key, values, msg=None):
+ """Fail if header indicated by key doesn't have one of the values."""
+ lowkey = key.lower()
+ for k, v in self.headers:
+ if k.lower() == lowkey:
+ matches = [value for value in values if str(value) == v]
+ if matches:
+ return matches
+
+ if msg is None:
+ msg = '%(key)r not in %(values)r' % vars()
+ self._handlewebError(msg)
+
+ def assertHeaderItemValue(self, key, value, msg=None):
+ """Fail if the header does not contain the specified value."""
+ actual_value = self.assertHeader(key, msg=msg)
+        header_values = list(map(str.strip, actual_value.split(',')))
+ if value in header_values:
+ return value
+
+ if msg is None:
+ msg = '%r not in %r' % (value, header_values)
+ self._handlewebError(msg)
+
+ def assertNoHeader(self, key, msg=None):
+ """Fail if key in self.headers."""
+ lowkey = key.lower()
+ matches = [k for k, v in self.headers if k.lower() == lowkey]
+ if matches:
+ if msg is None:
+ msg = '%r in headers' % key
+ self._handlewebError(msg)
+
+ def assertNoHeaderItemValue(self, key, value, msg=None):
+ """Fail if the header contains the specified value."""
+ lowkey = key.lower()
+ hdrs = self.headers
+ matches = [k for k, v in hdrs if k.lower() == lowkey and v == value]
+ if matches:
+ if msg is None:
+ msg = '%r:%r in %r' % (key, value, hdrs)
+ self._handlewebError(msg)
+
+ def assertBody(self, value, msg=None):
+ """Fail if value != self.body."""
+ if isinstance(value, six.text_type):
+ value = value.encode(self.encoding)
+ if value != self.body:
+ if msg is None:
+ msg = 'expected body:\n%r\n\nactual body:\n%r' % (
+ value, self.body,
+ )
+ self._handlewebError(msg)
+
+ def assertInBody(self, value, msg=None):
+ """Fail if value not in self.body."""
+ if isinstance(value, six.text_type):
+ value = value.encode(self.encoding)
+ if value not in self.body:
+ if msg is None:
+ msg = '%r not in body: %s' % (value, self.body)
+ self._handlewebError(msg)
+
+ def assertNotInBody(self, value, msg=None):
+ """Fail if value in self.body."""
+ if isinstance(value, six.text_type):
+ value = value.encode(self.encoding)
+ if value in self.body:
+ if msg is None:
+ msg = '%r found in body' % value
+ self._handlewebError(msg)
+
+ def assertMatchesBody(self, pattern, msg=None, flags=0):
+ """Fail if value (a regex pattern) is not in self.body."""
+ if isinstance(pattern, six.text_type):
+ pattern = pattern.encode(self.encoding)
+ if re.search(pattern, self.body, flags) is None:
+ if msg is None:
+ msg = 'No match for %r in body' % pattern
+ self._handlewebError(msg)
+
+
+methods_with_bodies = ('POST', 'PUT', 'PATCH')
+
+
+def cleanHeaders(headers, method, body, host, port):
+ """Return request headers, with required headers added (if missing)."""
+ if headers is None:
+ headers = []
+
+ # Add the required Host request header if not present.
+ # [This specifies the host:port of the server, not the client.]
+ found = False
+ for k, _v in headers:
+ if k.lower() == 'host':
+ found = True
+ break
+ if not found:
+ if port == 80:
+ headers.append(('Host', host))
+ else:
+ headers.append(('Host', '%s:%s' % (host, port)))
+
+ if method in methods_with_bodies:
+ # Stick in default type and length headers if not present
+ found = False
+ for k, v in headers:
+ if k.lower() == 'content-type':
+ found = True
+ break
+ if not found:
+ headers.append(
+ ('Content-Type', 'application/x-www-form-urlencoded'),
+ )
+ headers.append(('Content-Length', str(len(body or ''))))
+
+ return headers
+
+
+def shb(response):
+ """Return status, headers, body the way we like from a response."""
+ resp_status_line = '%s %s' % (response.status, response.reason)
+
+ if not six.PY2:
+ return resp_status_line, response.getheaders(), response.read()
+
+ h = []
+ key, value = None, None
+ for line in response.msg.headers:
+ if line:
+ if line[0] in ' \t':
+ value += line.strip()
+ else:
+ if key and value:
+ h.append((key, value))
+ key, value = line.split(':', 1)
+ key = key.strip()
+ value = value.strip()
+ if key and value:
+ h.append((key, value))
+
+ return resp_status_line, h, response.read()
+
+
+# def openURL(*args, raise_subcls=(), **kwargs):
+# py27 compatible signature:
+def openURL(*args, **kwargs):
+ """
+ Open a URL, retrying when it fails.
+
+ Specify ``raise_subcls`` (class or tuple of classes) to exclude
+ those socket.error subclasses from being suppressed and retried.
+ """
+ raise_subcls = kwargs.pop('raise_subcls', ())
+ opener = functools.partial(_open_url_once, *args, **kwargs)
+
+ def on_exception():
+ exc = sys.exc_info()[1]
+ if isinstance(exc, raise_subcls):
+ raise exc
+ time.sleep(0.5)
+
+ # Try up to 10 times
+ return jaraco.functools.retry_call(
+ opener,
+ retries=9,
+ cleanup=on_exception,
+ trap=socket.error,
+ )
+
+
+def _open_url_once(
+ url, headers=None, method='GET', body=None,
+ host='127.0.0.1', port=8000, http_conn=http_client.HTTPConnection,
+ protocol='HTTP/1.1', ssl_context=None,
+):
+ """Open the given HTTP resource and return status, headers, and body."""
+ headers = cleanHeaders(headers, method, body, host, port)
+
+ # Allow http_conn to be a class or an instance
+ if hasattr(http_conn, 'host'):
+ conn = http_conn
+ else:
+ kw = {}
+ if ssl_context:
+ kw['context'] = ssl_context
+ conn = http_conn(interface(host), port, **kw)
+ conn._http_vsn_str = protocol
+ conn._http_vsn = int(''.join([x for x in protocol if x.isdigit()]))
+ if not six.PY2 and isinstance(url, bytes):
+ url = url.decode()
+ conn.putrequest(
+ method.upper(), url, skip_host=True,
+ skip_accept_encoding=True,
+ )
+ for key, value in headers:
+ conn.putheader(key, value.encode('Latin-1'))
+ conn.endheaders()
+ if body is not None:
+ conn.send(body)
+ # Handle response
+ response = conn.getresponse()
+ s, h, b = shb(response)
+ if not hasattr(http_conn, 'host'):
+ # We made our own conn instance. Close it.
+ conn.close()
+ return s, h, b
+
+
+def strip_netloc(url):
+ """Return absolute-URI path from URL.
+
+ Strip the scheme and host from the URL, returning the
+ server-absolute portion.
+
+ Useful for wrapping an absolute-URI for which only the
+ path is expected (such as in calls to :py:meth:`WebCase.getPage`).
+
+ .. testsetup::
+
+ from cheroot.test.webtest import strip_netloc
+
+ >>> strip_netloc('https://google.com/foo/bar?bing#baz')
+ '/foo/bar?bing'
+
+ >>> strip_netloc('//google.com/foo/bar?bing#baz')
+ '/foo/bar?bing'
+
+ >>> strip_netloc('/foo/bar?bing#baz')
+ '/foo/bar?bing'
+ """
+ parsed = urllib_parse.urlparse(url)
+ _scheme, _netloc, path, params, query, _fragment = parsed
+ stripped = '', '', path, params, query, ''
+ return urllib_parse.urlunparse(stripped)
+
+
+# Add any exceptions which your web framework handles
+# normally (that you don't want server_error to trap).
+ignored_exceptions = []
+
+# You'll want set this to True when you can't guarantee
+# that each response will immediately follow each request;
+# for example, when handling requests via multiple threads.
+ignore_all = False
+
+
+class ServerError(Exception):
+ """Exception for signalling server error."""
+
+ on = False
+
+
+def server_error(exc=None):
+ """Server debug hook.
+
+ Return True if exception handled, False if ignored.
+ You probably want to wrap this, so you can still handle an error using
+ your framework when it's ignored.
+ """
+ if exc is None:
+ exc = sys.exc_info()
+
+ if ignore_all or exc[0] in ignored_exceptions:
+ return False
+ else:
+ ServerError.on = True
+ print('')
+ print(''.join(traceback.format_exception(*exc)))
+ return True
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/testing.py b/monEnvTP/lib/python3.8/site-packages/cheroot/testing.py
new file mode 100644
index 0000000000000000000000000000000000000000..c9a6ac992a0f0fe3bba9cde47bc500e38244ba0f
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/testing.py
@@ -0,0 +1,153 @@
+"""Pytest fixtures and other helpers for doing testing by end-users."""
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+from contextlib import closing
+import errno
+import socket
+import threading
+import time
+
+import pytest
+from six.moves import http_client
+
+import cheroot.server
+from cheroot.test import webtest
+import cheroot.wsgi
+
+EPHEMERAL_PORT = 0
+NO_INTERFACE = None # Using this or '' will cause an exception
+ANY_INTERFACE_IPV4 = '0.0.0.0'
+ANY_INTERFACE_IPV6 = '::'
+
+config = {
+ cheroot.wsgi.Server: {
+ 'bind_addr': (NO_INTERFACE, EPHEMERAL_PORT),
+ 'wsgi_app': None,
+ },
+ cheroot.server.HTTPServer: {
+ 'bind_addr': (NO_INTERFACE, EPHEMERAL_PORT),
+ 'gateway': cheroot.server.Gateway,
+ },
+}
+
+
+def cheroot_server(server_factory):
+ """Set up and tear down a Cheroot server instance."""
+ conf = config[server_factory].copy()
+ bind_port = conf.pop('bind_addr')[-1]
+
+ for interface in ANY_INTERFACE_IPV6, ANY_INTERFACE_IPV4:
+ try:
+ actual_bind_addr = (interface, bind_port)
+ httpserver = server_factory( # create it
+ bind_addr=actual_bind_addr,
+ **conf
+ )
+ except OSError:
+ pass
+ else:
+ break
+
+ httpserver.shutdown_timeout = 0 # Speed-up tests teardown
+
+ threading.Thread(target=httpserver.safe_start).start() # spawn it
+ while not httpserver.ready: # wait until fully initialized and bound
+ time.sleep(0.1)
+
+ yield httpserver
+
+ httpserver.stop() # destroy it
+
+
+@pytest.fixture
+def wsgi_server():
+ """Set up and tear down a Cheroot WSGI server instance."""
+ for srv in cheroot_server(cheroot.wsgi.Server):
+ yield srv
+
+
+@pytest.fixture
+def native_server():
+ """Set up and tear down a Cheroot HTTP server instance."""
+ for srv in cheroot_server(cheroot.server.HTTPServer):
+ yield srv
+
+
+class _TestClient:
+ def __init__(self, server):
+ self._interface, self._host, self._port = _get_conn_data(
+ server.bind_addr,
+ )
+ self.server_instance = server
+ self._http_connection = self.get_connection()
+
+ def get_connection(self):
+ name = '{interface}:{port}'.format(
+ interface=self._interface,
+ port=self._port,
+ )
+ conn_cls = (
+ http_client.HTTPConnection
+ if self.server_instance.ssl_adapter is None else
+ http_client.HTTPSConnection
+ )
+ return conn_cls(name)
+
+ def request(
+ self, uri, method='GET', headers=None, http_conn=None,
+ protocol='HTTP/1.1',
+ ):
+ return webtest.openURL(
+ uri, method=method,
+ headers=headers,
+ host=self._host, port=self._port,
+ http_conn=http_conn or self._http_connection,
+ protocol=protocol,
+ )
+
+ def __getattr__(self, attr_name):
+ def _wrapper(uri, **kwargs):
+ http_method = attr_name.upper()
+ return self.request(uri, method=http_method, **kwargs)
+
+ return _wrapper
+
+
+def _probe_ipv6_sock(interface):
+ # Alternate way is to check IPs on interfaces using glibc, like:
+ # github.com/Gautier/minifail/blob/master/minifail/getifaddrs.py
+ try:
+ with closing(socket.socket(family=socket.AF_INET6)) as sock:
+ sock.bind((interface, 0))
+ except (OSError, socket.error) as sock_err:
+ # In Python 3 socket.error is an alias for OSError
+ # In Python 2 socket.error is a subclass of IOError
+ if sock_err.errno != errno.EADDRNOTAVAIL:
+ raise
+ else:
+ return True
+
+ return False
+
+
+def _get_conn_data(bind_addr):
+ if isinstance(bind_addr, tuple):
+ host, port = bind_addr
+ else:
+ host, port = bind_addr, 0
+
+ interface = webtest.interface(host)
+
+ if ':' in interface and not _probe_ipv6_sock(interface):
+ interface = '127.0.0.1'
+ if ':' in host:
+ host = interface
+
+ return interface, host, port
+
+
+def get_server_client(server):
+ """Create and return a test client for the given server."""
+ return _TestClient(server)
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/testing.pyi b/monEnvTP/lib/python3.8/site-packages/cheroot/testing.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..4c825f98674d3d8ffc0677e5d6204e1114cf8b6a
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/testing.pyi
@@ -0,0 +1,17 @@
+from typing import Any, Iterator, Optional, TypeVar
+
+from .server import HTTPServer
+from .wsgi import Server
+
+T = TypeVar('T', bound=HTTPServer)
+
+EPHEMERAL_PORT: int
+NO_INTERFACE: Optional[str]
+ANY_INTERFACE_IPV4: str
+ANY_INTERFACE_IPV6: str
+config: dict
+
+def cheroot_server(server_factory: T) -> Iterator[T]: ...
+def wsgi_server() -> Iterator[Server]: ...
+def native_server() -> Iterator[HTTPServer]: ...
+def get_server_client(server) -> Any: ...
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/workers/__init__.py b/monEnvTP/lib/python3.8/site-packages/cheroot/workers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..098b8f25ff80ca80fbaa182c45106a4edbff4519
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/workers/__init__.py
@@ -0,0 +1 @@
+"""HTTP workers pool."""
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/workers/__init__.pyi b/monEnvTP/lib/python3.8/site-packages/cheroot/workers/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/workers/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cheroot/workers/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0b83ee9b811a2a3da83235c204713ea78454fe1e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cheroot/workers/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/workers/__pycache__/threadpool.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cheroot/workers/__pycache__/threadpool.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2fe536831d6d3ac18810e91975e5bc9790be3fea
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cheroot/workers/__pycache__/threadpool.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/workers/threadpool.py b/monEnvTP/lib/python3.8/site-packages/cheroot/workers/threadpool.py
new file mode 100644
index 0000000000000000000000000000000000000000..795ebc6d53a54c255875b6767d077aacc0fdb563
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/workers/threadpool.py
@@ -0,0 +1,330 @@
+"""A thread-based worker pool.
+
+.. spelling::
+
+ joinable
+"""
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+
+import collections
+import threading
+import time
+import socket
+import warnings
+
+from six.moves import queue
+
+from jaraco.functools import pass_none
+
+
+__all__ = ('WorkerThread', 'ThreadPool')
+
+
+class TrueyZero:
+ """Object which equals and does math like the integer 0 but evals True."""
+
+ def __add__(self, other):
+ return other
+
+ def __radd__(self, other):
+ return other
+
+
+trueyzero = TrueyZero()
+
+_SHUTDOWNREQUEST = None
+
+
+class WorkerThread(threading.Thread):
+ """Thread which continuously polls a Queue for Connection objects.
+
+ Due to the timing issues of polling a Queue, a WorkerThread does not
+ check its own 'ready' flag after it has started. To stop the thread,
+ it is necessary to stick a _SHUTDOWNREQUEST object onto the Queue
+ (one for each running WorkerThread).
+ """
+
+ conn = None
+ """The current connection pulled off the Queue, or None."""
+
+ server = None
+ """The HTTP Server which spawned this thread, and which owns the
+ Queue and is placing active connections into it."""
+
+ ready = False
+ """A simple flag for the calling server to know when this thread
+ has begun polling the Queue."""
+
+ def __init__(self, server):
+ """Initialize WorkerThread instance.
+
+ Args:
+ server (cheroot.server.HTTPServer): web server object
+ receiving this request
+ """
+ self.ready = False
+ self.server = server
+
+ self.requests_seen = 0
+ self.bytes_read = 0
+ self.bytes_written = 0
+ self.start_time = None
+ self.work_time = 0
+ self.stats = {
+ 'Requests': lambda s: self.requests_seen + (
+ self.start_time is None
+ and trueyzero
+ or self.conn.requests_seen
+ ),
+ 'Bytes Read': lambda s: self.bytes_read + (
+ self.start_time is None
+ and trueyzero
+ or self.conn.rfile.bytes_read
+ ),
+ 'Bytes Written': lambda s: self.bytes_written + (
+ self.start_time is None
+ and trueyzero
+ or self.conn.wfile.bytes_written
+ ),
+ 'Work Time': lambda s: self.work_time + (
+ self.start_time is None
+ and trueyzero
+ or time.time() - self.start_time
+ ),
+ 'Read Throughput': lambda s: s['Bytes Read'](s) / (
+ s['Work Time'](s) or 1e-6
+ ),
+ 'Write Throughput': lambda s: s['Bytes Written'](s) / (
+ s['Work Time'](s) or 1e-6
+ ),
+ }
+ threading.Thread.__init__(self)
+
+ def run(self):
+ """Process incoming HTTP connections.
+
+ Retrieves incoming connections from thread pool.
+ """
+ self.server.stats['Worker Threads'][self.name] = self.stats
+ try:
+ self.ready = True
+ while True:
+ conn = self.server.requests.get()
+ if conn is _SHUTDOWNREQUEST:
+ return
+
+ self.conn = conn
+ is_stats_enabled = self.server.stats['Enabled']
+ if is_stats_enabled:
+ self.start_time = time.time()
+ keep_conn_open = False
+ try:
+ keep_conn_open = conn.communicate()
+ finally:
+ if keep_conn_open:
+ self.server.put_conn(conn)
+ else:
+ conn.close()
+ if is_stats_enabled:
+ self.requests_seen += self.conn.requests_seen
+ self.bytes_read += self.conn.rfile.bytes_read
+ self.bytes_written += self.conn.wfile.bytes_written
+ self.work_time += time.time() - self.start_time
+ self.start_time = None
+ self.conn = None
+ except (KeyboardInterrupt, SystemExit) as ex:
+ self.server.interrupt = ex
+
+
+class ThreadPool:
+ """A Request Queue for an HTTPServer which pools threads.
+
+ ThreadPool objects must provide min, get(), put(obj), start()
+ and stop(timeout) attributes.
+ """
+
+ def __init__(
+ self, server, min=10, max=-1, accepted_queue_size=-1,
+ accepted_queue_timeout=10,
+ ):
+ """Initialize HTTP requests queue instance.
+
+ Args:
+ server (cheroot.server.HTTPServer): web server object
+ receiving this request
+ min (int): minimum number of worker threads
+ max (int): maximum number of worker threads
+ accepted_queue_size (int): maximum number of active
+ requests in queue
+ accepted_queue_timeout (int): timeout for putting request
+ into queue
+ """
+ self.server = server
+ self.min = min
+ self.max = max
+ self._threads = []
+ self._queue = queue.Queue(maxsize=accepted_queue_size)
+ self._queue_put_timeout = accepted_queue_timeout
+ self.get = self._queue.get
+ self._pending_shutdowns = collections.deque()
+
+ def start(self):
+ """Start the pool of threads."""
+ for _ in range(self.min):
+ self._threads.append(WorkerThread(self.server))
+ for worker in self._threads:
+ worker.name = (
+ 'CP Server {worker_name!s}'.
+                format(worker_name=worker.name)
+ )
+ worker.start()
+ for worker in self._threads:
+ while not worker.ready:
+ time.sleep(.1)
+
+ @property
+ def idle(self): # noqa: D401; irrelevant for properties
+ """Number of worker threads which are idle. Read-only.""" # noqa: D401
+ idles = len([t for t in self._threads if t.conn is None])
+ return max(idles - len(self._pending_shutdowns), 0)
+
+ def put(self, obj):
+ """Put request into queue.
+
+ Args:
+ obj (:py:class:`~cheroot.server.HTTPConnection`): HTTP connection
+ waiting to be processed
+ """
+ self._queue.put(obj, block=True, timeout=self._queue_put_timeout)
+
+ def _clear_dead_threads(self):
+ # Remove any dead threads from our list
+ for t in [t for t in self._threads if not t.is_alive()]:
+ self._threads.remove(t)
+ try:
+ self._pending_shutdowns.popleft()
+ except IndexError:
+ pass
+
+ def grow(self, amount):
+ """Spawn new worker threads (not above self.max)."""
+ if self.max > 0:
+ budget = max(self.max - len(self._threads), 0)
+ else:
+ # self.max <= 0 indicates no maximum
+ budget = float('inf')
+
+ n_new = min(amount, budget)
+
+ workers = [self._spawn_worker() for i in range(n_new)]
+ while not all(worker.ready for worker in workers):
+ time.sleep(.1)
+ self._threads.extend(workers)
+
+ def _spawn_worker(self):
+ worker = WorkerThread(self.server)
+ worker.name = (
+ 'CP Server {worker_name!s}'.
+            format(worker_name=worker.name)
+ )
+ worker.start()
+ return worker
+
+ def shrink(self, amount):
+ """Kill off worker threads (not below self.min)."""
+ # Grow/shrink the pool if necessary.
+ # Remove any dead threads from our list
+ amount -= len(self._pending_shutdowns)
+ self._clear_dead_threads()
+ if amount <= 0:
+ return
+
+ # calculate the number of threads above the minimum
+ n_extra = max(len(self._threads) - self.min, 0)
+
+ # don't remove more than amount
+ n_to_remove = min(amount, n_extra)
+
+ # put shutdown requests on the queue equal to the number of threads
+ # to remove. As each request is processed by a worker, that worker
+ # will terminate and be culled from the list.
+ for _ in range(n_to_remove):
+ self._pending_shutdowns.append(None)
+ self._queue.put(_SHUTDOWNREQUEST)
+
+ def stop(self, timeout=5):
+ """Terminate all worker threads.
+
+ Args:
+ timeout (int): time to wait for threads to stop gracefully
+ """
+ # for compatability, negative timeouts are treated like None
+ # TODO: treat negative timeouts like already expired timeouts
+ if timeout is not None and timeout < 0:
+ timeout = None
+            warnings.warn(
+ 'In the future, negative timeouts to Server.stop() '
+ 'will be equivalent to a timeout of zero.',
+ stacklevel=2,
+ )
+
+ if timeout is not None:
+ endtime = time.time() + timeout
+
+ # Must shut down threads here so the code that calls
+ # this method can know when all threads are stopped.
+ for worker in self._threads:
+ self._queue.put(_SHUTDOWNREQUEST)
+
+ ignored_errors = (
+ # Raised when start_response called >1 time w/o exc_info or
+ # wsgi write is called before start_response. See cheroot#261
+ RuntimeError,
+ # Ignore repeated Ctrl-C. See cherrypy#691.
+ KeyboardInterrupt,
+ )
+
+ for worker in self._clear_threads():
+ remaining_time = timeout and endtime - time.time()
+ try:
+ worker.join(remaining_time)
+ if worker.is_alive():
+ # Timeout exhausted; forcibly shut down the socket.
+ self._force_close(worker.conn)
+ worker.join()
+ except ignored_errors:
+ pass
+
+ @staticmethod
+ @pass_none
+ def _force_close(conn):
+ if conn.rfile.closed:
+ return
+ try:
+ try:
+ conn.socket.shutdown(socket.SHUT_RD)
+ except TypeError:
+ # pyOpenSSL sockets don't take an arg
+ conn.socket.shutdown()
+ except OSError:
+ # shutdown sometimes fails (race with 'closed' check?)
+ # ref #238
+ pass
+
+ def _clear_threads(self):
+ """Clear self._threads and yield all joinable threads."""
+ # threads = pop_all(self._threads)
+ threads, self._threads[:] = self._threads[:], []
+ return (
+ thread
+ for thread in threads
+ if thread is not threading.current_thread()
+ )
+
+ @property
+ def qsize(self):
+ """Return the queue size."""
+ return self._queue.qsize()
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/workers/threadpool.pyi b/monEnvTP/lib/python3.8/site-packages/cheroot/workers/threadpool.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..201d39140bd0f0f78b83e12b72477ce59e26590d
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/workers/threadpool.pyi
@@ -0,0 +1,37 @@
+import threading
+from typing import Any
+
+class TrueyZero:
+ def __add__(self, other): ...
+ def __radd__(self, other): ...
+
+trueyzero: TrueyZero
+
+class WorkerThread(threading.Thread):
+ conn: Any
+ server: Any
+ ready: bool
+ requests_seen: int
+ bytes_read: int
+ bytes_written: int
+ start_time: Any
+ work_time: int
+ stats: Any
+ def __init__(self, server): ...
+ def run(self) -> None: ...
+
+class ThreadPool:
+ server: Any
+ min: Any
+ max: Any
+ get: Any
+ def __init__(self, server, min: int = ..., max: int = ..., accepted_queue_size: int = ..., accepted_queue_timeout: int = ...) -> None: ...
+ def start(self) -> None: ...
+ @property
+ def idle(self): ...
+ def put(self, obj) -> None: ...
+ def grow(self, amount) -> None: ...
+ def shrink(self, amount) -> None: ...
+ def stop(self, timeout: int = ...) -> None: ...
+ @property
+ def qsize(self) -> int: ...
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/wsgi.py b/monEnvTP/lib/python3.8/site-packages/cheroot/wsgi.py
new file mode 100644
index 0000000000000000000000000000000000000000..583d52a9f365925c770c515915661b7b2099415c
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/wsgi.py
@@ -0,0 +1,435 @@
+"""This class holds Cheroot WSGI server implementation.
+
+Simplest example on how to use this server::
+
+ from cheroot import wsgi
+
+ def my_crazy_app(environ, start_response):
+ status = '200 OK'
+ response_headers = [('Content-type','text/plain')]
+ start_response(status, response_headers)
+ return [b'Hello world!']
+
+ addr = '0.0.0.0', 8070
+ server = wsgi.Server(addr, my_crazy_app)
+ server.start()
+
+The Cheroot WSGI server can serve as many WSGI applications
+as you want in one instance by using a PathInfoDispatcher::
+
+ path_map = {
+ '/': my_crazy_app,
+ '/blog': my_blog_app,
+ }
+ d = wsgi.PathInfoDispatcher(path_map)
+ server = wsgi.Server(addr, d)
+"""
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+import sys
+
+import six
+from six.moves import filter
+
+from . import server
+from .workers import threadpool
+from ._compat import ntob, bton
+
+
+class Server(server.HTTPServer):
+ """A subclass of HTTPServer which calls a WSGI application."""
+
+ wsgi_version = (1, 0)
+ """The version of WSGI to produce."""
+
+ def __init__(
+ self, bind_addr, wsgi_app, numthreads=10, server_name=None,
+ max=-1, request_queue_size=5, timeout=10, shutdown_timeout=5,
+ accepted_queue_size=-1, accepted_queue_timeout=10,
+ peercreds_enabled=False, peercreds_resolve_enabled=False,
+ ):
+ """Initialize WSGI Server instance.
+
+ Args:
+ bind_addr (tuple): network interface to listen to
+ wsgi_app (callable): WSGI application callable
+ numthreads (int): number of threads for WSGI thread pool
+ server_name (str): web server name to be advertised via
+ Server HTTP header
+ max (int): maximum number of worker threads
+ request_queue_size (int): the 'backlog' arg to
+ socket.listen(); max queued connections
+ timeout (int): the timeout in seconds for accepted connections
+ shutdown_timeout (int): the total time, in seconds, to
+ wait for worker threads to cleanly exit
+ accepted_queue_size (int): maximum number of active
+ requests in queue
+ accepted_queue_timeout (int): timeout for putting request
+ into queue
+ """
+ super(Server, self).__init__(
+ bind_addr,
+ gateway=wsgi_gateways[self.wsgi_version],
+ server_name=server_name,
+ peercreds_enabled=peercreds_enabled,
+ peercreds_resolve_enabled=peercreds_resolve_enabled,
+ )
+ self.wsgi_app = wsgi_app
+ self.request_queue_size = request_queue_size
+ self.timeout = timeout
+ self.shutdown_timeout = shutdown_timeout
+ self.requests = threadpool.ThreadPool(
+ self, min=numthreads or 1, max=max,
+ accepted_queue_size=accepted_queue_size,
+ accepted_queue_timeout=accepted_queue_timeout,
+ )
+
+ @property
+ def numthreads(self):
+ """Set minimum number of threads."""
+ return self.requests.min
+
+ @numthreads.setter
+ def numthreads(self, value):
+ self.requests.min = value
+
+
+class Gateway(server.Gateway):
+ """A base class to interface HTTPServer with WSGI."""
+
+ def __init__(self, req):
+ """Initialize WSGI Gateway instance with request.
+
+ Args:
+ req (HTTPRequest): current HTTP request
+ """
+ super(Gateway, self).__init__(req)
+ self.started_response = False
+ self.env = self.get_environ()
+ self.remaining_bytes_out = None
+
+ @classmethod
+ def gateway_map(cls):
+ """Create a mapping of gateways and their versions.
+
+ Returns:
+ dict[tuple[int,int],class]: map of gateway version and
+ corresponding class
+
+ """
+ return {gw.version: gw for gw in cls.__subclasses__()}
+
+ def get_environ(self):
+ """Return a new environ dict targeting the given wsgi.version."""
+ raise NotImplementedError # pragma: no cover
+
+ def respond(self):
+ """Process the current request.
+
+ From :pep:`333`:
+
+ The start_response callable must not actually transmit
+ the response headers. Instead, it must store them for the
+ server or gateway to transmit only after the first
+ iteration of the application return value that yields
+ a NON-EMPTY string, or upon the application's first
+ invocation of the write() callable.
+ """
+ response = self.req.server.wsgi_app(self.env, self.start_response)
+ try:
+ for chunk in filter(None, response):
+ if not isinstance(chunk, six.binary_type):
+ raise ValueError('WSGI Applications must yield bytes')
+ self.write(chunk)
+ finally:
+ # Send headers if not already sent
+ self.req.ensure_headers_sent()
+ if hasattr(response, 'close'):
+ response.close()
+
+ def start_response(self, status, headers, exc_info=None):
+ """WSGI callable to begin the HTTP response."""
+ # "The application may call start_response more than once,
+ # if and only if the exc_info argument is provided."
+ if self.started_response and not exc_info:
+ raise RuntimeError(
+ 'WSGI start_response called a second '
+ 'time with no exc_info.',
+ )
+ self.started_response = True
+
+ # "if exc_info is provided, and the HTTP headers have already been
+ # sent, start_response must raise an error, and should raise the
+ # exc_info tuple."
+ if self.req.sent_headers:
+ try:
+ six.reraise(*exc_info)
+ finally:
+ exc_info = None
+
+ self.req.status = self._encode_status(status)
+
+ for k, v in headers:
+ if not isinstance(k, str):
+ raise TypeError(
+ 'WSGI response header key %r is not of type str.' % k,
+ )
+ if not isinstance(v, str):
+ raise TypeError(
+ 'WSGI response header value %r is not of type str.' % v,
+ )
+ if k.lower() == 'content-length':
+ self.remaining_bytes_out = int(v)
+ out_header = ntob(k), ntob(v)
+ self.req.outheaders.append(out_header)
+
+ return self.write
+
+ @staticmethod
+ def _encode_status(status):
+ """Cast status to bytes representation of current Python version.
+
+ According to :pep:`3333`, when using Python 3, the response status
+ and headers must be bytes masquerading as Unicode; that is, they
+ must be of type "str" but are restricted to code points in the
+ "Latin-1" set.
+ """
+ if six.PY2:
+ return status
+ if not isinstance(status, str):
+ raise TypeError('WSGI response status is not of type str.')
+ return status.encode('ISO-8859-1')
+
+ def write(self, chunk):
+ """WSGI callable to write unbuffered data to the client.
+
+ This method is also used internally by start_response (to write
+ data from the iterable returned by the WSGI application).
+ """
+ if not self.started_response:
+ raise RuntimeError('WSGI write called before start_response.')
+
+ chunklen = len(chunk)
+ rbo = self.remaining_bytes_out
+ if rbo is not None and chunklen > rbo:
+ if not self.req.sent_headers:
+ # Whew. We can send a 500 to the client.
+ self.req.simple_response(
+ '500 Internal Server Error',
+ 'The requested resource returned more bytes than the '
+ 'declared Content-Length.',
+ )
+ else:
+ # Dang. We have probably already sent data. Truncate the chunk
+ # to fit (so the client doesn't hang) and raise an error later.
+ chunk = chunk[:rbo]
+
+ self.req.ensure_headers_sent()
+
+ self.req.write(chunk)
+
+ if rbo is not None:
+ rbo -= chunklen
+ if rbo < 0:
+ raise ValueError(
+ 'Response body exceeds the declared Content-Length.',
+ )
+
+
+class Gateway_10(Gateway):
+ """A Gateway class to interface HTTPServer with WSGI 1.0.x."""
+
+ version = 1, 0
+
+ def get_environ(self):
+ """Return a new environ dict targeting the given wsgi.version."""
+ req = self.req
+ req_conn = req.conn
+ env = {
+ # set a non-standard environ entry so the WSGI app can know what
+ # the *real* server protocol is (and what features to support).
+ # See http://www.faqs.org/rfcs/rfc2145.html.
+ 'ACTUAL_SERVER_PROTOCOL': req.server.protocol,
+ 'PATH_INFO': bton(req.path),
+ 'QUERY_STRING': bton(req.qs),
+ 'REMOTE_ADDR': req_conn.remote_addr or '',
+ 'REMOTE_PORT': str(req_conn.remote_port or ''),
+ 'REQUEST_METHOD': bton(req.method),
+ 'REQUEST_URI': bton(req.uri),
+ 'SCRIPT_NAME': '',
+ 'SERVER_NAME': req.server.server_name,
+ # Bah. "SERVER_PROTOCOL" is actually the REQUEST protocol.
+ 'SERVER_PROTOCOL': bton(req.request_protocol),
+ 'SERVER_SOFTWARE': req.server.software,
+ 'wsgi.errors': sys.stderr,
+ 'wsgi.input': req.rfile,
+ 'wsgi.input_terminated': bool(req.chunked_read),
+ 'wsgi.multiprocess': False,
+ 'wsgi.multithread': True,
+ 'wsgi.run_once': False,
+ 'wsgi.url_scheme': bton(req.scheme),
+ 'wsgi.version': self.version,
+ }
+
+ if isinstance(req.server.bind_addr, six.string_types):
+ # AF_UNIX. This isn't really allowed by WSGI, which doesn't
+ # address unix domain sockets. But it's better than nothing.
+ env['SERVER_PORT'] = ''
+ try:
+ env['X_REMOTE_PID'] = str(req_conn.peer_pid)
+ env['X_REMOTE_UID'] = str(req_conn.peer_uid)
+ env['X_REMOTE_GID'] = str(req_conn.peer_gid)
+
+ env['X_REMOTE_USER'] = str(req_conn.peer_user)
+ env['X_REMOTE_GROUP'] = str(req_conn.peer_group)
+
+ env['REMOTE_USER'] = env['X_REMOTE_USER']
+ except RuntimeError:
+ """Unable to retrieve peer creds data.
+
+ Unsupported by current kernel or socket error happened, or
+ unsupported socket type, or disabled.
+ """
+ else:
+ env['SERVER_PORT'] = str(req.server.bind_addr[1])
+
+ # Request headers
+ env.update(
+ (
+ 'HTTP_{header_name!s}'.
+ format(header_name=bton(k).upper().replace('-', '_')),
+ bton(v),
+ )
+ for k, v in req.inheaders.items()
+ )
+
+ # CONTENT_TYPE/CONTENT_LENGTH
+ ct = env.pop('HTTP_CONTENT_TYPE', None)
+ if ct is not None:
+ env['CONTENT_TYPE'] = ct
+ cl = env.pop('HTTP_CONTENT_LENGTH', None)
+ if cl is not None:
+ env['CONTENT_LENGTH'] = cl
+
+ if req.conn.ssl_env:
+ env.update(req.conn.ssl_env)
+
+ return env
+
+
+class Gateway_u0(Gateway_10):
+ """A Gateway class to interface HTTPServer with WSGI u.0.
+
+ WSGI u.0 is an experimental protocol, which uses Unicode for keys
+ and values in both Python 2 and Python 3.
+ """
+
+ version = 'u', 0
+
+ def get_environ(self):
+ """Return a new environ dict targeting the given wsgi.version."""
+ req = self.req
+ env_10 = super(Gateway_u0, self).get_environ()
+ env = dict(map(self._decode_key, env_10.items()))
+
+ # Request-URI
+ enc = env.setdefault(six.u('wsgi.url_encoding'), six.u('utf-8'))
+ try:
+ env['PATH_INFO'] = req.path.decode(enc)
+ env['QUERY_STRING'] = req.qs.decode(enc)
+ except UnicodeDecodeError:
+ # Fall back to latin 1 so apps can transcode if needed.
+ env['wsgi.url_encoding'] = 'ISO-8859-1'
+ env['PATH_INFO'] = env_10['PATH_INFO']
+ env['QUERY_STRING'] = env_10['QUERY_STRING']
+
+ env.update(map(self._decode_value, env.items()))
+
+ return env
+
+ @staticmethod
+ def _decode_key(item):
+ k, v = item
+ if six.PY2:
+ k = k.decode('ISO-8859-1')
+ return k, v
+
+ @staticmethod
+ def _decode_value(item):
+ k, v = item
+ skip_keys = 'REQUEST_URI', 'wsgi.input'
+ if not six.PY2 or not isinstance(v, bytes) or k in skip_keys:
+ return k, v
+ return k, v.decode('ISO-8859-1')
+
+
+wsgi_gateways = Gateway.gateway_map()
+
+
+class PathInfoDispatcher:
+ """A WSGI dispatcher for dispatch based on the PATH_INFO."""
+
+ def __init__(self, apps):
+ """Initialize path info WSGI app dispatcher.
+
+ Args:
+ apps (dict[str,object]|list[tuple[str,object]]): URI prefix
+ and WSGI app pairs
+ """
+ try:
+ apps = list(apps.items())
+ except AttributeError:
+ pass
+
+ # Sort the apps by len(path), descending
+ def by_path_len(app):
+ return len(app[0])
+ apps.sort(key=by_path_len, reverse=True)
+
+ # The path_prefix strings must start, but not end, with a slash.
+ # Use "" instead of "/".
+ self.apps = [(p.rstrip('/'), a) for p, a in apps]
+
+ def __call__(self, environ, start_response):
+ """Process incoming WSGI request.
+
+ Ref: :pep:`3333`
+
+ Args:
+ environ (Mapping): a dict containing WSGI environment variables
+ start_response (callable): function, which sets response
+ status and headers
+
+ Returns:
+ list[bytes]: iterable containing bytes to be returned in
+ HTTP response body
+
+ """
+ path = environ['PATH_INFO'] or '/'
+ for p, app in self.apps:
+ # The apps list should be sorted by length, descending.
+ if path.startswith('{path!s}/'.format(path=p)) or path == p:
+ environ = environ.copy()
+ environ['SCRIPT_NAME'] = environ.get('SCRIPT_NAME', '') + p
+ environ['PATH_INFO'] = path[len(p):]
+ return app(environ, start_response)
+
+ start_response(
+ '404 Not Found', [
+ ('Content-Type', 'text/plain'),
+ ('Content-Length', '0'),
+ ],
+ )
+ return ['']
+
+
+# compatibility aliases
+globals().update(
+ WSGIServer=Server,
+ WSGIGateway=Gateway,
+ WSGIGateway_u0=Gateway_u0,
+ WSGIGateway_10=Gateway_10,
+ WSGIPathInfoDispatcher=PathInfoDispatcher,
+)
diff --git a/monEnvTP/lib/python3.8/site-packages/cheroot/wsgi.pyi b/monEnvTP/lib/python3.8/site-packages/cheroot/wsgi.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..b4851a3d4d9b0e551c2ec3fa210e09a35c66c9a5
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cheroot/wsgi.pyi
@@ -0,0 +1,42 @@
+from . import server
+from typing import Any
+
+class Server(server.HTTPServer):
+ wsgi_version: Any
+ wsgi_app: Any
+ request_queue_size: Any
+ timeout: Any
+ shutdown_timeout: Any
+ requests: Any
+ def __init__(self, bind_addr, wsgi_app, numthreads: int = ..., server_name: Any | None = ..., max: int = ..., request_queue_size: int = ..., timeout: int = ..., shutdown_timeout: int = ..., accepted_queue_size: int = ..., accepted_queue_timeout: int = ..., peercreds_enabled: bool = ..., peercreds_resolve_enabled: bool = ...) -> None: ...
+ @property
+ def numthreads(self): ...
+ @numthreads.setter
+ def numthreads(self, value) -> None: ...
+
+class Gateway(server.Gateway):
+ started_response: bool
+ env: Any
+ remaining_bytes_out: Any
+ def __init__(self, req) -> None: ...
+ @classmethod
+ def gateway_map(cls): ...
+ def get_environ(self) -> None: ...
+ def respond(self) -> None: ...
+ def start_response(self, status, headers, exc_info: Any | None = ...): ...
+ def write(self, chunk) -> None: ...
+
+class Gateway_10(Gateway):
+ version: Any
+ def get_environ(self): ...
+
+class Gateway_u0(Gateway_10):
+ version: Any
+ def get_environ(self): ...
+
+wsgi_gateways: Any
+
+class PathInfoDispatcher:
+ apps: Any
+ def __init__(self, apps): ...
+ def __call__(self, environ, start_response): ...
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/__init__.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..8e27c81215342d4e401e729ec9f3dc37d80e0049
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/__init__.py
@@ -0,0 +1,370 @@
+"""CherryPy is a pythonic, object-oriented HTTP framework.
+
+CherryPy consists of not one, but four separate API layers.
+
+The APPLICATION LAYER is the simplest. CherryPy applications are written as
+a tree of classes and methods, where each branch in the tree corresponds to
+a branch in the URL path. Each method is a 'page handler', which receives
+GET and POST params as keyword arguments, and returns or yields the (HTML)
+body of the response. The special method name 'index' is used for paths
+that end in a slash, and the special method name 'default' is used to
+handle multiple paths via a single handler. This layer also includes:
+
+ * the 'exposed' attribute (and cherrypy.expose)
+ * cherrypy.quickstart()
+ * _cp_config attributes
+ * cherrypy.tools (including cherrypy.session)
+ * cherrypy.url()
+
+The ENVIRONMENT LAYER is used by developers at all levels. It provides
+information about the current request and response, plus the application
+and server environment, via a (default) set of top-level objects:
+
+ * cherrypy.request
+ * cherrypy.response
+ * cherrypy.engine
+ * cherrypy.server
+ * cherrypy.tree
+ * cherrypy.config
+ * cherrypy.thread_data
+ * cherrypy.log
+ * cherrypy.HTTPError, NotFound, and HTTPRedirect
+ * cherrypy.lib
+
+The EXTENSION LAYER allows advanced users to construct and share their own
+plugins. It consists of:
+
+ * Hook API
+ * Tool API
+ * Toolbox API
+ * Dispatch API
+ * Config Namespace API
+
+Finally, there is the CORE LAYER, which uses the core API's to construct
+the default components which are available at higher layers. You can think
+of the default components as the 'reference implementation' for CherryPy.
+Megaframeworks (and advanced users) may replace the default components
+with customized or extended components. The core API's are:
+
+ * Application API
+ * Engine API
+ * Request API
+ * Server API
+ * WSGI API
+
+These API's are described in the `CherryPy specification
+<https://github.com/cherrypy/cherrypy/wiki/CherryPySpec>`_.
+"""
+
+try:
+ import pkg_resources
+except ImportError:
+ pass
+
+from threading import local as _local
+
+from ._cperror import (
+ HTTPError, HTTPRedirect, InternalRedirect,
+ NotFound, CherryPyException,
+)
+
+from . import _cpdispatch as dispatch
+
+from ._cptools import default_toolbox as tools, Tool
+from ._helper import expose, popargs, url
+
+from . import _cprequest, _cpserver, _cptree, _cplogging, _cpconfig
+
+import cherrypy.lib.httputil as _httputil
+
+from ._cptree import Application
+from . import _cpwsgi as wsgi
+
+from . import process
+try:
+ from .process import win32
+ engine = win32.Win32Bus()
+ engine.console_control_handler = win32.ConsoleCtrlHandler(engine)
+ del win32
+except ImportError:
+ engine = process.bus
+
+from . import _cpchecker
+
+__all__ = (
+ 'HTTPError', 'HTTPRedirect', 'InternalRedirect',
+ 'NotFound', 'CherryPyException',
+ 'dispatch', 'tools', 'Tool', 'Application',
+ 'wsgi', 'process', 'tree', 'engine',
+ 'quickstart', 'serving', 'request', 'response', 'thread_data',
+ 'log', 'expose', 'popargs', 'url', 'config',
+)
+
+
+__import__('cherrypy._cptools')
+__import__('cherrypy._cprequest')
+
+
+tree = _cptree.Tree()
+
+
+try:
+ __version__ = pkg_resources.require('cherrypy')[0].version
+except Exception:
+ __version__ = 'unknown'
+
+
+engine.listeners['before_request'] = set()
+engine.listeners['after_request'] = set()
+
+
+engine.autoreload = process.plugins.Autoreloader(engine)
+engine.autoreload.subscribe()
+
+engine.thread_manager = process.plugins.ThreadManager(engine)
+engine.thread_manager.subscribe()
+
+engine.signal_handler = process.plugins.SignalHandler(engine)
+
+
+class _HandleSignalsPlugin(object):
+ """Handle signals from other processes.
+
+ Based on the configured platform handlers above.
+ """
+
+ def __init__(self, bus):
+ self.bus = bus
+
+ def subscribe(self):
+ """Add the handlers based on the platform."""
+ if hasattr(self.bus, 'signal_handler'):
+ self.bus.signal_handler.subscribe()
+ if hasattr(self.bus, 'console_control_handler'):
+ self.bus.console_control_handler.subscribe()
+
+
+engine.signals = _HandleSignalsPlugin(engine)
+
+
+server = _cpserver.Server()
+server.subscribe()
+
+
+def quickstart(root=None, script_name='', config=None):
+ """Mount the given root, start the builtin server (and engine), then block.
+
+ root: an instance of a "controller class" (a collection of page handler
+ methods) which represents the root of the application.
+ script_name: a string containing the "mount point" of the application.
+ This should start with a slash, and be the path portion of the URL
+ at which to mount the given root. For example, if root.index() will
+ handle requests to "http://www.example.com:8080/dept/app1/", then
+ the script_name argument would be "/dept/app1".
+
+ It MUST NOT end in a slash. If the script_name refers to the root
+ of the URI, it MUST be an empty string (not "/").
+ config: a file or dict containing application config. If this contains
+ a [global] section, those entries will be used in the global
+ (site-wide) config.
+ """
+ if config:
+ _global_conf_alias.update(config)
+
+ tree.mount(root, script_name, config)
+
+ engine.signals.subscribe()
+ engine.start()
+ engine.block()
+
+
+class _Serving(_local):
+ """An interface for registering request and response objects.
+
+ Rather than have a separate "thread local" object for the request and
+ the response, this class works as a single threadlocal container for
+ both objects (and any others which developers wish to define). In this
+ way, we can easily dump those objects when we stop/start a new HTTP
+ conversation, yet still refer to them as module-level globals in a
+ thread-safe way.
+ """
+
+ request = _cprequest.Request(_httputil.Host('127.0.0.1', 80),
+ _httputil.Host('127.0.0.1', 1111))
+ """
+ The request object for the current thread. In the main thread,
+ and any threads which are not receiving HTTP requests, this is None."""
+
+ response = _cprequest.Response()
+ """
+ The response object for the current thread. In the main thread,
+ and any threads which are not receiving HTTP requests, this is None."""
+
+ def load(self, request, response):
+ self.request = request
+ self.response = response
+
+ def clear(self):
+ """Remove all attributes of self."""
+ self.__dict__.clear()
+
+
+serving = _Serving()
+
+
+class _ThreadLocalProxy(object):
+
+ __slots__ = ['__attrname__', '__dict__']
+
+ def __init__(self, attrname):
+ self.__attrname__ = attrname
+
+ def __getattr__(self, name):
+ child = getattr(serving, self.__attrname__)
+ return getattr(child, name)
+
+ def __setattr__(self, name, value):
+ if name in ('__attrname__', ):
+ object.__setattr__(self, name, value)
+ else:
+ child = getattr(serving, self.__attrname__)
+ setattr(child, name, value)
+
+ def __delattr__(self, name):
+ child = getattr(serving, self.__attrname__)
+ delattr(child, name)
+
+ @property
+ def __dict__(self):
+ child = getattr(serving, self.__attrname__)
+ d = child.__class__.__dict__.copy()
+ d.update(child.__dict__)
+ return d
+
+ def __getitem__(self, key):
+ child = getattr(serving, self.__attrname__)
+ return child[key]
+
+ def __setitem__(self, key, value):
+ child = getattr(serving, self.__attrname__)
+ child[key] = value
+
+ def __delitem__(self, key):
+ child = getattr(serving, self.__attrname__)
+ del child[key]
+
+ def __contains__(self, key):
+ child = getattr(serving, self.__attrname__)
+ return key in child
+
+ def __len__(self):
+ child = getattr(serving, self.__attrname__)
+ return len(child)
+
+ def __nonzero__(self):
+ child = getattr(serving, self.__attrname__)
+ return bool(child)
+ # Python 3
+ __bool__ = __nonzero__
+
+
+# Create request and response object (the same objects will be used
+# throughout the entire life of the webserver, but will redirect
+# to the "serving" object)
+request = _ThreadLocalProxy('request')
+response = _ThreadLocalProxy('response')
+
+# Create thread_data object as a thread-specific all-purpose storage
+
+
+class _ThreadData(_local):
+ """A container for thread-specific data."""
+
+
+thread_data = _ThreadData()
+
+
+# Monkeypatch pydoc to allow help() to go through the threadlocal proxy.
+# Jan 2007: no Googleable examples of anyone else replacing pydoc.resolve.
+# The only other way would be to change what is returned from type(request)
+# and that's not possible in pure Python (you'd have to fake ob_type).
+def _cherrypy_pydoc_resolve(thing, forceload=0):
+ """Given an object or a path to an object, get the object and its name."""
+ if isinstance(thing, _ThreadLocalProxy):
+ thing = getattr(serving, thing.__attrname__)
+ return _pydoc._builtin_resolve(thing, forceload)
+
+
+try:
+ import pydoc as _pydoc
+ _pydoc._builtin_resolve = _pydoc.resolve
+ _pydoc.resolve = _cherrypy_pydoc_resolve
+except ImportError:
+ pass
+
+
+class _GlobalLogManager(_cplogging.LogManager):
+ """A site-wide LogManager; routes to app.log or global log as appropriate.
+
+ This :class:`LogManager<cherrypy._cplogging.LogManager>` implements
+ cherrypy.log() and cherrypy.log.access(). If either
+ function is called during a request, the message will be sent to the
+ logger for the current Application. If they are called outside of a
+ request, the message will be sent to the site-wide logger.
+ """
+
+ def __call__(self, *args, **kwargs):
+ """Log the given message to the app.log or global log.
+
+ Log the given message to the app.log or global
+ log as appropriate.
+ """
+ # Do NOT use try/except here. See
+ # https://github.com/cherrypy/cherrypy/issues/945
+ if hasattr(request, 'app') and hasattr(request.app, 'log'):
+ log = request.app.log
+ else:
+ log = self
+ return log.error(*args, **kwargs)
+
+ def access(self):
+ """Log an access message to the app.log or global log.
+
+ Log the given message to the app.log or global
+ log as appropriate.
+ """
+ try:
+ return request.app.log.access()
+ except AttributeError:
+ return _cplogging.LogManager.access(self)
+
+
+log = _GlobalLogManager()
+# Set a default screen handler on the global log.
+log.screen = True
+log.error_file = ''
+# Using an access file makes CP about 10% slower. Leave off by default.
+log.access_file = ''
+
+
+@engine.subscribe('log')
+def _buslog(msg, level):
+ log.error(msg, 'ENGINE', severity=level)
+
+
+# Use _global_conf_alias so quickstart can use 'config' as an arg
+# without shadowing cherrypy.config.
+config = _global_conf_alias = _cpconfig.Config()
+config.defaults = {
+ 'tools.log_tracebacks.on': True,
+ 'tools.log_headers.on': True,
+ 'tools.trailing_slash.on': True,
+ 'tools.encode.on': True
+}
+config.namespaces['log'] = lambda k, v: setattr(log, k, v)
+config.namespaces['checker'] = lambda k, v: setattr(checker, k, v)
+# Must reset to get our defaults applied.
+config.reset()
+
+checker = _cpchecker.Checker()
+engine.subscribe('start', checker)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/__main__.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/__main__.py
new file mode 100644
index 0000000000000000000000000000000000000000..6674f7cb4c72db1f3740f4e9228276fb5779e3e3
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/__main__.py
@@ -0,0 +1,5 @@
+"""CherryPy'd cherryd daemon runner."""
+from cherrypy.daemon import run
+
+
+__name__ == '__main__' and run()
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..eb9f8ec888c5f85038a71d24fde888c2f4442ccf
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/__main__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/__main__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2ba7c0bed951da0e7e64f7dcfd2d9e2f7844df75
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/__main__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cpchecker.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cpchecker.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..66b0cd49549ac645fb951d7e189797cbe7e26b72
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cpchecker.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cpcompat.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cpcompat.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..08833ba763d15014827ba02b11f95a9b2b015c56
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cpcompat.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cpconfig.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cpconfig.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e739a236ad69334283acfd4b4d4b40adacdcd50b
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cpconfig.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cpdispatch.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cpdispatch.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..58abb66eb4aba811b525546359b5e4fa8c363770
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cpdispatch.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cperror.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cperror.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b33e3f503eee08cc80e490f363b2d04c0e21a28c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cperror.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cplogging.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cplogging.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..19a02a079e6dfff0e5939a38b0912f43f5e69098
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cplogging.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cpmodpy.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cpmodpy.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3aa33deaafe05fc8e09a7b8efa9d2f463d6cb865
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cpmodpy.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cpnative_server.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cpnative_server.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..708adb1ac06f6ec20ab7de4cebd830fc27f62db7
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cpnative_server.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cpreqbody.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cpreqbody.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7f183224bf92b28ac56c910a6e5bb206a83996a8
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cpreqbody.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cprequest.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cprequest.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ea6f68710f11bcfd3731286d4aaac2397f63fb44
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cprequest.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cpserver.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cpserver.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7fe2de2faff5c06c0bd4d561071263018ccf4897
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cpserver.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cptools.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cptools.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f1d0786797cf5d7c4d12b0443815f4f129028a25
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cptools.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cptree.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cptree.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..65dc88c3cc85a40c098a354d42b9e6ff2b0038a5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cptree.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cpwsgi.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cpwsgi.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2df388029dcede4d92dcb249208d5f04ac3593ff
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cpwsgi.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cpwsgi_server.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cpwsgi_server.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..fbba45d8bc5172d0fb03893c712a960b7c0a56ce
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_cpwsgi_server.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_helper.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_helper.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..aea3bcf9272699ea6e4bdbffac4801cfebca4ac8
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_helper.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_json.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_json.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bd4b34c552188d1ee5241198ffd33c2df33c7880
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/_json.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/daemon.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/daemon.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9652440230cab61d412ebf2a1a869081475ae5f4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/__pycache__/daemon.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/_cpchecker.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/_cpchecker.py
new file mode 100644
index 0000000000000000000000000000000000000000..f26f319cf8c2b8b24a474fbaa1272d5eb332e466
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/_cpchecker.py
@@ -0,0 +1,323 @@
+"""Checker for CherryPy sites and mounted apps."""
+import os
+import warnings
+import builtins
+
+import cherrypy
+
+
+class Checker(object):
+ """A checker for CherryPy sites and their mounted applications.
+
+ When this object is called at engine startup, it executes each
+ of its own methods whose names start with ``check_``. If you wish
+ to disable selected checks, simply add a line in your global
+ config which sets the appropriate method to False::
+
+ [global]
+ checker.check_skipped_app_config = False
+
+ You may also dynamically add or replace ``check_*`` methods in this way.
+ """
+
+ on = True
+ """If True (the default), run all checks; if False, turn off all checks."""
+
+ def __init__(self):
+ """Initialize Checker instance."""
+ self._populate_known_types()
+
+ def __call__(self):
+ """Run all check_* methods."""
+ if self.on:
+ oldformatwarning = warnings.formatwarning
+ warnings.formatwarning = self.formatwarning
+ try:
+ for name in dir(self):
+ if name.startswith('check_'):
+ method = getattr(self, name)
+ if method and hasattr(method, '__call__'):
+ method()
+ finally:
+ warnings.formatwarning = oldformatwarning
+
+ def formatwarning(self, message, category, filename, lineno, line=None):
+ """Format a warning."""
+ return 'CherryPy Checker:\n%s\n\n' % message
+
+ # This value should be set inside _cpconfig.
+ global_config_contained_paths = False
+
+ def check_app_config_entries_dont_start_with_script_name(self):
+ """Check for App config with sections that repeat script_name."""
+ for sn, app in cherrypy.tree.apps.items():
+ if not isinstance(app, cherrypy.Application):
+ continue
+ if not app.config:
+ continue
+ if sn == '':
+ continue
+ sn_atoms = sn.strip('/').split('/')
+ for key in app.config.keys():
+ key_atoms = key.strip('/').split('/')
+ if key_atoms[:len(sn_atoms)] == sn_atoms:
+ warnings.warn(
+ 'The application mounted at %r has config '
+ 'entries that start with its script name: %r' % (sn,
+ key))
+
+ def check_site_config_entries_in_app_config(self):
+ """Check for mounted Applications that have site-scoped config."""
+ for sn, app in cherrypy.tree.apps.items():
+ if not isinstance(app, cherrypy.Application):
+ continue
+
+ msg = []
+ for section, entries in app.config.items():
+ if section.startswith('/'):
+ for key, value in entries.items():
+ for n in ('engine.', 'server.', 'tree.', 'checker.'):
+ if key.startswith(n):
+ msg.append('[%s] %s = %s' %
+ (section, key, value))
+ if msg:
+ msg.insert(0,
+ 'The application mounted at %r contains the '
+ 'following config entries, which are only allowed '
+ 'in site-wide config. Move them to a [global] '
+ 'section and pass them to cherrypy.config.update() '
+ 'instead of tree.mount().' % sn)
+ warnings.warn(os.linesep.join(msg))
+
+ def check_skipped_app_config(self):
+ """Check for mounted Applications that have no config."""
+ for sn, app in cherrypy.tree.apps.items():
+ if not isinstance(app, cherrypy.Application):
+ continue
+ if not app.config:
+ msg = 'The Application mounted at %r has an empty config.' % sn
+ if self.global_config_contained_paths:
+ msg += (' It looks like the config you passed to '
+ 'cherrypy.config.update() contains application-'
+ 'specific sections. You must explicitly pass '
+ 'application config via '
+ 'cherrypy.tree.mount(..., config=app_config)')
+ warnings.warn(msg)
+ return
+
+ def check_app_config_brackets(self):
+ """Check for App config with extraneous brackets in section names."""
+ for sn, app in cherrypy.tree.apps.items():
+ if not isinstance(app, cherrypy.Application):
+ continue
+ if not app.config:
+ continue
+ for key in app.config.keys():
+ if key.startswith('[') or key.endswith(']'):
+ warnings.warn(
+ 'The application mounted at %r has config '
+ 'section names with extraneous brackets: %r. '
+ 'Config *files* need brackets; config *dicts* '
+ '(e.g. passed to tree.mount) do not.' % (sn, key))
+
+ def check_static_paths(self):
+ """Check Application config for incorrect static paths."""
+ # Use the dummy Request object in the main thread.
+ request = cherrypy.request
+ for sn, app in cherrypy.tree.apps.items():
+ if not isinstance(app, cherrypy.Application):
+ continue
+ request.app = app
+ for section in app.config:
+ # get_resource will populate request.config
+ request.get_resource(section + '/dummy.html')
+ conf = request.config.get
+
+ if conf('tools.staticdir.on', False):
+ msg = ''
+ root = conf('tools.staticdir.root')
+ dir = conf('tools.staticdir.dir')
+ if dir is None:
+ msg = 'tools.staticdir.dir is not set.'
+ else:
+ fulldir = ''
+ if os.path.isabs(dir):
+ fulldir = dir
+ if root:
+ msg = ('dir is an absolute path, even '
+ 'though a root is provided.')
+ testdir = os.path.join(root, dir[1:])
+ if os.path.exists(testdir):
+ msg += (
+ '\nIf you meant to serve the '
+ 'filesystem folder at %r, remove the '
+ 'leading slash from dir.' % (testdir,))
+ else:
+ if not root:
+ msg = (
+ 'dir is a relative path and '
+ 'no root provided.')
+ else:
+ fulldir = os.path.join(root, dir)
+ if not os.path.isabs(fulldir):
+ msg = ('%r is not an absolute path.' % (
+ fulldir,))
+
+ if fulldir and not os.path.exists(fulldir):
+ if msg:
+ msg += '\n'
+ msg += ('%r (root + dir) is not an existing '
+ 'filesystem path.' % fulldir)
+
+ if msg:
+ warnings.warn('%s\nsection: [%s]\nroot: %r\ndir: %r'
+ % (msg, section, root, dir))
+
+ # -------------------------- Compatibility -------------------------- #
+ obsolete = {
+ 'server.default_content_type': 'tools.response_headers.headers',
+ 'log_access_file': 'log.access_file',
+ 'log_config_options': None,
+ 'log_file': 'log.error_file',
+ 'log_file_not_found': None,
+ 'log_request_headers': 'tools.log_headers.on',
+ 'log_to_screen': 'log.screen',
+ 'show_tracebacks': 'request.show_tracebacks',
+ 'throw_errors': 'request.throw_errors',
+ 'profiler.on': ('cherrypy.tree.mount(profiler.make_app('
+ 'cherrypy.Application(Root())))'),
+ }
+
+ deprecated = {}
+
+ def _compat(self, config):
+ """Process config and warn on each obsolete or deprecated entry."""
+ for section, conf in config.items():
+ if isinstance(conf, dict):
+ for k in conf:
+ if k in self.obsolete:
+ warnings.warn('%r is obsolete. Use %r instead.\n'
+ 'section: [%s]' %
+ (k, self.obsolete[k], section))
+ elif k in self.deprecated:
+ warnings.warn('%r is deprecated. Use %r instead.\n'
+ 'section: [%s]' %
+ (k, self.deprecated[k], section))
+ else:
+ if section in self.obsolete:
+ warnings.warn('%r is obsolete. Use %r instead.'
+ % (section, self.obsolete[section]))
+ elif section in self.deprecated:
+ warnings.warn('%r is deprecated. Use %r instead.'
+ % (section, self.deprecated[section]))
+
+ def check_compatibility(self):
+ """Process config and warn on each obsolete or deprecated entry."""
+ self._compat(cherrypy.config)
+ for sn, app in cherrypy.tree.apps.items():
+ if not isinstance(app, cherrypy.Application):
+ continue
+ self._compat(app.config)
+
+ # ------------------------ Known Namespaces ------------------------ #
+ extra_config_namespaces = []
+
+ def _known_ns(self, app):
+ ns = ['wsgi']
+ ns.extend(app.toolboxes)
+ ns.extend(app.namespaces)
+ ns.extend(app.request_class.namespaces)
+ ns.extend(cherrypy.config.namespaces)
+ ns += self.extra_config_namespaces
+
+ for section, conf in app.config.items():
+ is_path_section = section.startswith('/')
+ if is_path_section and isinstance(conf, dict):
+ for k in conf:
+ atoms = k.split('.')
+ if len(atoms) > 1:
+ if atoms[0] not in ns:
+ # Spit out a special warning if a known
+ # namespace is preceded by "cherrypy."
+ if atoms[0] == 'cherrypy' and atoms[1] in ns:
+ msg = (
+ 'The config entry %r is invalid; '
+ 'try %r instead.\nsection: [%s]'
+ % (k, '.'.join(atoms[1:]), section))
+ else:
+ msg = (
+ 'The config entry %r is invalid, '
+ 'because the %r config namespace '
+ 'is unknown.\n'
+ 'section: [%s]' % (k, atoms[0], section))
+ warnings.warn(msg)
+ elif atoms[0] == 'tools':
+ if atoms[1] not in dir(cherrypy.tools):
+ msg = (
+ 'The config entry %r may be invalid, '
+ 'because the %r tool was not found.\n'
+ 'section: [%s]' % (k, atoms[1], section))
+ warnings.warn(msg)
+
+ def check_config_namespaces(self):
+ """Process config and warn on each unknown config namespace."""
+ for sn, app in cherrypy.tree.apps.items():
+ if not isinstance(app, cherrypy.Application):
+ continue
+ self._known_ns(app)
+
+ # -------------------------- Config Types -------------------------- #
+ known_config_types = {}
+
+ def _populate_known_types(self):
+ b = [x for x in vars(builtins).values()
+ if type(x) is type(str)]
+
+ def traverse(obj, namespace):
+ for name in dir(obj):
+ # Hack for 3.2's warning about body_params
+ if name == 'body_params':
+ continue
+ vtype = type(getattr(obj, name, None))
+ if vtype in b:
+ self.known_config_types[namespace + '.' + name] = vtype
+
+ traverse(cherrypy.request, 'request')
+ traverse(cherrypy.response, 'response')
+ traverse(cherrypy.server, 'server')
+ traverse(cherrypy.engine, 'engine')
+ traverse(cherrypy.log, 'log')
+
+ def _known_types(self, config):
+ msg = ('The config entry %r in section %r is of type %r, '
+ 'which does not match the expected type %r.')
+
+ for section, conf in config.items():
+ if not isinstance(conf, dict):
+ conf = {section: conf}
+ for k, v in conf.items():
+ if v is not None:
+ expected_type = self.known_config_types.get(k, None)
+ vtype = type(v)
+ if expected_type and vtype != expected_type:
+ warnings.warn(msg % (k, section, vtype.__name__,
+ expected_type.__name__))
+
+ def check_config_types(self):
+ """Assert that config values are of the same type as default values."""
+ self._known_types(cherrypy.config)
+ for sn, app in cherrypy.tree.apps.items():
+ if not isinstance(app, cherrypy.Application):
+ continue
+ self._known_types(app.config)
+
+ # -------------------- Specific config warnings -------------------- #
+ def check_localhost(self):
+ """Warn if any socket_host is 'localhost'. See #711."""
+ for k, v in cherrypy.config.items():
+ if k == 'server.socket_host' and v == 'localhost':
+ warnings.warn("The use of 'localhost' as a socket host can "
+ 'cause problems on newer systems, since '
+ "'localhost' can map to either an IPv4 or an "
+ "IPv6 address. You should use '127.0.0.1' "
+ "or '[::1]' instead.")
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/_cpcompat.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/_cpcompat.py
new file mode 100644
index 0000000000000000000000000000000000000000..a43f6d36960159aa31f777b305492b2d7f10dbe1
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/_cpcompat.py
@@ -0,0 +1,59 @@
+"""Compatibility code for using CherryPy with various versions of Python.
+
+To retain compatibility with older Python versions, this module provides a
+useful abstraction over the differences between Python versions, sometimes by
+preferring a newer idiom, sometimes an older one, and sometimes a custom one.
+
+In particular, Python 2 uses str and '' for byte strings, while Python 3
+uses str and '' for unicode strings. We will call each of these the 'native
+string' type for each version. Because of this major difference, this module
+provides
+two functions: 'ntob', which translates native strings (of type 'str') into
+byte strings regardless of Python version, and 'ntou', which translates native
+strings to unicode strings.
+
+Try not to use the compatibility functions 'ntob', 'ntou', 'tonative'.
+They were created with Python 2.3-2.5 compatibility in mind.
+Instead, use unicode literals (from __future__) and bytes literals
+and their .encode/.decode methods as needed.
+"""
+
+import http.client
+
+
+def ntob(n, encoding='ISO-8859-1'):
+ """Return the given native string as a byte string in the given
+ encoding.
+ """
+ assert_native(n)
+ # In Python 3, the native string type is unicode
+ return n.encode(encoding)
+
+
+def ntou(n, encoding='ISO-8859-1'):
+ """Return the given native string as a unicode string with the given
+ encoding.
+ """
+ assert_native(n)
+ # In Python 3, the native string type is unicode
+ return n
+
+
+def tonative(n, encoding='ISO-8859-1'):
+ """Return the given string as a native string in the given encoding."""
+ # In Python 3, the native string type is unicode
+ if isinstance(n, bytes):
+ return n.decode(encoding)
+ return n
+
+
+def assert_native(n):
+ if not isinstance(n, str):
+ raise TypeError('n must be a native str (got %s)' % type(n).__name__)
+
+
+# Some platforms don't expose HTTPSConnection, so handle it separately
+HTTPSConnection = getattr(http.client, 'HTTPSConnection', None)
+
+
+text_or_bytes = str, bytes
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/_cpconfig.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/_cpconfig.py
new file mode 100644
index 0000000000000000000000000000000000000000..8e3fd612042289b435e689f85d7d0476d209ad7b
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/_cpconfig.py
@@ -0,0 +1,296 @@
+"""
+Configuration system for CherryPy.
+
+Configuration in CherryPy is implemented via dictionaries. Keys are strings
+which name the mapped value, which may be of any type.
+
+
+Architecture
+------------
+
+CherryPy Requests are part of an Application, which runs in a global context,
+and configuration data may apply to any of those three scopes:
+
+Global
+ Configuration entries which apply everywhere are stored in
+ cherrypy.config.
+
+Application
+ Entries which apply to each mounted application are stored
+ on the Application object itself, as 'app.config'. This is a two-level
+ dict where each key is a path, or "relative URL" (for example, "/" or
+ "/path/to/my/page"), and each value is a config dict. Usually, this
+ data is provided in the call to tree.mount(root(), config=conf),
+ although you may also use app.merge(conf).
+
+Request
+ Each Request object possesses a single 'Request.config' dict.
+ Early in the request process, this dict is populated by merging global
+ config entries, Application entries (whose path equals or is a parent
+ of Request.path_info), and any config acquired while looking up the
+ page handler (see next).
+
+
+Declaration
+-----------
+
+Configuration data may be supplied as a Python dictionary, as a filename,
+or as an open file object. When you supply a filename or file, CherryPy
+uses Python's builtin ConfigParser; you declare Application config by
+writing each path as a section header::
+
+ [/path/to/my/page]
+ request.stream = True
+
+To declare global configuration entries, place them in a [global] section.
+
+You may also declare config entries directly on the classes and methods
+(page handlers) that make up your CherryPy application via the ``_cp_config``
+attribute, set with the ``cherrypy.config`` decorator. For example::
+
+ @cherrypy.config(**{'tools.gzip.on': True})
+ class Demo:
+
+ @cherrypy.expose
+ @cherrypy.config(**{'request.show_tracebacks': False})
+ def index(self):
+ return "Hello world"
+
+.. note::
+
+ This behavior is only guaranteed for the default dispatcher.
+ Other dispatchers may have different restrictions on where
+ you can attach config attributes.
+
+
+Namespaces
+----------
+
+Configuration keys are separated into namespaces by the first "." in the key.
+Current namespaces:
+
+engine
+ Controls the 'application engine', including autoreload.
+ These can only be declared in the global config.
+
+tree
+ Grafts cherrypy.Application objects onto cherrypy.tree.
+ These can only be declared in the global config.
+
+hooks
+ Declares additional request-processing functions.
+
+log
+ Configures the logging for each application.
+ These can only be declared in the global or / config.
+
+request
+ Adds attributes to each Request.
+
+response
+ Adds attributes to each Response.
+
+server
+ Controls the default HTTP server via cherrypy.server.
+ These can only be declared in the global config.
+
+tools
+ Runs and configures additional request-processing packages.
+
+wsgi
+ Adds WSGI middleware to an Application's "pipeline".
+ These can only be declared in the app's root config ("/").
+
+checker
+ Controls the 'checker', which looks for common errors in
+ app state (including config) when the engine starts.
+ Global config only.
+
+The only key that does not exist in a namespace is the "environment" entry.
+This special entry 'imports' other config entries from a template stored in
+cherrypy._cpconfig.environments[environment]. It only applies to the global
+config, and only when you use cherrypy.config.update.
+
+You can define your own namespaces to be called at the Global, Application,
+or Request level, by adding a named handler to cherrypy.config.namespaces,
+app.namespaces, or app.request_class.namespaces. The name can
+be any string, and the handler must be either a callable or a (Python 2.5
+style) context manager.
+"""
+
+import cherrypy
+from cherrypy._cpcompat import text_or_bytes
+from cherrypy.lib import reprconf
+
+
+def _if_filename_register_autoreload(ob):
+ """Register for autoreload if ob is a string (presumed filename)."""
+ is_filename = isinstance(ob, text_or_bytes)
+ is_filename and cherrypy.engine.autoreload.files.add(ob)
+
+
+def merge(base, other):
+ """Merge one app config (from a dict, file, or filename) into another.
+
+ If the given config is a filename, it will be appended to
+ the list of files to monitor for "autoreload" changes.
+ """
+ _if_filename_register_autoreload(other)
+
+ # Load other into base
+ for section, value_map in reprconf.Parser.load(other).items():
+ if not isinstance(value_map, dict):
+ raise ValueError(
+ 'Application config must include section headers, but the '
+ "config you tried to merge doesn't have any sections. "
+ 'Wrap your config in another dict with paths as section '
+ "headers, for example: {'/': config}.")
+ base.setdefault(section, {}).update(value_map)
+
+
+class Config(reprconf.Config):
+ """The 'global' configuration data for the entire CherryPy process."""
+
+ def update(self, config):
+ """Update self from a dict, file or filename."""
+ _if_filename_register_autoreload(config)
+ super(Config, self).update(config)
+
+ def _apply(self, config):
+ """Update self from a dict."""
+ if isinstance(config.get('global'), dict):
+ if len(config) > 1:
+ cherrypy.checker.global_config_contained_paths = True
+ config = config['global']
+ if 'tools.staticdir.dir' in config:
+ config['tools.staticdir.section'] = 'global'
+ super(Config, self)._apply(config)
+
+ @staticmethod
+ def __call__(**kwargs):
+ """Decorate for page handlers to set _cp_config."""
+ def tool_decorator(f):
+ _Vars(f).setdefault('_cp_config', {}).update(kwargs)
+ return f
+ return tool_decorator
+
+
+class _Vars(object):
+ """Adapter allowing setting a default attribute on a function or class."""
+
+ def __init__(self, target):
+ self.target = target
+
+ def setdefault(self, key, default):
+ if not hasattr(self.target, key):
+ setattr(self.target, key, default)
+ return getattr(self.target, key)
+
+
+# Sphinx begin config.environments
+Config.environments = environments = {
+ 'staging': {
+ 'engine.autoreload.on': False,
+ 'checker.on': False,
+ 'tools.log_headers.on': False,
+ 'request.show_tracebacks': False,
+ 'request.show_mismatched_params': False,
+ },
+ 'production': {
+ 'engine.autoreload.on': False,
+ 'checker.on': False,
+ 'tools.log_headers.on': False,
+ 'request.show_tracebacks': False,
+ 'request.show_mismatched_params': False,
+ 'log.screen': False,
+ },
+ 'embedded': {
+ # For use with CherryPy embedded in another deployment stack.
+ 'engine.autoreload.on': False,
+ 'checker.on': False,
+ 'tools.log_headers.on': False,
+ 'request.show_tracebacks': False,
+ 'request.show_mismatched_params': False,
+ 'log.screen': False,
+ 'engine.SIGHUP': None,
+ 'engine.SIGTERM': None,
+ },
+ 'test_suite': {
+ 'engine.autoreload.on': False,
+ 'checker.on': False,
+ 'tools.log_headers.on': False,
+ 'request.show_tracebacks': True,
+ 'request.show_mismatched_params': True,
+ 'log.screen': False,
+ },
+}
+# Sphinx end config.environments
+
+
+def _server_namespace_handler(k, v):
+ """Config handler for the "server" namespace."""
+ atoms = k.split('.', 1)
+ if len(atoms) > 1:
+ # Special-case config keys of the form 'server.servername.socket_port'
+ # to configure additional HTTP servers.
+ if not hasattr(cherrypy, 'servers'):
+ cherrypy.servers = {}
+
+ servername, k = atoms
+ if servername not in cherrypy.servers:
+ from cherrypy import _cpserver
+ cherrypy.servers[servername] = _cpserver.Server()
+ # On by default, but 'on = False' can unsubscribe it (see below).
+ cherrypy.servers[servername].subscribe()
+
+ if k == 'on':
+ if v:
+ cherrypy.servers[servername].subscribe()
+ else:
+ cherrypy.servers[servername].unsubscribe()
+ else:
+ setattr(cherrypy.servers[servername], k, v)
+ else:
+ setattr(cherrypy.server, k, v)
+
+
+Config.namespaces['server'] = _server_namespace_handler
+
+
+def _engine_namespace_handler(k, v):
+ """Config handler for the "engine" namespace."""
+ engine = cherrypy.engine
+
+ if k in {'SIGHUP', 'SIGTERM'}:
+ engine.subscribe(k, v)
+ return
+
+ if '.' in k:
+ plugin, attrname = k.split('.', 1)
+ plugin = getattr(engine, plugin)
+ op = 'subscribe' if v else 'unsubscribe'
+ sub_unsub = getattr(plugin, op, None)
+ if attrname == 'on' and callable(sub_unsub):
+ sub_unsub()
+ return
+ setattr(plugin, attrname, v)
+ else:
+ setattr(engine, k, v)
+
+
+Config.namespaces['engine'] = _engine_namespace_handler
+
+
+def _tree_namespace_handler(k, v):
+ """Namespace handler for the 'tree' config namespace."""
+ if isinstance(v, dict):
+ for script_name, app in v.items():
+ cherrypy.tree.graft(app, script_name)
+ msg = 'Mounted: %s on %s' % (app, script_name or '/')
+ cherrypy.engine.log(msg)
+ else:
+ cherrypy.tree.graft(v, v.script_name)
+ cherrypy.engine.log('Mounted: %s on %s' % (v, v.script_name or '/'))
+
+
+Config.namespaces['tree'] = _tree_namespace_handler
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/_cpdispatch.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/_cpdispatch.py
new file mode 100644
index 0000000000000000000000000000000000000000..83eb79cbe21cf3f332a3c18b54e34d046caae167
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/_cpdispatch.py
@@ -0,0 +1,686 @@
+"""CherryPy dispatchers.
+
+A 'dispatcher' is the object which looks up the 'page handler' callable
+and collects config for the current request based on the path_info, other
+request attributes, and the application architecture. The core calls the
+dispatcher as early as possible, passing it a 'path_info' argument.
+
+The default dispatcher discovers the page handler by matching path_info
+to a hierarchical arrangement of objects, starting at request.app.root.
+"""
+
+import string
+import sys
+import types
+try:
+ classtype = (type, types.ClassType)
+except AttributeError:
+ classtype = type
+
+import cherrypy
+
+
+class PageHandler(object):
+
+ """Callable which sets response.body."""
+
+ def __init__(self, callable, *args, **kwargs):
+ self.callable = callable
+ self.args = args
+ self.kwargs = kwargs
+
+ @property
+ def args(self):
+ """The ordered args should be accessible from post dispatch hooks."""
+ return cherrypy.serving.request.args
+
+ @args.setter
+ def args(self, args):
+ cherrypy.serving.request.args = args
+ return cherrypy.serving.request.args
+
+ @property
+ def kwargs(self):
+ """The named kwargs should be accessible from post dispatch hooks."""
+ return cherrypy.serving.request.kwargs
+
+ @kwargs.setter
+ def kwargs(self, kwargs):
+ cherrypy.serving.request.kwargs = kwargs
+ return cherrypy.serving.request.kwargs
+
+ def __call__(self):
+ try:
+ return self.callable(*self.args, **self.kwargs)
+ except TypeError:
+ x = sys.exc_info()[1]
+ try:
+ test_callable_spec(self.callable, self.args, self.kwargs)
+ except cherrypy.HTTPError:
+ raise sys.exc_info()[1]
+ except Exception:
+ raise x
+ raise
+
+
+def test_callable_spec(callable, callable_args, callable_kwargs):
+ """
+ Inspect callable and test to see if the given args are suitable for it.
+
+ When an error occurs during the handler's invoking stage there are 2
+ erroneous cases:
+ 1. Too many parameters passed to a function which doesn't define
+ one of *args or **kwargs.
+ 2. Too little parameters are passed to the function.
+
+ There are 3 sources of parameters to a cherrypy handler.
+ 1. query string parameters are passed as keyword parameters to the
+ handler.
+ 2. body parameters are also passed as keyword parameters.
+ 3. when partial matching occurs, the final path atoms are passed as
+ positional args.
+ Both the query string and path atoms are part of the URI. If they are
+ incorrect, then a 404 Not Found should be raised. Conversely the body
+ parameters are part of the request; if they are invalid a 400 Bad Request.
+ """
+ show_mismatched_params = getattr(
+ cherrypy.serving.request, 'show_mismatched_params', False)
+ try:
+ (args, varargs, varkw, defaults) = getargspec(callable)
+ except TypeError:
+ if isinstance(callable, object) and hasattr(callable, '__call__'):
+ (args, varargs, varkw,
+ defaults) = getargspec(callable.__call__)
+ else:
+ # If it wasn't one of our own types, re-raise
+ # the original error
+ raise
+
+ if args and (
+ # For callable objects, which have a __call__(self) method
+ hasattr(callable, '__call__') or
+ # For normal methods
+ inspect.ismethod(callable)
+ ):
+ # Strip 'self'
+ args = args[1:]
+
+ arg_usage = dict([(arg, 0,) for arg in args])
+ vararg_usage = 0
+ varkw_usage = 0
+ extra_kwargs = set()
+
+ for i, value in enumerate(callable_args):
+ try:
+ arg_usage[args[i]] += 1
+ except IndexError:
+ vararg_usage += 1
+
+ for key in callable_kwargs.keys():
+ try:
+ arg_usage[key] += 1
+ except KeyError:
+ varkw_usage += 1
+ extra_kwargs.add(key)
+
+ # figure out which args have defaults.
+ args_with_defaults = args[-len(defaults or []):]
+ for i, val in enumerate(defaults or []):
+ # Defaults take effect only when the arg hasn't been used yet.
+ if arg_usage[args_with_defaults[i]] == 0:
+ arg_usage[args_with_defaults[i]] += 1
+
+ missing_args = []
+ multiple_args = []
+ for key, usage in arg_usage.items():
+ if usage == 0:
+ missing_args.append(key)
+ elif usage > 1:
+ multiple_args.append(key)
+
+ if missing_args:
+ # In the case where the method allows body arguments
+ # there are 3 potential errors:
+ # 1. not enough query string parameters -> 404
+ # 2. not enough body parameters -> 400
+ # 3. not enough path parts (partial matches) -> 404
+ #
+ # We can't actually tell which case it is,
+ # so I'm raising a 404 because that covers 2/3 of the
+ # possibilities
+ #
+ # In the case where the method does not allow body
+ # arguments it's definitely a 404.
+ message = None
+ if show_mismatched_params:
+ message = 'Missing parameters: %s' % ','.join(missing_args)
+ raise cherrypy.HTTPError(404, message=message)
+
+ # the extra positional arguments come from the path - 404 Not Found
+ if not varargs and vararg_usage > 0:
+ raise cherrypy.HTTPError(404)
+
+ body_params = cherrypy.serving.request.body.params or {}
+ body_params = set(body_params.keys())
+ qs_params = set(callable_kwargs.keys()) - body_params
+
+ if multiple_args:
+ if qs_params.intersection(set(multiple_args)):
+ # If any of the multiple parameters came from the query string then
+ # it's a 404 Not Found
+ error = 404
+ else:
+ # Otherwise it's a 400 Bad Request
+ error = 400
+
+ message = None
+ if show_mismatched_params:
+ message = 'Multiple values for parameters: '\
+ '%s' % ','.join(multiple_args)
+ raise cherrypy.HTTPError(error, message=message)
+
+ if not varkw and varkw_usage > 0:
+
+ # If there were extra query string parameters, it's a 404 Not Found
+ extra_qs_params = set(qs_params).intersection(extra_kwargs)
+ if extra_qs_params:
+ message = None
+ if show_mismatched_params:
+ message = 'Unexpected query string '\
+ 'parameters: %s' % ', '.join(extra_qs_params)
+ raise cherrypy.HTTPError(404, message=message)
+
+ # If there were any extra body parameters, it's a 400 Not Found
+ extra_body_params = set(body_params).intersection(extra_kwargs)
+ if extra_body_params:
+ message = None
+ if show_mismatched_params:
+ message = 'Unexpected body parameters: '\
+ '%s' % ', '.join(extra_body_params)
+ raise cherrypy.HTTPError(400, message=message)
+
+
+try:
+ import inspect
+except ImportError:
+ def test_callable_spec(callable, args, kwargs): # noqa: F811
+ return None
+else:
+ getargspec = inspect.getargspec
+ # Python 3 requires using getfullargspec if
+ # keyword-only arguments are present
+ if hasattr(inspect, 'getfullargspec'):
+ def getargspec(callable):
+ return inspect.getfullargspec(callable)[:4]
+
+
+class LateParamPageHandler(PageHandler):
+
+ """When passing cherrypy.request.params to the page handler, we do not
+ want to capture that dict too early; we want to give tools like the
+ decoding tool a chance to modify the params dict in-between the lookup
+ of the handler and the actual calling of the handler. This subclass
+ takes that into account, and allows request.params to be 'bound late'
+ (it's more complicated than that, but that's the effect).
+ """
+
+ @property
+ def kwargs(self):
+ """Page handler kwargs (with cherrypy.request.params copied in)."""
+ kwargs = cherrypy.serving.request.params.copy()
+ if self._kwargs:
+ kwargs.update(self._kwargs)
+ return kwargs
+
+ @kwargs.setter
+ def kwargs(self, kwargs):
+ cherrypy.serving.request.kwargs = kwargs
+ self._kwargs = kwargs
+
+
+if sys.version_info < (3, 0):
+ punctuation_to_underscores = string.maketrans(
+ string.punctuation, '_' * len(string.punctuation))
+
+ def validate_translator(t):
+ if not isinstance(t, str) or len(t) != 256:
+ raise ValueError(
+ 'The translate argument must be a str of len 256.')
+else:
+ punctuation_to_underscores = str.maketrans(
+ string.punctuation, '_' * len(string.punctuation))
+
+ def validate_translator(t):
+ if not isinstance(t, dict):
+ raise ValueError('The translate argument must be a dict.')
+
+
+class Dispatcher(object):
+
+ """CherryPy Dispatcher which walks a tree of objects to find a handler.
+
+ The tree is rooted at cherrypy.request.app.root, and each hierarchical
+ component in the path_info argument is matched to a corresponding nested
+ attribute of the root object. Matching handlers must have an 'exposed'
+ attribute which evaluates to True. The special method name "index"
+ matches a URI which ends in a slash ("/"). The special method name
+ "default" may match a portion of the path_info (but only when no longer
+ substring of the path_info matches some other object).
+
+ This is the default, built-in dispatcher for CherryPy.
+ """
+
+ dispatch_method_name = '_cp_dispatch'
+ """
+ The name of the dispatch method that nodes may optionally implement
+ to provide their own dynamic dispatch algorithm.
+ """
+
+ def __init__(self, dispatch_method_name=None,
+ translate=punctuation_to_underscores):
+ validate_translator(translate)
+ self.translate = translate
+ if dispatch_method_name:
+ self.dispatch_method_name = dispatch_method_name
+
+ def __call__(self, path_info):
+ """Set handler and config for the current request."""
+ request = cherrypy.serving.request
+ func, vpath = self.find_handler(path_info)
+
+ if func:
+ # Decode any leftover %2F in the virtual_path atoms.
+ vpath = [x.replace('%2F', '/') for x in vpath]
+ request.handler = LateParamPageHandler(func, *vpath)
+ else:
+ request.handler = cherrypy.NotFound()
+
+ def find_handler(self, path):
+ """Return the appropriate page handler, plus any virtual path.
+
+ This will return two objects. The first will be a callable,
+ which can be used to generate page output. Any parameters from
+ the query string or request body will be sent to that callable
+ as keyword arguments.
+
+ The callable is found by traversing the application's tree,
+ starting from cherrypy.request.app.root, and matching path
+ components to successive objects in the tree. For example, the
+ URL "/path/to/handler" might return root.path.to.handler.
+
+ The second object returned will be a list of names which are
+ 'virtual path' components: parts of the URL which are dynamic,
+ and were not used when looking up the handler.
+ These virtual path components are passed to the handler as
+ positional arguments.
+ """
+ request = cherrypy.serving.request
+ app = request.app
+ root = app.root
+ dispatch_name = self.dispatch_method_name
+
+ # Get config for the root object/path.
+ fullpath = [x for x in path.strip('/').split('/') if x] + ['index']
+ fullpath_len = len(fullpath)
+ segleft = fullpath_len
+ nodeconf = {}
+ if hasattr(root, '_cp_config'):
+ nodeconf.update(root._cp_config)
+ if '/' in app.config:
+ nodeconf.update(app.config['/'])
+ object_trail = [['root', root, nodeconf, segleft]]
+
+ node = root
+ iternames = fullpath[:]
+ while iternames:
+ name = iternames[0]
+ # map to legal Python identifiers (e.g. replace '.' with '_')
+ objname = name.translate(self.translate)
+
+ nodeconf = {}
+ subnode = getattr(node, objname, None)
+ pre_len = len(iternames)
+ if subnode is None:
+ dispatch = getattr(node, dispatch_name, None)
+ if dispatch and hasattr(dispatch, '__call__') and not \
+ getattr(dispatch, 'exposed', False) and \
+ pre_len > 1:
+ # Don't expose the hidden 'index' token to _cp_dispatch
+ # We skip this if pre_len == 1 since it makes no sense
+ # to call a dispatcher when we have no tokens left.
+ index_name = iternames.pop()
+ subnode = dispatch(vpath=iternames)
+ iternames.append(index_name)
+ else:
+ # We didn't find a path, but keep processing in case there
+ # is a default() handler.
+ iternames.pop(0)
+ else:
+ # We found the path, remove the vpath entry
+ iternames.pop(0)
+ segleft = len(iternames)
+ if segleft > pre_len:
+ # No path segment was removed. Raise an error.
+ raise cherrypy.CherryPyException(
+ 'A vpath segment was added. Custom dispatchers may only '
+ 'remove elements. While trying to process '
+ '{0} in {1}'.format(name, fullpath)
+ )
+ elif segleft == pre_len:
+ # Assume that the handler used the current path segment, but
+ # did not pop it. This allows things like
+ # return getattr(self, vpath[0], None)
+ iternames.pop(0)
+ segleft -= 1
+ node = subnode
+
+ if node is not None:
+ # Get _cp_config attached to this node.
+ if hasattr(node, '_cp_config'):
+ nodeconf.update(node._cp_config)
+
+ # Mix in values from app.config for this path.
+ existing_len = fullpath_len - pre_len
+ if existing_len != 0:
+ curpath = '/' + '/'.join(fullpath[0:existing_len])
+ else:
+ curpath = ''
+ new_segs = fullpath[fullpath_len - pre_len:fullpath_len - segleft]
+ for seg in new_segs:
+ curpath += '/' + seg
+ if curpath in app.config:
+ nodeconf.update(app.config[curpath])
+
+ object_trail.append([name, node, nodeconf, segleft])
+
+ def set_conf():
+ """Collapse all object_trail config into cherrypy.request.config.
+ """
+ base = cherrypy.config.copy()
+ # Note that we merge the config from each node
+ # even if that node was None.
+ for name, obj, conf, segleft in object_trail:
+ base.update(conf)
+ if 'tools.staticdir.dir' in conf:
+ base['tools.staticdir.section'] = '/' + \
+ '/'.join(fullpath[0:fullpath_len - segleft])
+ return base
+
+ # Try successive objects (reverse order)
+ num_candidates = len(object_trail) - 1
+ for i in range(num_candidates, -1, -1):
+
+ name, candidate, nodeconf, segleft = object_trail[i]
+ if candidate is None:
+ continue
+
+ # Try a "default" method on the current leaf.
+ if hasattr(candidate, 'default'):
+ defhandler = candidate.default
+ if getattr(defhandler, 'exposed', False):
+ # Insert any extra _cp_config from the default handler.
+ conf = getattr(defhandler, '_cp_config', {})
+ object_trail.insert(
+ i + 1, ['default', defhandler, conf, segleft])
+ request.config = set_conf()
+ # See https://github.com/cherrypy/cherrypy/issues/613
+ request.is_index = path.endswith('/')
+ return defhandler, fullpath[fullpath_len - segleft:-1]
+
+ # Uncomment the next line to restrict positional params to
+ # "default".
+ # if i < num_candidates - 2: continue
+
+ # Try the current leaf.
+ if getattr(candidate, 'exposed', False):
+ request.config = set_conf()
+ if i == num_candidates:
+ # We found the extra ".index". Mark request so tools
+ # can redirect if path_info has no trailing slash.
+ request.is_index = True
+ else:
+ # We're not at an 'index' handler. Mark request so tools
+                        # can redirect if path_info DOES have a trailing
+ # Note that this also includes handlers which take
+ # positional parameters (virtual paths).
+ request.is_index = False
+ return candidate, fullpath[fullpath_len - segleft:-1]
+
+ # We didn't find anything
+ request.config = set_conf()
+ return None, []
+
+
+class MethodDispatcher(Dispatcher):
+
+ """Additional dispatch based on cherrypy.request.method.upper().
+
+ Methods named GET, POST, etc will be called on an exposed class.
+ The method names must be all caps; the appropriate Allow header
+ will be output showing all capitalized method names as allowable
+ HTTP verbs.
+
+ Note that the containing class must be exposed, not the methods.
+ """
+
+ def __call__(self, path_info):
+ """Set handler and config for the current request."""
+ request = cherrypy.serving.request
+ resource, vpath = self.find_handler(path_info)
+
+ if resource:
+ # Set Allow header
+ avail = [m for m in dir(resource) if m.isupper()]
+ if 'GET' in avail and 'HEAD' not in avail:
+ avail.append('HEAD')
+ avail.sort()
+ cherrypy.serving.response.headers['Allow'] = ', '.join(avail)
+
+ # Find the subhandler
+ meth = request.method.upper()
+ func = getattr(resource, meth, None)
+ if func is None and meth == 'HEAD':
+ func = getattr(resource, 'GET', None)
+ if func:
+ # Grab any _cp_config on the subhandler.
+ if hasattr(func, '_cp_config'):
+ request.config.update(func._cp_config)
+
+ # Decode any leftover %2F in the virtual_path atoms.
+ vpath = [x.replace('%2F', '/') for x in vpath]
+ request.handler = LateParamPageHandler(func, *vpath)
+ else:
+ request.handler = cherrypy.HTTPError(405)
+ else:
+ request.handler = cherrypy.NotFound()
+
+
+class RoutesDispatcher(object):
+
+ """A Routes based dispatcher for CherryPy."""
+
+ def __init__(self, full_result=False, **mapper_options):
+ """
+ Routes dispatcher
+
+ Set full_result to True if you wish the controller
+ and the action to be passed on to the page handler
+ parameters. By default they won't be.
+ """
+ import routes
+ self.full_result = full_result
+ self.controllers = {}
+ self.mapper = routes.Mapper(**mapper_options)
+ self.mapper.controller_scan = self.controllers.keys
+
+ def connect(self, name, route, controller, **kwargs):
+ self.controllers[name] = controller
+ self.mapper.connect(name, route, controller=name, **kwargs)
+
+ def redirect(self, url):
+ raise cherrypy.HTTPRedirect(url)
+
+ def __call__(self, path_info):
+ """Set handler and config for the current request."""
+ func = self.find_handler(path_info)
+ if func:
+ cherrypy.serving.request.handler = LateParamPageHandler(func)
+ else:
+ cherrypy.serving.request.handler = cherrypy.NotFound()
+
+ def find_handler(self, path_info):
+ """Find the right page handler, and set request.config."""
+ import routes
+
+ request = cherrypy.serving.request
+
+ config = routes.request_config()
+ config.mapper = self.mapper
+ if hasattr(request, 'wsgi_environ'):
+ config.environ = request.wsgi_environ
+ config.host = request.headers.get('Host', None)
+ config.protocol = request.scheme
+ config.redirect = self.redirect
+
+ result = self.mapper.match(path_info)
+
+ config.mapper_dict = result
+ params = {}
+ if result:
+ params = result.copy()
+ if not self.full_result:
+ params.pop('controller', None)
+ params.pop('action', None)
+ request.params.update(params)
+
+ # Get config for the root object/path.
+ request.config = base = cherrypy.config.copy()
+ curpath = ''
+
+ def merge(nodeconf):
+ if 'tools.staticdir.dir' in nodeconf:
+ nodeconf['tools.staticdir.section'] = curpath or '/'
+ base.update(nodeconf)
+
+ app = request.app
+ root = app.root
+ if hasattr(root, '_cp_config'):
+ merge(root._cp_config)
+ if '/' in app.config:
+ merge(app.config['/'])
+
+ # Mix in values from app.config.
+ atoms = [x for x in path_info.split('/') if x]
+ if atoms:
+ last = atoms.pop()
+ else:
+ last = None
+ for atom in atoms:
+ curpath = '/'.join((curpath, atom))
+ if curpath in app.config:
+ merge(app.config[curpath])
+
+ handler = None
+ if result:
+ controller = result.get('controller')
+ controller = self.controllers.get(controller, controller)
+ if controller:
+ if isinstance(controller, classtype):
+ controller = controller()
+ # Get config from the controller.
+ if hasattr(controller, '_cp_config'):
+ merge(controller._cp_config)
+
+ action = result.get('action')
+ if action is not None:
+ handler = getattr(controller, action, None)
+ # Get config from the handler
+ if hasattr(handler, '_cp_config'):
+ merge(handler._cp_config)
+ else:
+ handler = controller
+
+ # Do the last path atom here so it can
+ # override the controller's _cp_config.
+ if last:
+ curpath = '/'.join((curpath, last))
+ if curpath in app.config:
+ merge(app.config[curpath])
+
+ return handler
+
+
+def XMLRPCDispatcher(next_dispatcher=Dispatcher()):
+ from cherrypy.lib import xmlrpcutil
+
+ def xmlrpc_dispatch(path_info):
+ path_info = xmlrpcutil.patched_path(path_info)
+ return next_dispatcher(path_info)
+ return xmlrpc_dispatch
+
+
+def VirtualHost(next_dispatcher=Dispatcher(), use_x_forwarded_host=True,
+ **domains):
+ """
+ Select a different handler based on the Host header.
+
+ This can be useful when running multiple sites within one CP server.
+ It allows several domains to point to different parts of a single
+ website structure. For example::
+
+ http://www.domain.example -> root
+ http://www.domain2.example -> root/domain2/
+ http://www.domain2.example:443 -> root/secure
+
+ can be accomplished via the following config::
+
+ [/]
+ request.dispatch = cherrypy.dispatch.VirtualHost(
+ **{'www.domain2.example': '/domain2',
+ 'www.domain2.example:443': '/secure',
+ })
+
+ next_dispatcher
+ The next dispatcher object in the dispatch chain.
+ The VirtualHost dispatcher adds a prefix to the URL and calls
+ another dispatcher. Defaults to cherrypy.dispatch.Dispatcher().
+
+ use_x_forwarded_host
+ If True (the default), any "X-Forwarded-Host"
+ request header will be used instead of the "Host" header. This
+ is commonly added by HTTP servers (such as Apache) when proxying.
+
+ ``**domains``
+ A dict of {host header value: virtual prefix} pairs.
+ The incoming "Host" request header is looked up in this dict,
+ and, if a match is found, the corresponding "virtual prefix"
+ value will be prepended to the URL path before calling the
+ next dispatcher. Note that you often need separate entries
+ for "example.com" and "www.example.com". In addition, "Host"
+ headers may contain the port number.
+ """
+ from cherrypy.lib import httputil
+
+ def vhost_dispatch(path_info):
+ request = cherrypy.serving.request
+ header = request.headers.get
+
+ domain = header('Host', '')
+ if use_x_forwarded_host:
+ domain = header('X-Forwarded-Host', domain)
+
+ prefix = domains.get(domain, '')
+ if prefix:
+ path_info = httputil.urljoin(prefix, path_info)
+
+ result = next_dispatcher(path_info)
+
+ # Touch up staticdir config. See
+ # https://github.com/cherrypy/cherrypy/issues/614.
+ section = request.config.get('tools.staticdir.section')
+ if section:
+ section = section[len(prefix):]
+ request.config['tools.staticdir.section'] = section
+
+ return result
+ return vhost_dispatch
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/_cperror.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/_cperror.py
new file mode 100644
index 0000000000000000000000000000000000000000..4e727682786da33992f601df3358cc29b536fef3
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/_cperror.py
@@ -0,0 +1,619 @@
+"""Exception classes for CherryPy.
+
+CherryPy provides (and uses) exceptions for declaring that the HTTP response
+should be a status other than the default "200 OK". You can ``raise`` them like
+normal Python exceptions. You can also call them and they will raise
+themselves; this means you can set an
+:class:`HTTPError<cherrypy._cperror.HTTPError>`
+or :class:`HTTPRedirect<cherrypy._cperror.HTTPRedirect>` as the
+:attr:`request.handler<cherrypy._cprequest.Request.handler>`.
+
+.. _redirectingpost:
+
+Redirecting POST
+================
+
+When you GET a resource and are redirected by the server to another Location,
+there's generally no problem since GET is both a "safe method" (there should
+be no side-effects) and an "idempotent method" (multiple calls are no different
+than a single call).
+
+POST, however, is neither safe nor idempotent--if you
+charge a credit card, you don't want to be charged twice by a redirect!
+
+For this reason, *none* of the 3xx responses permit a user-agent (browser) to
+resubmit a POST on redirection without first confirming the action with the
+user:
+
+===== ================================= ===========
+300 Multiple Choices Confirm with the user
+301 Moved Permanently Confirm with the user
+302 Found (Object moved temporarily) Confirm with the user
+303 See Other GET the new URI; no confirmation
+304 Not modified for conditional GET only;
+ POST should not raise this error
+305 Use Proxy Confirm with the user
+307 Temporary Redirect Confirm with the user
+308 Permanent Redirect No confirmation
+===== ================================= ===========
+
+However, browsers have historically implemented these restrictions poorly;
+in particular, many browsers do not force the user to confirm 301, 302
+or 307 when redirecting POST. For this reason, CherryPy defaults to 303,
+which most user-agents appear to have implemented correctly. Therefore, if
+you raise HTTPRedirect for a POST request, the user-agent will most likely
+attempt to GET the new URI (without asking for confirmation from the user).
+We realize this is confusing for developers, but it's the safest thing we
+could do. You are of course free to raise ``HTTPRedirect(uri, status=302)``
+or any other 3xx status if you know what you're doing, but given the
+environment, we couldn't let any of those be the default.
+
+Custom Error Handling
+=====================
+
+.. image:: /refman/cperrors.gif
+
+Anticipated HTTP responses
+--------------------------
+
+The 'error_page' config namespace can be used to provide custom HTML output for
+expected responses (like 404 Not Found). Supply a filename from which the
+output will be read. The contents will be interpolated with the values
+%(status)s, %(message)s, %(traceback)s, and %(version)s using plain old Python
+`string formatting
+<http://docs.python.org/2/library/stdtypes.html#string-formatting-operations>`_.
+
+::
+
+ _cp_config = {
+ 'error_page.404': os.path.join(localDir, "static/index.html")
+ }
+
+
+Beginning in version 3.1, you may also provide a function or other callable as
+an error_page entry. It will be passed the same status, message, traceback and
+version arguments that are interpolated into templates::
+
+ def error_page_402(status, message, traceback, version):
+ return "Error %s - Well, I'm very sorry but you haven't paid!" % status
+ cherrypy.config.update({'error_page.402': error_page_402})
+
+Also in 3.1, in addition to the numbered error codes, you may also supply
+"error_page.default" to handle all codes which do not have their own error_page
+entry.
+
+
+
+Unanticipated errors
+--------------------
+
+CherryPy also has a generic error handling mechanism: whenever an unanticipated
+error occurs in your code, it will call
+:func:`Request.error_response<cherrypy._cprequest.Request.error_response>` to
+set the response status, headers, and body. By default, this is the same
+output as
+:class:`HTTPError(500) <cherrypy._cperror.HTTPError>`. If you want to provide
+some other behavior, you generally replace "request.error_response".
+
+Here is some sample code that shows how to display a custom error message and
+send an e-mail containing the error::
+
+ from cherrypy import _cperror
+
+ def handle_error():
+ cherrypy.response.status = 500
+ cherrypy.response.body = [
+ "<html><body>Sorry, an error occurred</body></html>"
+ ]
+ sendMail('error@domain.com',
+ 'Error in your web app',
+ _cperror.format_exc())
+
+ @cherrypy.config(**{'request.error_response': handle_error})
+ class Root:
+ pass
+
+Note that you have to explicitly set
+:attr:`response.body <cherrypy._cprequest.Response.body>`
+and not simply return an error message as a result.
+"""
+
+import io
+import contextlib
+import urllib.parse
+from sys import exc_info as _exc_info
+from traceback import format_exception as _format_exception
+from xml.sax import saxutils
+import html
+
+from more_itertools import always_iterable
+
+import cherrypy
+from cherrypy._cpcompat import ntob
+from cherrypy._cpcompat import tonative
+from cherrypy._helper import classproperty
+from cherrypy.lib import httputil as _httputil
+
+
+class CherryPyException(Exception):
+
+ """A base class for CherryPy exceptions."""
+ pass
+
+
+class InternalRedirect(CherryPyException):
+
+ """Exception raised to switch to the handler for a different URL.
+
+ This exception will redirect processing to another path within the site
+ (without informing the client). Provide the new path as an argument when
+ raising the exception. Provide any params in the querystring for the new
+ URL.
+ """
+
+ def __init__(self, path, query_string=''):
+ self.request = cherrypy.serving.request
+
+ self.query_string = query_string
+ if '?' in path:
+ # Separate any params included in the path
+ path, self.query_string = path.split('?', 1)
+
+ # Note that urljoin will "do the right thing" whether url is:
+ # 1. a URL relative to root (e.g. "/dummy")
+ # 2. a URL relative to the current path
+ # Note that any query string will be discarded.
+ path = urllib.parse.urljoin(self.request.path_info, path)
+
+ # Set a 'path' member attribute so that code which traps this
+ # error can have access to it.
+ self.path = path
+
+ CherryPyException.__init__(self, path, self.query_string)
+
+
+class HTTPRedirect(CherryPyException):
+
+ """Exception raised when the request should be redirected.
+
+    This exception will force an HTTP redirect to the URL or URLs you give it.
+ The new URL must be passed as the first argument to the Exception,
+ e.g., HTTPRedirect(newUrl). Multiple URLs are allowed in a list.
+ If a URL is absolute, it will be used as-is. If it is relative, it is
+ assumed to be relative to the current cherrypy.request.path_info.
+
+    If one of the provided URLs is a unicode object, it will be encoded
+    using the default encoding or the one passed as a parameter.
+
+ There are multiple types of redirect, from which you can select via the
+ ``status`` argument. If you do not provide a ``status`` arg, it defaults to
+ 303 (or 302 if responding with HTTP/1.0).
+
+ Examples::
+
+ raise cherrypy.HTTPRedirect("")
+ raise cherrypy.HTTPRedirect("/abs/path", 307)
+ raise cherrypy.HTTPRedirect(["path1", "path2?a=1&b=2"], 301)
+
+ See :ref:`redirectingpost` for additional caveats.
+ """
+
+ urls = None
+ """The list of URL's to emit."""
+
+ encoding = 'utf-8'
+ """The encoding when passed urls are not native strings"""
+
+ def __init__(self, urls, status=None, encoding=None):
+ self.urls = abs_urls = [
+ # Note that urljoin will "do the right thing" whether url is:
+ # 1. a complete URL with host (e.g. "http://www.example.com/test")
+ # 2. a URL relative to root (e.g. "/dummy")
+ # 3. a URL relative to the current path
+ # Note that any query string in cherrypy.request is discarded.
+ urllib.parse.urljoin(
+ cherrypy.url(),
+ tonative(url, encoding or self.encoding),
+ )
+ for url in always_iterable(urls)
+ ]
+
+ status = (
+ int(status)
+ if status is not None
+ else self.default_status
+ )
+ if not 300 <= status <= 399:
+ raise ValueError('status must be between 300 and 399.')
+
+ CherryPyException.__init__(self, abs_urls, status)
+
+ @classproperty
+ def default_status(cls):
+ """
+ The default redirect status for the request.
+
+ RFC 2616 indicates a 301 response code fits our goal; however,
+ browser support for 301 is quite messy. Use 302/303 instead. See
+ http://www.alanflavell.org.uk/www/post-redirect.html
+ """
+ return 303 if cherrypy.serving.request.protocol >= (1, 1) else 302
+
+ @property
+ def status(self):
+ """The integer HTTP status code to emit."""
+ _, status = self.args[:2]
+ return status
+
+ def set_response(self):
+ """Modify cherrypy.response status, headers, and body to represent
+ self.
+
+ CherryPy uses this internally, but you can also use it to create an
+ HTTPRedirect object and set its output without *raising* the exception.
+ """
+ response = cherrypy.serving.response
+ response.status = status = self.status
+
+ if status in (300, 301, 302, 303, 307, 308):
+ response.headers['Content-Type'] = 'text/html;charset=utf-8'
+ # "The ... URI SHOULD be given by the Location field
+ # in the response."
+ response.headers['Location'] = self.urls[0]
+
+ # "Unless the request method was HEAD, the entity of the response
+ # SHOULD contain a short hypertext note with a hyperlink to the
+ # new URI(s)."
+ msg = {
+ 300: 'This resource can be found at ',
+ 301: 'This resource has permanently moved to ',
+ 302: 'This resource resides temporarily at ',
+ 303: 'This resource can be found at ',
+ 307: 'This resource has moved temporarily to ',
+ 308: 'This resource has been moved to ',
+ }[status]
+ msg += '<a href=%s>%s</a>.'
+ msgs = [
+ msg % (saxutils.quoteattr(u), html.escape(u, quote=False))
+ for u in self.urls
+ ]
+ response.body = ntob('<br />\n'.join(msgs), 'utf-8')
+ # Previous code may have set C-L, so we have to reset it
+ # (allow finalize to set it).
+ response.headers.pop('Content-Length', None)
+ elif status == 304:
+ # Not Modified.
+ # "The response MUST include the following header fields:
+ # Date, unless its omission is required by section 14.18.1"
+ # The "Date" header should have been set in Response.__init__
+
+ # "...the response SHOULD NOT include other entity-headers."
+ for key in ('Allow', 'Content-Encoding', 'Content-Language',
+ 'Content-Length', 'Content-Location', 'Content-MD5',
+ 'Content-Range', 'Content-Type', 'Expires',
+ 'Last-Modified'):
+ if key in response.headers:
+ del response.headers[key]
+
+ # "The 304 response MUST NOT contain a message-body."
+ response.body = None
+ # Previous code may have set C-L, so we have to reset it.
+ response.headers.pop('Content-Length', None)
+ elif status == 305:
+ # Use Proxy.
+ # self.urls[0] should be the URI of the proxy.
+ response.headers['Location'] = ntob(self.urls[0], 'utf-8')
+ response.body = None
+ # Previous code may have set C-L, so we have to reset it.
+ response.headers.pop('Content-Length', None)
+ else:
+ raise ValueError('The %s status code is unknown.' % status)
+
+ def __call__(self):
+ """Use this exception as a request.handler (raise self)."""
+ raise self
+
+
+def clean_headers(status):
+ """Remove any headers which should not apply to an error response."""
+ response = cherrypy.serving.response
+
+ # Remove headers which applied to the original content,
+ # but do not apply to the error page.
+ respheaders = response.headers
+ for key in ['Accept-Ranges', 'Age', 'ETag', 'Location', 'Retry-After',
+ 'Vary', 'Content-Encoding', 'Content-Length', 'Expires',
+ 'Content-Location', 'Content-MD5', 'Last-Modified']:
+ if key in respheaders:
+ del respheaders[key]
+
+ if status != 416:
+ # A server sending a response with status code 416 (Requested
+ # range not satisfiable) SHOULD include a Content-Range field
+ # with a byte-range-resp-spec of "*". The instance-length
+ # specifies the current length of the selected resource.
+ # A response with status code 206 (Partial Content) MUST NOT
+ # include a Content-Range field with a byte-range- resp-spec of "*".
+ if 'Content-Range' in respheaders:
+ del respheaders['Content-Range']
+
+
+class HTTPError(CherryPyException):
+
+ """Exception used to return an HTTP error code (4xx-5xx) to the client.
+
+    This exception can be used to automatically send a response using an
+    HTTP status code, with an appropriate error page. It takes an optional
+ ``status`` argument (which must be between 400 and 599); it defaults to 500
+ ("Internal Server Error"). It also takes an optional ``message`` argument,
+ which will be returned in the response body. See
+ `RFC2616 <http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.4>`_
+ for a complete list of available error codes and when to use them.
+
+ Examples::
+
+ raise cherrypy.HTTPError(403)
+ raise cherrypy.HTTPError(
+ "403 Forbidden", "You are not allowed to access this resource.")
+ """
+
+ status = None
+ """The HTTP status code. May be of type int or str (with a Reason-Phrase).
+ """
+
+ code = None
+ """The integer HTTP status code."""
+
+ reason = None
+ """The HTTP Reason-Phrase string."""
+
+ def __init__(self, status=500, message=None):
+ self.status = status
+ try:
+ self.code, self.reason, defaultmsg = _httputil.valid_status(status)
+ except ValueError:
+ raise self.__class__(500, _exc_info()[1].args[0])
+
+ if self.code < 400 or self.code > 599:
+ raise ValueError('status must be between 400 and 599.')
+
+ # See http://www.python.org/dev/peps/pep-0352/
+ # self.message = message
+ self._message = message or defaultmsg
+ CherryPyException.__init__(self, status, message)
+
+ def set_response(self):
+ """Modify cherrypy.response status, headers, and body to represent
+ self.
+
+ CherryPy uses this internally, but you can also use it to create an
+ HTTPError object and set its output without *raising* the exception.
+ """
+ response = cherrypy.serving.response
+
+ clean_headers(self.code)
+
+ # In all cases, finalize will be called after this method,
+ # so don't bother cleaning up response values here.
+ response.status = self.status
+ tb = None
+ if cherrypy.serving.request.show_tracebacks:
+ tb = format_exc()
+
+ response.headers.pop('Content-Length', None)
+
+ content = self.get_error_page(self.status, traceback=tb,
+ message=self._message)
+ response.body = content
+
+ _be_ie_unfriendly(self.code)
+
+ def get_error_page(self, *args, **kwargs):
+ return get_error_page(*args, **kwargs)
+
+ def __call__(self):
+ """Use this exception as a request.handler (raise self)."""
+ raise self
+
+ @classmethod
+ @contextlib.contextmanager
+ def handle(cls, exception, status=500, message=''):
+ """Translate exception into an HTTPError."""
+ try:
+ yield
+ except exception as exc:
+ raise cls(status, message or str(exc))
+
+
+class NotFound(HTTPError):
+
+ """Exception raised when a URL could not be mapped to any handler (404).
+
+ This is equivalent to raising
+ :class:`HTTPError("404 Not Found") <cherrypy._cperror.HTTPError>`.
+ """
+
+ def __init__(self, path=None):
+ if path is None:
+ request = cherrypy.serving.request
+ path = request.script_name + request.path_info
+ self.args = (path,)
+ HTTPError.__init__(self, 404, "The path '%s' was not found." % path)
+
+
+_HTTPErrorTemplate = '''<!DOCTYPE html PUBLIC
+"-//W3C//DTD XHTML 1.0 Transitional//EN"
+"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+<html>
+<head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8"></meta>
+ <title>%(status)s</title>
+ <style type="text/css">
+ #powered_by {
+ margin-top: 20px;
+ border-top: 2px solid black;
+ font-style: italic;
+ }
+
+ #traceback {
+ color: red;
+ }
+ </style>
+</head>
+ <body>
+ <h2>%(status)s</h2>
+ <p>%(message)s</p>
+ <pre id="traceback">%(traceback)s</pre>
+ <div id="powered_by">
+ <span>
+ Powered by <a href="http://www.cherrypy.org">CherryPy %(version)s</a>
+ </span>
+ </div>
+ </body>
+</html>
+'''
+
+
+def get_error_page(status, **kwargs):
+ """Return an HTML page, containing a pretty error response.
+
+ status should be an int or a str.
+ kwargs will be interpolated into the page template.
+ """
+ try:
+ code, reason, message = _httputil.valid_status(status)
+ except ValueError:
+ raise cherrypy.HTTPError(500, _exc_info()[1].args[0])
+
+ # We can't use setdefault here, because some
+ # callers send None for kwarg values.
+ if kwargs.get('status') is None:
+ kwargs['status'] = '%s %s' % (code, reason)
+ if kwargs.get('message') is None:
+ kwargs['message'] = message
+ if kwargs.get('traceback') is None:
+ kwargs['traceback'] = ''
+ if kwargs.get('version') is None:
+ kwargs['version'] = cherrypy.__version__
+
+ for k, v in kwargs.items():
+ if v is None:
+ kwargs[k] = ''
+ else:
+ kwargs[k] = html.escape(kwargs[k], quote=False)
+
+ # Use a custom template or callable for the error page?
+ pages = cherrypy.serving.request.error_page
+ error_page = pages.get(code) or pages.get('default')
+
+ # Default template, can be overridden below.
+ template = _HTTPErrorTemplate
+ if error_page:
+ try:
+ if hasattr(error_page, '__call__'):
+ # The caller function may be setting headers manually,
+ # so we delegate to it completely. We may be returning
+ # an iterator as well as a string here.
+ #
+ # We *must* make sure any content is not unicode.
+ result = error_page(**kwargs)
+ if cherrypy.lib.is_iterator(result):
+ from cherrypy.lib.encoding import UTF8StreamEncoder
+ return UTF8StreamEncoder(result)
+ elif isinstance(result, str):
+ return result.encode('utf-8')
+ else:
+ if not isinstance(result, bytes):
+ raise ValueError(
+ 'error page function did not '
+ 'return a bytestring, str or an '
+ 'iterator - returned object of type %s.'
+ % (type(result).__name__))
+ return result
+ else:
+ # Load the template from this path.
+ template = io.open(error_page, newline='').read()
+ except Exception:
+ e = _format_exception(*_exc_info())[-1]
+ m = kwargs['message']
+ if m:
+ m += '<br />'
+ m += 'In addition, the custom error page failed:\n<br />%s' % e
+ kwargs['message'] = m
+
+ response = cherrypy.serving.response
+ response.headers['Content-Type'] = 'text/html;charset=utf-8'
+ result = template % kwargs
+ return result.encode('utf-8')
+
+
+_ie_friendly_error_sizes = {
+ 400: 512, 403: 256, 404: 512, 405: 256,
+ 406: 512, 408: 512, 409: 512, 410: 256,
+ 500: 512, 501: 512, 505: 512,
+}
+
+
+def _be_ie_unfriendly(status):
+ response = cherrypy.serving.response
+
+ # For some statuses, Internet Explorer 5+ shows "friendly error
+ # messages" instead of our response.body if the body is smaller
+ # than a given size. Fix this by returning a body over that size
+ # (by adding whitespace).
+ # See http://support.microsoft.com/kb/q218155/
+ s = _ie_friendly_error_sizes.get(status, 0)
+ if s:
+ s += 1
+ # Since we are issuing an HTTP error status, we assume that
+ # the entity is short, and we should just collapse it.
+ content = response.collapse_body()
+ content_length = len(content)
+ if content_length and content_length < s:
+ # IN ADDITION: the response must be written to IE
+ # in one chunk or it will still get replaced! Bah.
+ content = content + (b' ' * (s - content_length))
+ response.body = content
+ response.headers['Content-Length'] = str(len(content))
+
+
+def format_exc(exc=None):
+ """Return exc (or sys.exc_info if None), formatted."""
+ try:
+ if exc is None:
+ exc = _exc_info()
+ if exc == (None, None, None):
+ return ''
+ import traceback
+ return ''.join(traceback.format_exception(*exc))
+ finally:
+ del exc
+
+
+def bare_error(extrabody=None):
+ """Produce status, headers, body for a critical error.
+
+ Returns a triple without calling any other questionable functions,
+ so it should be as error-free as possible. Call it from an HTTP server
+ if you get errors outside of the request.
+
+ If extrabody is None, a friendly but rather unhelpful error message
+ is set in the body. If extrabody is a string, it will be appended
+ as-is to the body.
+ """
+
+ # The whole point of this function is to be a last line-of-defense
+ # in handling errors. That is, it must not raise any errors itself;
+ # it cannot be allowed to fail. Therefore, don't add to it!
+ # In particular, don't call any other CP functions.
+
+ body = b'Unrecoverable error in the server.'
+ if extrabody is not None:
+ if not isinstance(extrabody, bytes):
+ extrabody = extrabody.encode('utf-8')
+ body += b'\n' + extrabody
+
+ return (b'500 Internal Server Error',
+ [(b'Content-Type', b'text/plain'),
+ (b'Content-Length', ntob(str(len(body)), 'ISO-8859-1'))],
+ [body])
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/_cplogging.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/_cplogging.py
new file mode 100644
index 0000000000000000000000000000000000000000..151d3b407858ad66b7b6841cba6da29b21093495
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/_cplogging.py
@@ -0,0 +1,457 @@
+"""
+Simple config
+=============
+
+Although CherryPy uses the :mod:`Python logging module <logging>`, it does so
+behind the scenes so that simple logging is simple, but complicated logging
+is still possible. "Simple" logging means that you can log to the screen
+(i.e. console/stdout) or to a file, and that you can easily have separate
+error and access log files.
+
+Here are the simplified logging settings. You use these by adding lines to
+your config file or dict. You should set these at either the global level or
+per application (see next), but generally not both.
+
+ * ``log.screen``: Set this to True to have both "error" and "access" messages
+ printed to stdout.
+ * ``log.access_file``: Set this to an absolute filename where you want
+ "access" messages written.
+ * ``log.error_file``: Set this to an absolute filename where you want "error"
+ messages written.
+
+Many events are automatically logged; to log your own application events, call
+:func:`cherrypy.log`.
+
+Architecture
+============
+
+Separate scopes
+---------------
+
+CherryPy provides log managers at both the global and application layers.
+This means you can have one set of logging rules for your entire site,
+and another set of rules specific to each application. The global log
+manager is found at :func:`cherrypy.log`, and the log manager for each
+application is found at :attr:`app.log<cherrypy._cptree.Application.log>`.
+If you're inside a request, the latter is reachable from
+``cherrypy.request.app.log``; if you're outside a request, you'll have to
+obtain a reference to the ``app``: either the return value of
+:func:`tree.mount()<cherrypy._cptree.Tree.mount>` or, if you used
+:func:`quickstart()<cherrypy.quickstart>` instead, via
+``cherrypy.tree.apps['/']``.
+
+By default, the global logs are named "cherrypy.error" and "cherrypy.access",
+and the application logs are named "cherrypy.error.2378745" and
+"cherrypy.access.2378745" (the number is the id of the Application object).
+This means that the application logs "bubble up" to the site logs, so if your
+application has no log handlers, the site-level handlers will still log the
+messages.
+
+Errors vs. Access
+-----------------
+
+Each log manager handles both "access" messages (one per HTTP request) and
+"error" messages (everything else). Note that the "error" log is not just for
+errors! The format of access messages is highly formalized, but the error log
+isn't--it receives messages from a variety of sources (including full error
+tracebacks, if enabled).
+
+If you are logging the access log and error log to the same source, then there
+is a possibility that a specially crafted error message may replicate an access
+log message as described in CWE-117. In this case it is the application
+developer's responsibility to manually escape data before
+using CherryPy's log()
+functionality, or they may create an application that is vulnerable to CWE-117.
+This would be achieved by using a custom handler to escape any special
+characters, attached as described below.
+
+Custom Handlers
+===============
+
+The simple settings above work by manipulating Python's standard :mod:`logging`
+module. So when you need something more complex, the full power of the standard
+module is yours to exploit. You can borrow or create custom handlers, formats,
+filters, and much more. Here's an example that skips the standard FileHandler
+and uses a RotatingFileHandler instead:
+
+::
+
+ #python
+ log = app.log
+
+ # Remove the default FileHandlers if present.
+ log.error_file = ""
+ log.access_file = ""
+
+ maxBytes = getattr(log, "rot_maxBytes", 10000000)
+ backupCount = getattr(log, "rot_backupCount", 1000)
+
+ # Make a new RotatingFileHandler for the error log.
+ fname = getattr(log, "rot_error_file", "error.log")
+ h = handlers.RotatingFileHandler(fname, 'a', maxBytes, backupCount)
+ h.setLevel(DEBUG)
+ h.setFormatter(_cplogging.logfmt)
+ log.error_log.addHandler(h)
+
+ # Make a new RotatingFileHandler for the access log.
+ fname = getattr(log, "rot_access_file", "access.log")
+ h = handlers.RotatingFileHandler(fname, 'a', maxBytes, backupCount)
+ h.setLevel(DEBUG)
+ h.setFormatter(_cplogging.logfmt)
+ log.access_log.addHandler(h)
+
+
+The ``rot_*`` attributes are pulled straight from the application log object.
+Since "log.*" config entries simply set attributes on the log object, you can
+add custom attributes to your heart's content. Note that these handlers are
+used ''instead'' of the default, simple handlers outlined above (so don't set
+the "log.error_file" config entry, for example).
+"""
+
+import datetime
+import logging
+import os
+import sys
+
+import cherrypy
+from cherrypy import _cperror
+
+
+# Silence the no-handlers "warning" (stderr write!) in stdlib logging
+logging.Logger.manager.emittedNoHandlerWarning = 1
+logfmt = logging.Formatter('%(message)s')
+
+
+class NullHandler(logging.Handler):
+
+    """A no-op logging handler to silence the logging.lastResort handler."""
+
+    def handle(self, record):
+        pass
+
+    def emit(self, record):
+        pass
+
+    def createLock(self):
+        # No lock is needed because emit()/handle() do nothing at all.
+        self.lock = None
+
+
+class LogManager(object):
+
+    """An object to assist both simple and advanced logging.
+
+    ``cherrypy.log`` is an instance of this class.
+    """
+
+    appid = None
+    """The id() of the Application object which owns this log manager. If this
+    is a global log manager, appid is None."""
+
+    error_log = None
+    """The actual :class:`logging.Logger` instance for error messages."""
+
+    access_log = None
+    """The actual :class:`logging.Logger` instance for access messages."""
+
+    # Apache/NCSA Combined Log Format, keyed by the atoms built in access().
+    access_log_format = '{h} {l} {u} {t} "{r}" {s} {b} "{f}" "{a}"'
+
+    logger_root = None
+    """The "top-level" logger name.
+
+    This string will be used as the first segment in the Logger names.
+    The default is "cherrypy", for example, in which case the Logger names
+    will be of the form::
+
+        cherrypy.error.<appid>
+        cherrypy.access.<appid>
+    """
+
+    def __init__(self, appid=None, logger_root='cherrypy'):
+        self.logger_root = logger_root
+        self.appid = appid
+        if appid is None:
+            self.error_log = logging.getLogger('%s.error' % logger_root)
+            self.access_log = logging.getLogger('%s.access' % logger_root)
+        else:
+            self.error_log = logging.getLogger(
+                '%s.error.%s' % (logger_root, appid))
+            self.access_log = logging.getLogger(
+                '%s.access.%s' % (logger_root, appid))
+        self.error_log.setLevel(logging.INFO)
+        self.access_log.setLevel(logging.INFO)
+
+        # Silence the no-handlers "warning" (stderr write!) in stdlib logging
+        self.error_log.addHandler(NullHandler())
+        self.access_log.addHandler(NullHandler())
+
+        # Reopen file handlers on a graceful engine restart so external log
+        # rotation is picked up.
+        cherrypy.engine.subscribe('graceful', self.reopen_files)
+
+    def reopen_files(self):
+        """Close and reopen all file handlers."""
+        for log in (self.error_log, self.access_log):
+            for h in log.handlers:
+                if isinstance(h, logging.FileHandler):
+                    # Hold the handler's lock while swapping streams so a
+                    # concurrent emit() never writes to a closed stream.
+                    h.acquire()
+                    h.stream.close()
+                    h.stream = open(h.baseFilename, h.mode)
+                    h.release()
+
+    def error(self, msg='', context='', severity=logging.INFO,
+              traceback=False):
+        """Write the given ``msg`` to the error log.
+
+        This is not just for errors! Applications may call this at any time
+        to log application-specific information.
+
+        If ``traceback`` is True, the traceback of the current exception
+        (if any) will be appended to ``msg``.
+        """
+        exc_info = None
+        if traceback:
+            exc_info = _cperror._exc_info()
+
+        # The record message is "<timestamp> <context> <msg>".
+        self.error_log.log(
+            severity,
+            ' '.join((self.time(), context, msg)),
+            exc_info=exc_info,
+        )
+
+    def __call__(self, *args, **kwargs):
+        """An alias for ``error``."""
+        return self.error(*args, **kwargs)
+
+    def access(self):
+        """Write to the access log (in Apache/NCSA Combined Log format).
+
+        See the
+        `apache documentation
+        <http://httpd.apache.org/docs/current/logs.html#combined>`_
+        for format details.
+
+        CherryPy calls this automatically for you. Note there are no arguments;
+        it collects the data itself from
+        :class:`cherrypy.request<cherrypy._cprequest.Request>`.
+
+        Like Apache started doing in 2.0.46, non-printable and other special
+        characters in %r (and we expand that to all parts) are escaped using
+        \\xhh sequences, where hh stands for the hexadecimal representation
+        of the raw byte. Exceptions from this rule are " and \\, which are
+        escaped by prepending a backslash, and all whitespace characters,
+        which are written in their C-style notation (\\n, \\t, etc).
+        """
+        request = cherrypy.serving.request
+        remote = request.remote
+        response = cherrypy.serving.response
+        outheaders = response.headers
+        inheaders = request.headers
+        if response.output_status is None:
+            status = '-'
+        else:
+            # output_status is bytes like b'200 OK'; keep only the code.
+            status = response.output_status.split(b' ', 1)[0]
+            status = status.decode('ISO-8859-1')
+
+        atoms = {'h': remote.name or remote.ip,
+                 'l': '-',
+                 'u': getattr(request, 'login', None) or '-',
+                 't': self.time(),
+                 'r': request.request_line,
+                 's': status,
+                 'b': dict.get(outheaders, 'Content-Length', '') or '-',
+                 'f': dict.get(inheaders, 'Referer', ''),
+                 'a': dict.get(inheaders, 'User-Agent', ''),
+                 'o': dict.get(inheaders, 'Host', '-'),
+                 'i': request.unique_id,
+                 'z': LazyRfc3339UtcTime(),
+                 }
+        # Escape every atom (not just %r) to defend against log injection
+        # (CWE-117) via attacker-controlled request data.
+        for k, v in atoms.items():
+            if not isinstance(v, str):
+                v = str(v)
+            v = v.replace('"', '\\"').encode('utf8')
+            # Fortunately, repr(str) escapes unprintable chars, \n, \t, etc
+            # and backslash for us. All we have to do is strip the quotes.
+            v = repr(v)[2:-1]
+
+            # in python 3.0 the repr of bytes (as returned by encode)
+            # uses double \'s. But then the logger escapes them yet again,
+            # resulting in quadruple slashes. Remove the extra one here.
+            v = v.replace('\\\\', '\\')
+
+            # Store the fully-escaped value back into the atom dict.
+            atoms[k] = v
+
+        try:
+            self.access_log.log(
+                logging.INFO, self.access_log_format.format(**atoms))
+        except Exception:
+            # Never let access logging kill the request; report the failure
+            # (with traceback) to the error log instead.
+            self(traceback=True)
+
+    def time(self):
+        """Return now() in Apache Common Log Format (no timezone)."""
+        now = datetime.datetime.now()
+        # Month names are spelled out locally so the output is independent
+        # of the process locale.
+        monthnames = ['jan', 'feb', 'mar', 'apr', 'may', 'jun',
+                      'jul', 'aug', 'sep', 'oct', 'nov', 'dec']
+        month = monthnames[now.month - 1].capitalize()
+        return ('[%02d/%s/%04d:%02d:%02d:%02d]' %
+                (now.day, month, now.year, now.hour, now.minute, now.second))
+
+    def _get_builtin_handler(self, log, key):
+        # Return the first handler tagged with _cpbuiltin == key, else None.
+        for h in log.handlers:
+            if getattr(h, '_cpbuiltin', None) == key:
+                return h
+
+    # ------------------------- Screen handlers ------------------------- #
+    def _set_screen_handler(self, log, enable, stream=None):
+        # Add or remove the builtin 'screen' StreamHandler on `log`.
+        h = self._get_builtin_handler(log, 'screen')
+        if enable:
+            if not h:
+                if stream is None:
+                    stream = sys.stderr
+                h = logging.StreamHandler(stream)
+                h.setFormatter(logfmt)
+                h._cpbuiltin = 'screen'
+                log.addHandler(h)
+        elif h:
+            log.handlers.remove(h)
+
+    @property
+    def screen(self):
+        """Turn stderr/stdout logging on or off.
+
+        If you set this to True, it'll add the appropriate StreamHandler for
+        you. If you set it to False, it will remove the handler.
+        """
+        h = self._get_builtin_handler
+        has_h = h(self.error_log, 'screen') or h(self.access_log, 'screen')
+        return bool(has_h)
+
+    @screen.setter
+    def screen(self, newvalue):
+        # Errors go to stderr, access records to stdout.
+        self._set_screen_handler(self.error_log, newvalue, stream=sys.stderr)
+        self._set_screen_handler(self.access_log, newvalue, stream=sys.stdout)
+
+    # -------------------------- File handlers -------------------------- #
+
+    def _add_builtin_file_handler(self, log, fname):
+        h = logging.FileHandler(fname)
+        h.setFormatter(logfmt)
+        h._cpbuiltin = 'file'
+        log.addHandler(h)
+
+    def _set_file_handler(self, log, filename):
+        # Point the builtin 'file' handler at `filename`, replacing any
+        # existing handler that targets a different path; remove it when
+        # `filename` is falsy.
+        h = self._get_builtin_handler(log, 'file')
+        if filename:
+            if h:
+                if h.baseFilename != os.path.abspath(filename):
+                    h.close()
+                    log.handlers.remove(h)
+                    self._add_builtin_file_handler(log, filename)
+            else:
+                self._add_builtin_file_handler(log, filename)
+        else:
+            if h:
+                h.close()
+                log.handlers.remove(h)
+
+    @property
+    def error_file(self):
+        """The filename for self.error_log.
+
+        If you set this to a string, it'll add the appropriate FileHandler for
+        you. If you set it to ``None`` or ``''``, it will remove the handler.
+        """
+        h = self._get_builtin_handler(self.error_log, 'file')
+        if h:
+            return h.baseFilename
+        return ''
+
+    @error_file.setter
+    def error_file(self, newvalue):
+        self._set_file_handler(self.error_log, newvalue)
+
+    @property
+    def access_file(self):
+        """The filename for self.access_log.
+
+        If you set this to a string, it'll add the appropriate FileHandler for
+        you. If you set it to ``None`` or ``''``, it will remove the handler.
+        """
+        h = self._get_builtin_handler(self.access_log, 'file')
+        if h:
+            return h.baseFilename
+        return ''
+
+    @access_file.setter
+    def access_file(self, newvalue):
+        self._set_file_handler(self.access_log, newvalue)
+
+    # ------------------------- WSGI handlers ------------------------- #
+
+    def _set_wsgi_handler(self, log, enable):
+        h = self._get_builtin_handler(log, 'wsgi')
+        if enable:
+            if not h:
+                h = WSGIErrorHandler()
+                h.setFormatter(logfmt)
+                h._cpbuiltin = 'wsgi'
+                log.addHandler(h)
+        elif h:
+            log.handlers.remove(h)
+
+    @property
+    def wsgi(self):
+        """Write errors to wsgi.errors.
+
+        If you set this to True, it'll add the appropriate
+        :class:`WSGIErrorHandler<cherrypy._cplogging.WSGIErrorHandler>` for you
+        (which writes errors to ``wsgi.errors``).
+        If you set it to False, it will remove the handler.
+        """
+        return bool(self._get_builtin_handler(self.error_log, 'wsgi'))
+
+    @wsgi.setter
+    def wsgi(self, newvalue):
+        self._set_wsgi_handler(self.error_log, newvalue)
+
+
+class WSGIErrorHandler(logging.Handler):
+
+    "A handler class which writes logging records to environ['wsgi.errors']."
+
+    def flush(self):
+        """Flushes the stream."""
+        try:
+            stream = cherrypy.serving.request.wsgi_environ.get('wsgi.errors')
+        except (AttributeError, KeyError):
+            # Not inside a request, or not a WSGI request: nothing to flush.
+            pass
+        else:
+            stream.flush()
+
+    def emit(self, record):
+        """Emit a record."""
+        try:
+            stream = cherrypy.serving.request.wsgi_environ.get('wsgi.errors')
+        except (AttributeError, KeyError):
+            # No WSGI environ available; silently drop the record.
+            pass
+        else:
+            try:
+                msg = self.format(record)
+                fs = '%s\n'
+                import types
+                # if no unicode support...
+                # NOTE(review): types.UnicodeType is a Python 2 relic; on
+                # Python 3 hasattr() is False, so the plain-write branch
+                # below is always taken.
+                if not hasattr(types, 'UnicodeType'):
+                    stream.write(fs % msg)
+                else:
+                    try:
+                        stream.write(fs % msg)
+                    except UnicodeError:
+                        stream.write(fs % msg.encode('UTF-8'))
+                self.flush()
+            except Exception:
+                self.handleError(record)
+
+
+class LazyRfc3339UtcTime(object):
+    # Lazily-formatted timestamp: the string is only built if the access log
+    # format actually references the {z} atom.
+    def __str__(self):
+        """Return now() in RFC3339 UTC Format."""
+        # NOTE(review): datetime.now() is *local* time, yet the 'Z' suffix
+        # claims UTC — the timestamp is mislabeled unless the system clock
+        # runs in UTC. Upstream later switched to an aware UTC now();
+        # confirm before relying on this field.
+        now = datetime.datetime.now()
+        return now.isoformat('T') + 'Z'
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/_cpmodpy.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/_cpmodpy.py
new file mode 100644
index 0000000000000000000000000000000000000000..0e608c48a9e6221c7372ff8b0fd367b824e77504
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/_cpmodpy.py
@@ -0,0 +1,354 @@
+"""Native adapter for serving CherryPy via mod_python
+
+Basic usage:
+
+##########################################
+# Application in a module called myapp.py
+##########################################
+
+import cherrypy
+
+class Root:
+ @cherrypy.expose
+ def index(self):
+ return 'Hi there, Ho there, Hey there'
+
+
+# We will use this method from the mod_python configuration
+# as the entry point to our application
+def setup_server():
+ cherrypy.tree.mount(Root())
+ cherrypy.config.update({'environment': 'production',
+ 'log.screen': False,
+ 'show_tracebacks': False})
+
+##########################################
+# mod_python settings for apache2
+# This should reside in your httpd.conf
+# or a file that will be loaded at
+# apache startup
+##########################################
+
+# Start
+DocumentRoot "/"
+Listen 8080
+LoadModule python_module /usr/lib/apache2/modules/mod_python.so
+
+<Location "/">
+ PythonPath "sys.path+['/path/to/my/application']"
+ SetHandler python-program
+ PythonHandler cherrypy._cpmodpy::handler
+ PythonOption cherrypy.setup myapp::setup_server
+ PythonDebug On
+</Location>
+# End
+
+The actual path to your mod_python.so is dependent on your
+environment. In this case we suppose a global mod_python
+installation on a Linux distribution such as Ubuntu.
+
+We do set the PythonPath configuration setting so that
+your application can be found by the user account running
+the apache2 instance. Of course if your application
+resides in the global site-package this won't be needed.
+
+Then restart apache2 and access http://127.0.0.1:8080
+"""
+
+import io
+import logging
+import os
+import re
+import sys
+
+from more_itertools import always_iterable
+
+import cherrypy
+from cherrypy._cperror import format_exc, bare_error
+from cherrypy.lib import httputil
+
+
+# ------------------------------ Request-handling
+
+
+def setup(req):
+    """One-time CherryPy engine setup when running under mod_python."""
+    from mod_python import apache
+
+    # Run any setup functions defined by a "PythonOption cherrypy.setup"
+    # directive.
+    options = req.get_options()
+    if 'cherrypy.setup' in options:
+        for function in options['cherrypy.setup'].split():
+            # Each entry is either "module" or "module::function".
+            atoms = function.split('::', 1)
+            if len(atoms) == 1:
+                mod = __import__(atoms[0], globals(), locals())
+            else:
+                modname, fname = atoms
+                mod = __import__(modname, globals(), locals(), [fname])
+                func = getattr(mod, fname)
+                func()
+
+    cherrypy.config.update({'log.screen': False,
+                            'tools.ignore_headers.on': True,
+                            'tools.ignore_headers.headers': ['Range'],
+                            })
+
+    # Apache owns the process: detach CherryPy's own signal handling,
+    # autoreloader and builtin HTTP server.
+    engine = cherrypy.engine
+    if hasattr(engine, 'signal_handler'):
+        engine.signal_handler.unsubscribe()
+    if hasattr(engine, 'console_control_handler'):
+        engine.console_control_handler.unsubscribe()
+    engine.autoreload.unsubscribe()
+    cherrypy.server.unsubscribe()
+
+    @engine.subscribe('log')
+    def _log(msg, level):
+        # Map stdlib logging levels onto Apache log levels.
+        newlevel = apache.APLOG_ERR
+        if logging.DEBUG >= level:
+            newlevel = apache.APLOG_DEBUG
+        elif logging.INFO >= level:
+            newlevel = apache.APLOG_INFO
+        elif logging.WARNING >= level:
+            newlevel = apache.APLOG_WARNING
+        # On Windows, req.server is required or the msg will vanish. See
+        # http://www.modpython.org/pipermail/mod_python/2003-October/014291.html
+        # Also, "When server is not specified...LogLevel does not apply..."
+        apache.log_error(msg, newlevel, req.server)
+
+    engine.start()
+
+    def cherrypy_cleanup(data):
+        engine.exit()
+    try:
+        # apache.register_cleanup wasn't available until 3.1.4.
+        apache.register_cleanup(cherrypy_cleanup)
+    except AttributeError:
+        req.server.register_cleanup(req, cherrypy_cleanup)
+
+
+class _ReadOnlyRequest:
+    # Wrap a mod_python request, exposing only its read methods so CherryPy
+    # can treat it as a plain input file object.
+    expose = ('read', 'readline', 'readlines')
+
+    def __init__(self, req):
+        for method in self.expose:
+            self.__dict__[method] = getattr(req, method)
+
+
+recursive = False
+
+_isSetUp = False
+
+
+def handler(req):
+    """mod_python entry point: serve one request through CherryPy."""
+    from mod_python import apache
+    try:
+        # NOTE(review): this check-then-set is not atomic; under a threaded
+        # MPM two early requests could both run setup(). Confirm whether
+        # setup() is idempotent enough for that to be harmless.
+        global _isSetUp
+        if not _isSetUp:
+            setup(req)
+            _isSetUp = True
+
+        # Obtain a Request object from CherryPy
+        local = req.connection.local_addr
+        local = httputil.Host(
+            local[0], local[1], req.connection.local_host or '')
+        remote = req.connection.remote_addr
+        remote = httputil.Host(
+            remote[0], remote[1], req.connection.remote_host or '')
+
+        scheme = req.parsed_uri[0] or 'http'
+        # Force mod_python to parse Authorization so req.user is populated.
+        req.get_basic_auth_pw()
+
+        try:
+            # apache.mpm_query only became available in mod_python 3.1
+            q = apache.mpm_query
+            threaded = q(apache.AP_MPMQ_IS_THREADED)
+            forked = q(apache.AP_MPMQ_IS_FORKED)
+        except AttributeError:
+            bad_value = ("You must provide a PythonOption '%s', "
+                         "either 'on' or 'off', when running a version "
+                         'of mod_python < 3.1')
+
+            options = req.get_options()
+
+            threaded = options.get('multithread', '').lower()
+            if threaded == 'on':
+                threaded = True
+            elif threaded == 'off':
+                threaded = False
+            else:
+                raise ValueError(bad_value % 'multithread')
+
+            forked = options.get('multiprocess', '').lower()
+            if forked == 'on':
+                forked = True
+            elif forked == 'off':
+                forked = False
+            else:
+                raise ValueError(bad_value % 'multiprocess')
+
+        sn = cherrypy.tree.script_name(req.uri or '/')
+        if sn is None:
+            # No mounted application matches this URI.
+            send_response(req, '404 Not Found', [], '')
+        else:
+            app = cherrypy.tree.apps[sn]
+            method = req.method
+            path = req.uri
+            qs = req.args or ''
+            reqproto = req.protocol
+            headers = list(req.headers_in.copy().items())
+            rfile = _ReadOnlyRequest(req)
+            prev = None
+
+            try:
+                # Loop to follow InternalRedirects raised by the app.
+                redirections = []
+                while True:
+                    request, response = app.get_serving(local, remote, scheme,
+                                                        'HTTP/1.1')
+                    request.login = req.user
+                    request.multithread = bool(threaded)
+                    request.multiprocess = bool(forked)
+                    request.app = app
+                    request.prev = prev
+
+                    # Run the CherryPy Request object and obtain the response
+                    try:
+                        request.run(method, path, qs, reqproto, headers, rfile)
+                        break
+                    except cherrypy.InternalRedirect:
+                        ir = sys.exc_info()[1]
+                        app.release_serving()
+                        prev = request
+
+                        if not recursive:
+                            # Guard against redirect loops unless explicitly
+                            # allowed via the module-level `recursive` flag.
+                            if ir.path in redirections:
+                                raise RuntimeError(
+                                    'InternalRedirector visited the same URL '
+                                    'twice: %r' % ir.path)
+                            else:
+                                # Add the *previous* path_info + qs to
+                                # redirections.
+                                if qs:
+                                    qs = '?' + qs
+                                redirections.append(sn + path + qs)
+
+                        # Munge environment and try again.
+                        method = 'GET'
+                        path = ir.path
+                        qs = ir.query_string
+                        rfile = io.BytesIO()
+
+                send_response(
+                    req, response.output_status, response.header_list,
+                    response.body, response.stream)
+            finally:
+                app.release_serving()
+    except Exception:
+        # Last-ditch error path: log the traceback and emit a bare 500.
+        tb = format_exc()
+        cherrypy.log(tb, 'MOD_PYTHON', severity=logging.ERROR)
+        s, h, b = bare_error()
+        send_response(req, s, h, b)
+    return apache.OK
+
+
+def send_response(req, status, headers, body, stream=False):
+    """Copy a CherryPy (status, headers, body) response onto a mod_python
+    request object."""
+    # Set response status (mod_python wants the numeric code only).
+    req.status = int(status[:3])
+
+    # Set response headers. Content-Type must be assigned via the dedicated
+    # req.content_type attribute rather than headers_out.
+    req.content_type = 'text/plain'
+    for header, value in headers:
+        if header.lower() == 'content-type':
+            req.content_type = value
+            continue
+        req.headers_out.add(header, value)
+
+    if stream:
+        # Flush now so the status and headers are sent immediately.
+        req.flush()
+
+    # Set response body
+    for seg in always_iterable(body):
+        req.write(seg)
+
+
+# --------------- Startup tools for CherryPy + mod_python --------------- #
+try:
+    import subprocess
+
+    def popen(fullcmd):
+        # Run the command through the shell and hand back its combined
+        # stdout+stderr as a (bytes) pipe.
+        p = subprocess.Popen(fullcmd, shell=True,
+                             stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
+                             close_fds=True)
+        return p.stdout
+except ImportError:
+    # Fallback for ancient Pythons without subprocess.
+    # NOTE(review): os.popen4 was removed in Python 3, so on this
+    # interpreter (python3.8 site-packages) the fallback is dead code.
+    def popen(fullcmd):
+        pipein, pipeout = os.popen4(fullcmd)
+        return pipeout
+
+
+def read_process(cmd, args=''):
+    """Run ``cmd args`` and return its full (bytes) output.
+
+    Raises IOError if the first output line looks like a shell
+    "command not found" message.
+    """
+    fullcmd = '%s %s' % (cmd, args)
+    pipeout = popen(fullcmd)
+    try:
+        firstline = pipeout.readline()
+        # The pipe yields bytes, hence the bytes pattern.
+        cmd_not_found = re.search(
+            b'(not recognized|No such file|not found)',
+            firstline,
+            re.IGNORECASE
+        )
+        if cmd_not_found:
+            raise IOError('%s must be on your system path.' % cmd)
+        output = firstline + pipeout.read()
+    finally:
+        pipeout.close()
+    return output
+
+
+class ModPythonServer(object):
+
+    # Generates an Apache config for mod_python and starts/stops apache
+    # around it.
+    template = """
+# Apache2 server configuration file for running CherryPy with mod_python.
+
+DocumentRoot "/"
+Listen %(port)s
+LoadModule python_module modules/mod_python.so
+
+<Location %(loc)s>
+    SetHandler python-program
+    PythonHandler %(handler)s
+    PythonDebug On
+%(opts)s
+</Location>
+"""
+
+    def __init__(self, loc='/', port=80, opts=None, apache_path='apache',
+                 handler='cherrypy._cpmodpy::handler'):
+        self.loc = loc
+        self.port = port
+        # opts: iterable of (name, value) PythonOption pairs, or None.
+        self.opts = opts
+        self.apache_path = apache_path
+        self.handler = handler
+
+    def start(self):
+        """Write the config file next to this module and launch apache."""
+        opts = ''.join([' PythonOption %s %s\n' % (k, v)
+                        for k, v in self.opts])
+        conf_data = self.template % {'port': self.port,
+                                     'loc': self.loc,
+                                     'opts': opts,
+                                     'handler': self.handler,
+                                     }
+
+        mpconf = os.path.join(os.path.dirname(__file__), 'cpmodpy.conf')
+        # NOTE(review): the file is opened in binary mode but conf_data is
+        # str — on Python 3 this write raises TypeError. Should be 'w'
+        # (or conf_data.encode()); confirm against upstream.
+        f = open(mpconf, 'wb')
+        try:
+            f.write(conf_data)
+        finally:
+            f.close()
+
+        response = read_process(self.apache_path, '-k start -f %s' % mpconf)
+        self.ready = True
+        return response
+
+    def stop(self):
+        # NOTE(review): hardcodes 'apache' instead of self.apache_path, and
+        # ignores the command's exit status.
+        os.popen('apache -k stop')
+        self.ready = False
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/_cpnative_server.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/_cpnative_server.py
new file mode 100644
index 0000000000000000000000000000000000000000..e9671d2892094f5a6ef79abfd338f3a7ff8bc39f
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/_cpnative_server.py
@@ -0,0 +1,168 @@
+"""Native adapter for serving CherryPy via its builtin server."""
+
+import logging
+import sys
+import io
+
+import cheroot.server
+
+import cherrypy
+from cherrypy._cperror import format_exc, bare_error
+from cherrypy.lib import httputil
+from ._cpcompat import tonative
+
+
+class NativeGateway(cheroot.server.Gateway):
+    """Native gateway implementation allowing to bypass WSGI."""
+
+    # When False, an InternalRedirect revisiting the same URL is an error.
+    recursive = False
+
+    def respond(self):
+        """Obtain response from CherryPy machinery and then send it."""
+        req = self.req
+        try:
+            # Obtain a Request object from CherryPy
+            local = req.server.bind_addr  # FIXME: handle UNIX sockets
+            local = tonative(local[0]), local[1]
+            local = httputil.Host(local[0], local[1], '')
+            remote = tonative(req.conn.remote_addr), req.conn.remote_port
+            remote = httputil.Host(remote[0], remote[1], '')
+
+            scheme = tonative(req.scheme)
+            sn = cherrypy.tree.script_name(tonative(req.uri or '/'))
+            if sn is None:
+                # No mounted application matches this URI.
+                self.send_response('404 Not Found', [], [''])
+            else:
+                app = cherrypy.tree.apps[sn]
+                method = tonative(req.method)
+                path = tonative(req.path)
+                qs = tonative(req.qs or '')
+                headers = (
+                    (tonative(h), tonative(v))
+                    for h, v in req.inheaders.items()
+                )
+                rfile = req.rfile
+                prev = None
+
+                try:
+                    # Loop to follow InternalRedirects raised by the app.
+                    redirections = []
+                    while True:
+                        request, response = app.get_serving(
+                            local, remote, scheme, 'HTTP/1.1')
+                        request.multithread = True
+                        request.multiprocess = False
+                        request.app = app
+                        request.prev = prev
+
+                        # Run the CherryPy Request object and obtain the
+                        # response
+                        try:
+                            request.run(
+                                method, path, qs,
+                                tonative(req.request_protocol),
+                                headers, rfile,
+                            )
+                            break
+                        except cherrypy.InternalRedirect:
+                            ir = sys.exc_info()[1]
+                            app.release_serving()
+                            prev = request
+
+                            if not self.recursive:
+                                if ir.path in redirections:
+                                    raise RuntimeError(
+                                        'InternalRedirector visited the same '
+                                        'URL twice: %r' % ir.path)
+                                else:
+                                    # Add the *previous* path_info + qs to
+                                    # redirections.
+                                    if qs:
+                                        qs = '?' + qs
+                                    redirections.append(sn + path + qs)
+
+                            # Munge environment and try again.
+                            method = 'GET'
+                            path = ir.path
+                            qs = ir.query_string
+                            rfile = io.BytesIO()
+
+                    self.send_response(
+                        response.output_status, response.header_list,
+                        response.body)
+                finally:
+                    app.release_serving()
+        except Exception:
+            # Last-ditch error path: log the traceback and emit a bare 500.
+            tb = format_exc()
+            # print tb
+            cherrypy.log(tb, 'NATIVE_ADAPTER', severity=logging.ERROR)
+            s, h, b = bare_error()
+            self.send_response(s, h, b)
+
+    def send_response(self, status, headers, body):
+        """Send response to HTTP request."""
+        req = self.req
+
+        # Set response status
+        req.status = status or b'500 Server Error'
+
+        # Set response headers
+        for header, value in headers:
+            req.outheaders.append((header, value))
+        if (req.ready and not req.sent_headers):
+            req.sent_headers = True
+            req.send_headers()
+
+        # Set response body
+        for seg in body:
+            req.write(seg)
+
+
+class CPHTTPServer(cheroot.server.HTTPServer):
+    """Wrapper for cheroot.server.HTTPServer.
+
+    cheroot has been designed to not reference CherryPy in any way,
+    so that it can be used in other frameworks and applications.
+    Therefore, we wrap it here, so we can apply some attributes
+    from config -> cherrypy.server -> HTTPServer.
+    """
+
+    def __init__(self, server_adapter=cherrypy.server):
+        """Initialize CPHTTPServer."""
+        self.server_adapter = server_adapter
+
+        server_name = (self.server_adapter.socket_host or
+                       self.server_adapter.socket_file or
+                       None)
+
+        cheroot.server.HTTPServer.__init__(
+            self, server_adapter.bind_addr, NativeGateway,
+            minthreads=server_adapter.thread_pool,
+            maxthreads=server_adapter.thread_pool_max,
+            server_name=server_name)
+
+        # Copy limits/tuning from the CherryPy server adapter onto cheroot.
+        self.max_request_header_size = (
+            self.server_adapter.max_request_header_size or 0)
+        self.max_request_body_size = (
+            self.server_adapter.max_request_body_size or 0)
+        self.request_queue_size = self.server_adapter.socket_queue_size
+        self.timeout = self.server_adapter.socket_timeout
+        self.shutdown_timeout = self.server_adapter.shutdown_timeout
+        self.protocol = self.server_adapter.protocol_version
+        self.nodelay = self.server_adapter.nodelay
+
+        # Both branches build the same adapter; the first additionally
+        # installs an explicit ssl_context when one was configured.
+        ssl_module = self.server_adapter.ssl_module or 'pyopenssl'
+        if self.server_adapter.ssl_context:
+            adapter_class = cheroot.server.get_ssl_adapter_class(ssl_module)
+            self.ssl_adapter = adapter_class(
+                self.server_adapter.ssl_certificate,
+                self.server_adapter.ssl_private_key,
+                self.server_adapter.ssl_certificate_chain,
+                self.server_adapter.ssl_ciphers)
+            self.ssl_adapter.context = self.server_adapter.ssl_context
+        elif self.server_adapter.ssl_certificate:
+            adapter_class = cheroot.server.get_ssl_adapter_class(ssl_module)
+            self.ssl_adapter = adapter_class(
+                self.server_adapter.ssl_certificate,
+                self.server_adapter.ssl_private_key,
+                self.server_adapter.ssl_certificate_chain,
+                self.server_adapter.ssl_ciphers)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/_cpreqbody.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/_cpreqbody.py
new file mode 100644
index 0000000000000000000000000000000000000000..4d3cefe768f523a9cfcca726192b61ae2908eb34
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/_cpreqbody.py
@@ -0,0 +1,993 @@
+"""Request body processing for CherryPy.
+
+.. versionadded:: 3.2
+
+Application authors have complete control over the parsing of HTTP request
+entities. In short,
+:attr:`cherrypy.request.body<cherrypy._cprequest.Request.body>`
+is now always set to an instance of
+:class:`RequestBody<cherrypy._cpreqbody.RequestBody>`,
+and *that* class is a subclass of :class:`Entity<cherrypy._cpreqbody.Entity>`.
+
+When an HTTP request includes an entity body, it is often desirable to
+provide that information to applications in a form other than the raw bytes.
+Different content types demand different approaches. Examples:
+
+ * For a GIF file, we want the raw bytes in a stream.
+ * An HTML form is better parsed into its component fields, and each text field
+ decoded from bytes to unicode.
+ * A JSON body should be deserialized into a Python dict or list.
+
+When the request contains a Content-Type header, the media type is used as a
+key to look up a value in the
+:attr:`request.body.processors<cherrypy._cpreqbody.Entity.processors>` dict.
+If the full media
+type is not found, then the major type is tried; for example, if no processor
+is found for the 'image/jpeg' type, then we look for a processor for the
+'image' types altogether. If neither the full type nor the major type has a
+matching processor, then a default processor is used
+(:func:`default_proc<cherrypy._cpreqbody.Entity.default_proc>`). For most
+types, this means no processing is done, and the body is left unread as a
+raw byte stream. Processors are configurable in an 'on_start_resource' hook.
+
+Some processors, especially those for the 'text' types, attempt to decode bytes
+to unicode. If the Content-Type request header includes a 'charset' parameter,
+this is used to decode the entity. Otherwise, one or more default charsets may
+be attempted, although this decision is up to each processor. If a processor
+successfully decodes an Entity or Part, it should set the
+:attr:`charset<cherrypy._cpreqbody.Entity.charset>` attribute
+on the Entity or Part to the name of the successful charset, so that
+applications can easily re-encode or transcode the value if they wish.
+
+If the Content-Type of the request entity is of major type 'multipart', then
+the above parsing process, and possibly a decoding process, is performed for
+each part.
+
+For both the full entity and multipart parts, a Content-Disposition header may
+be used to fill :attr:`name<cherrypy._cpreqbody.Entity.name>` and
+:attr:`filename<cherrypy._cpreqbody.Entity.filename>` attributes on the
+request.body or the Part.
+
+.. _custombodyprocessors:
+
+Custom Processors
+=================
+
+You can add your own processors for any specific or major MIME type. Simply add
+it to the :attr:`processors<cherrypy._cpreqbody.Entity.processors>` dict in a
+hook/tool that runs at ``on_start_resource`` or ``before_request_body``.
+Here's the built-in JSON tool for an example::
+
+ def json_in(force=True, debug=False):
+ request = cherrypy.serving.request
+ def json_processor(entity):
+ '''Read application/json data into request.json.'''
+ if not entity.headers.get("Content-Length", ""):
+ raise cherrypy.HTTPError(411)
+
+ body = entity.fp.read()
+ try:
+ request.json = json_decode(body)
+ except ValueError:
+ raise cherrypy.HTTPError(400, 'Invalid JSON document')
+ if force:
+ request.body.processors.clear()
+ request.body.default_proc = cherrypy.HTTPError(
+ 415, 'Expected an application/json content type')
+ request.body.processors['application/json'] = json_processor
+
+We begin by defining a new ``json_processor`` function to stick in the
+``processors`` dictionary. All processor functions take a single argument,
+the ``Entity`` instance they are to process. It will be called whenever a
+request is received (for those URI's where the tool is turned on) which
+has a ``Content-Type`` of "application/json".
+
+First, it checks for a valid ``Content-Length`` (raising 411 if not valid),
+then reads the remaining bytes on the socket. The ``fp`` object knows its
+own length, so it won't hang waiting for data that never arrives. It will
+return when all data has been read. Then, we decode those bytes using
+Python's built-in ``json`` module, and stick the decoded result onto
+``request.json`` . If it cannot be decoded, we raise 400.
+
+If the "force" argument is True (the default), the ``Tool`` clears the
+``processors`` dict so that request entities of other ``Content-Types``
+aren't parsed at all. Since there's no entry for those invalid MIME
+types, the ``default_proc`` method of ``cherrypy.request.body`` is
+called. But this does nothing by default (usually to provide the page
+handler an opportunity to handle it.)
+But in our case, we want to raise 415, so we replace
+``request.body.default_proc``
+with the error (``HTTPError`` instances, when called, raise themselves).
+
+If we were defining a custom processor, we can do so without making a ``Tool``.
+Just add the config entry::
+
+ request.body.processors = {'application/json': json_processor}
+
+Note that you can only replace the ``processors`` dict wholesale this way,
+not update the existing one.
+"""
+
+try:
+ from io import DEFAULT_BUFFER_SIZE
+except ImportError:
+ DEFAULT_BUFFER_SIZE = 8192
+import re
+import sys
+import tempfile
+from urllib.parse import unquote
+
+import cheroot.server
+
+import cherrypy
+from cherrypy._cpcompat import ntou
+from cherrypy.lib import httputil
+
+
+def unquote_plus(bs):
+ """Bytes version of urllib.parse.unquote_plus."""
+ bs = bs.replace(b'+', b' ')
+ atoms = bs.split(b'%')
+ for i in range(1, len(atoms)):
+ item = atoms[i]
+ try:
+ pct = int(item[:2], 16)
+ atoms[i] = bytes([pct]) + item[2:]
+ except ValueError:
+ pass
+ return b''.join(atoms)
+
+
+# ------------------------------- Processors -------------------------------- #
+
+def process_urlencoded(entity):
+ """Read application/x-www-form-urlencoded data into entity.params."""
+ qs = entity.fp.read()
+ for charset in entity.attempt_charsets:
+ try:
+ params = {}
+ for aparam in qs.split(b'&'):
+ for pair in aparam.split(b';'):
+ if not pair:
+ continue
+
+ atoms = pair.split(b'=', 1)
+ if len(atoms) == 1:
+ atoms.append(b'')
+
+ key = unquote_plus(atoms[0]).decode(charset)
+ value = unquote_plus(atoms[1]).decode(charset)
+
+ if key in params:
+ if not isinstance(params[key], list):
+ params[key] = [params[key]]
+ params[key].append(value)
+ else:
+ params[key] = value
+ except UnicodeDecodeError:
+ pass
+ else:
+ entity.charset = charset
+ break
+ else:
+ raise cherrypy.HTTPError(
+ 400, 'The request entity could not be decoded. The following '
+ 'charsets were attempted: %s' % repr(entity.attempt_charsets))
+
+ # Now that all values have been successfully parsed and decoded,
+ # apply them to the entity.params dict.
+ for key, value in params.items():
+ if key in entity.params:
+ if not isinstance(entity.params[key], list):
+ entity.params[key] = [entity.params[key]]
+ entity.params[key].append(value)
+ else:
+ entity.params[key] = value
+
+
+def process_multipart(entity):
+ """Read all multipart parts into entity.parts."""
+ ib = ''
+ if 'boundary' in entity.content_type.params:
+ # http://tools.ietf.org/html/rfc2046#section-5.1.1
+ # "The grammar for parameters on the Content-type field is such that it
+ # is often necessary to enclose the boundary parameter values in quotes
+ # on the Content-type line"
+ ib = entity.content_type.params['boundary'].strip('"')
+
+ if not re.match('^[ -~]{0,200}[!-~]$', ib):
+ raise ValueError('Invalid boundary in multipart form: %r' % (ib,))
+
+ ib = ('--' + ib).encode('ascii')
+
+ # Find the first marker
+ while True:
+ b = entity.readline()
+ if not b:
+ return
+
+ b = b.strip()
+ if b == ib:
+ break
+
+ # Read all parts
+ while True:
+ part = entity.part_class.from_fp(entity.fp, ib)
+ entity.parts.append(part)
+ part.process()
+ if part.fp.done:
+ break
+
+
+def process_multipart_form_data(entity):
+ """Read all multipart/form-data parts into entity.parts or entity.params.
+ """
+ process_multipart(entity)
+
+ kept_parts = []
+ for part in entity.parts:
+ if part.name is None:
+ kept_parts.append(part)
+ else:
+ if part.filename is None:
+ # It's a regular field
+ value = part.fullvalue()
+ else:
+ # It's a file upload. Retain the whole part so consumer code
+ # has access to its .file and .filename attributes.
+ value = part
+
+ if part.name in entity.params:
+ if not isinstance(entity.params[part.name], list):
+ entity.params[part.name] = [entity.params[part.name]]
+ entity.params[part.name].append(value)
+ else:
+ entity.params[part.name] = value
+
+ entity.parts = kept_parts
+
+
+def _old_process_multipart(entity):
+ """The behavior of 3.2 and lower. Deprecated and will be changed in 3.3."""
+ process_multipart(entity)
+
+ params = entity.params
+
+ for part in entity.parts:
+ if part.name is None:
+ key = ntou('parts')
+ else:
+ key = part.name
+
+ if part.filename is None:
+ # It's a regular field
+ value = part.fullvalue()
+ else:
+ # It's a file upload. Retain the whole part so consumer code
+ # has access to its .file and .filename attributes.
+ value = part
+
+ if key in params:
+ if not isinstance(params[key], list):
+ params[key] = [params[key]]
+ params[key].append(value)
+ else:
+ params[key] = value
+
+
+# -------------------------------- Entities --------------------------------- #
+class Entity(object):
+
+ """An HTTP request body, or MIME multipart body.
+
+ This class collects information about the HTTP request entity. When a
+ given entity is of MIME type "multipart", each part is parsed into its own
+ Entity instance, and the set of parts stored in
+ :attr:`entity.parts<cherrypy._cpreqbody.Entity.parts>`.
+
+ Between the ``before_request_body`` and ``before_handler`` tools, CherryPy
+ tries to process the request body (if any) by calling
+ :func:`request.body.process<cherrypy._cpreqbody.RequestBody.process>`.
+ This uses the ``content_type`` of the Entity to look up a suitable
+ processor in
+ :attr:`Entity.processors<cherrypy._cpreqbody.Entity.processors>`,
+ a dict.
+ If a matching processor cannot be found for the complete Content-Type,
+ it tries again using the major type. For example, if a request with an
+ entity of type "image/jpeg" arrives, but no processor can be found for
+ that complete type, then one is sought for the major type "image". If a
+ processor is still not found, then the
+ :func:`default_proc<cherrypy._cpreqbody.Entity.default_proc>` method
+ of the Entity is called (which does nothing by default; you can
+ override this too).
+
+ CherryPy includes processors for the "application/x-www-form-urlencoded"
+ type, the "multipart/form-data" type, and the "multipart" major type.
+ CherryPy 3.2 processes these types almost exactly as older versions.
+ Parts are passed as arguments to the page handler using their
+ ``Content-Disposition.name`` if given, otherwise in a generic "parts"
+ argument. Each such part is either a string, or the
+ :class:`Part<cherrypy._cpreqbody.Part>` itself if it's a file. (In this
+ case it will have ``file`` and ``filename`` attributes, or possibly a
+ ``value`` attribute). Each Part is itself a subclass of
+ Entity, and has its own ``process`` method and ``processors`` dict.
+
+ There is a separate processor for the "multipart" major type which is more
+ flexible, and simply stores all multipart parts in
+ :attr:`request.body.parts<cherrypy._cpreqbody.Entity.parts>`. You can
+ enable it with::
+
+ cherrypy.request.body.processors['multipart'] = \
+ _cpreqbody.process_multipart
+
+ in an ``on_start_resource`` tool.
+ """
+
+ # http://tools.ietf.org/html/rfc2046#section-4.1.2:
+ # "The default character set, which must be assumed in the
+ # absence of a charset parameter, is US-ASCII."
+ # However, many browsers send data in utf-8 with no charset.
+ attempt_charsets = ['utf-8']
+ r"""A list of strings, each of which should be a known encoding.
+
+ When the Content-Type of the request body warrants it, each of the given
+ encodings will be tried in order. The first one to successfully decode the
+ entity without raising an error is stored as
+ :attr:`entity.charset<cherrypy._cpreqbody.Entity.charset>`. This defaults
+ to ``['utf-8']`` (plus 'ISO-8859-1' for "text/\*" types, as required by
+ `HTTP/1.1
+ <http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1>`_),
+ but ``['us-ascii', 'utf-8']`` for multipart parts.
+ """
+
+ charset = None
+ """The successful decoding; see "attempt_charsets" above."""
+
+ content_type = None
+ """The value of the Content-Type request header.
+
+ If the Entity is part of a multipart payload, this will be the Content-Type
+ given in the MIME headers for this part.
+ """
+
+ default_content_type = 'application/x-www-form-urlencoded'
+ """This defines a default ``Content-Type`` to use if no Content-Type header
+ is given. The empty string is used for RequestBody, which results in the
+ request body not being read or parsed at all. This is by design; a missing
+ ``Content-Type`` header in the HTTP request entity is an error at best,
+ and a security hole at worst. For multipart parts, however, the MIME spec
+ declares that a part with no Content-Type defaults to "text/plain"
+ (see :class:`Part<cherrypy._cpreqbody.Part>`).
+ """
+
+ filename = None
+ """The ``Content-Disposition.filename`` header, if available."""
+
+ fp = None
+ """The readable socket file object."""
+
+ headers = None
+ """A dict of request/multipart header names and values.
+
+ This is a copy of the ``request.headers`` for the ``request.body``;
+ for multipart parts, it is the set of headers for that part.
+ """
+
+ length = None
+ """The value of the ``Content-Length`` header, if provided."""
+
+ name = None
+ """The "name" parameter of the ``Content-Disposition`` header, if any."""
+
+ params = None
+ """
+ If the request Content-Type is 'application/x-www-form-urlencoded' or
+ multipart, this will be a dict of the params pulled from the entity
+ body; that is, it will be the portion of request.params that come
+ from the message body (sometimes called "POST params", although they
+ can be sent with various HTTP method verbs). This value is set between
+ the 'before_request_body' and 'before_handler' hooks (assuming that
+ process_request_body is True)."""
+
+ processors = {'application/x-www-form-urlencoded': process_urlencoded,
+ 'multipart/form-data': process_multipart_form_data,
+ 'multipart': process_multipart,
+ }
+ """A dict of Content-Type names to processor methods."""
+
+ parts = None
+ """A list of Part instances if ``Content-Type`` is of major type
+ "multipart"."""
+
+ part_class = None
+ """The class used for multipart parts.
+
+ You can replace this with custom subclasses to alter the processing of
+ multipart parts.
+ """
+
+ def __init__(self, fp, headers, params=None, parts=None):
+ # Make an instance-specific copy of the class processors
+ # so Tools, etc. can replace them per-request.
+ self.processors = self.processors.copy()
+
+ self.fp = fp
+ self.headers = headers
+
+ if params is None:
+ params = {}
+ self.params = params
+
+ if parts is None:
+ parts = []
+ self.parts = parts
+
+ # Content-Type
+ self.content_type = headers.elements('Content-Type')
+ if self.content_type:
+ self.content_type = self.content_type[0]
+ else:
+ self.content_type = httputil.HeaderElement.from_str(
+ self.default_content_type)
+
+ # Copy the class 'attempt_charsets', prepending any Content-Type
+ # charset
+ dec = self.content_type.params.get('charset', None)
+ if dec:
+ self.attempt_charsets = [dec] + [c for c in self.attempt_charsets
+ if c != dec]
+ else:
+ self.attempt_charsets = self.attempt_charsets[:]
+
+ # Length
+ self.length = None
+ clen = headers.get('Content-Length', None)
+ # If Transfer-Encoding is 'chunked', ignore any Content-Length.
+ if (
+ clen is not None and
+ 'chunked' not in headers.get('Transfer-Encoding', '')
+ ):
+ try:
+ self.length = int(clen)
+ except ValueError:
+ pass
+
+ # Content-Disposition
+ self.name = None
+ self.filename = None
+ disp = headers.elements('Content-Disposition')
+ if disp:
+ disp = disp[0]
+ if 'name' in disp.params:
+ self.name = disp.params['name']
+ if self.name.startswith('"') and self.name.endswith('"'):
+ self.name = self.name[1:-1]
+ if 'filename' in disp.params:
+ self.filename = disp.params['filename']
+ if (
+ self.filename.startswith('"') and
+ self.filename.endswith('"')
+ ):
+ self.filename = self.filename[1:-1]
+ if 'filename*' in disp.params:
+ # @see https://tools.ietf.org/html/rfc5987
+ encoding, lang, filename = disp.params['filename*'].split("'")
+ self.filename = unquote(str(filename), encoding)
+
+ def read(self, size=None, fp_out=None):
+ return self.fp.read(size, fp_out)
+
+ def readline(self, size=None):
+ return self.fp.readline(size)
+
+ def readlines(self, sizehint=None):
+ return self.fp.readlines(sizehint)
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ line = self.readline()
+ if not line:
+ raise StopIteration
+ return line
+
+ def next(self):
+ return self.__next__()
+
+ def read_into_file(self, fp_out=None):
+ """Read the request body into fp_out (or make_file() if None).
+
+ Return fp_out.
+ """
+ if fp_out is None:
+ fp_out = self.make_file()
+ self.read(fp_out=fp_out)
+ return fp_out
+
+ def make_file(self):
+ """Return a file-like object into which the request body will be read.
+
+ By default, this will return a TemporaryFile. Override as needed.
+ See also :attr:`cherrypy._cpreqbody.Part.maxrambytes`."""
+ return tempfile.TemporaryFile()
+
+ def fullvalue(self):
+ """Return this entity as a string, whether stored in a file or not."""
+ if self.file:
+ # It was stored in a tempfile. Read it.
+ self.file.seek(0)
+ value = self.file.read()
+ self.file.seek(0)
+ else:
+ value = self.value
+ value = self.decode_entity(value)
+ return value
+
+ def decode_entity(self, value):
+ """Return a given byte encoded value as a string"""
+ for charset in self.attempt_charsets:
+ try:
+ value = value.decode(charset)
+ except UnicodeDecodeError:
+ pass
+ else:
+ self.charset = charset
+ return value
+ else:
+ raise cherrypy.HTTPError(
+ 400,
+ 'The request entity could not be decoded. The following '
+ 'charsets were attempted: %s' % repr(self.attempt_charsets)
+ )
+
+ def process(self):
+ """Execute the best-match processor for the given media type."""
+ proc = None
+ ct = self.content_type.value
+ try:
+ proc = self.processors[ct]
+ except KeyError:
+ toptype = ct.split('/', 1)[0]
+ try:
+ proc = self.processors[toptype]
+ except KeyError:
+ pass
+ if proc is None:
+ self.default_proc()
+ else:
+ proc(self)
+
+ def default_proc(self):
+ """Called if a more-specific processor is not found for the
+ ``Content-Type``.
+ """
+ # Leave the fp alone for someone else to read. This works fine
+ # for request.body, but the Part subclasses need to override this
+ # so they can move on to the next part.
+ pass
+
+
+class Part(Entity):
+
+ """A MIME part entity, part of a multipart entity."""
+
+ # "The default character set, which must be assumed in the absence of a
+ # charset parameter, is US-ASCII."
+ attempt_charsets = ['us-ascii', 'utf-8']
+ r"""A list of strings, each of which should be a known encoding.
+
+ When the Content-Type of the request body warrants it, each of the given
+ encodings will be tried in order. The first one to successfully decode the
+ entity without raising an error is stored as
+ :attr:`entity.charset<cherrypy._cpreqbody.Entity.charset>`. This defaults
+ to ``['utf-8']`` (plus 'ISO-8859-1' for "text/\*" types, as required by
+ `HTTP/1.1
+ <http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1>`_),
+ but ``['us-ascii', 'utf-8']`` for multipart parts.
+ """
+
+ boundary = None
+ """The MIME multipart boundary."""
+
+ default_content_type = 'text/plain'
+ """This defines a default ``Content-Type`` to use if no Content-Type header
+ is given. The empty string is used for RequestBody, which results in the
+ request body not being read or parsed at all. This is by design; a missing
+ ``Content-Type`` header in the HTTP request entity is an error at best,
+ and a security hole at worst. For multipart parts, however (this class),
+ the MIME spec declares that a part with no Content-Type defaults to
+ "text/plain".
+ """
+
+ # This is the default in stdlib cgi. We may want to increase it.
+ maxrambytes = 1000
+ """The threshold of bytes after which point the ``Part`` will store
+ its data in a file (generated by
+    :func:`make_file<cherrypy._cpreqbody.Entity.make_file>`)
+ instead of a string. Defaults to 1000, just like the :mod:`cgi`
+ module in Python's standard library.
+ """
+
+ def __init__(self, fp, headers, boundary):
+ Entity.__init__(self, fp, headers)
+ self.boundary = boundary
+ self.file = None
+ self.value = None
+
+ @classmethod
+ def from_fp(cls, fp, boundary):
+ headers = cls.read_headers(fp)
+ return cls(fp, headers, boundary)
+
+ @classmethod
+ def read_headers(cls, fp):
+ headers = httputil.HeaderMap()
+ while True:
+ line = fp.readline()
+ if not line:
+ # No more data--illegal end of headers
+ raise EOFError('Illegal end of headers.')
+
+ if line == b'\r\n':
+ # Normal end of headers
+ break
+ if not line.endswith(b'\r\n'):
+ raise ValueError('MIME requires CRLF terminators: %r' % line)
+
+ if line[0] in b' \t':
+ # It's a continuation line.
+ v = line.strip().decode('ISO-8859-1')
+ else:
+ k, v = line.split(b':', 1)
+ k = k.strip().decode('ISO-8859-1')
+ v = v.strip().decode('ISO-8859-1')
+
+ existing = headers.get(k)
+ if existing:
+ v = ', '.join((existing, v))
+ headers[k] = v
+
+ return headers
+
+ def read_lines_to_boundary(self, fp_out=None):
+ """Read bytes from self.fp and return or write them to a file.
+
+ If the 'fp_out' argument is None (the default), all bytes read are
+ returned in a single byte string.
+
+ If the 'fp_out' argument is not None, it must be a file-like
+ object that supports the 'write' method; all bytes read will be
+ written to the fp, and that fp is returned.
+ """
+ endmarker = self.boundary + b'--'
+ delim = b''
+ prev_lf = True
+ lines = []
+ seen = 0
+ while True:
+ line = self.fp.readline(1 << 16)
+ if not line:
+ raise EOFError('Illegal end of multipart body.')
+ if line.startswith(b'--') and prev_lf:
+ strippedline = line.strip()
+ if strippedline == self.boundary:
+ break
+ if strippedline == endmarker:
+ self.fp.finish()
+ break
+
+ line = delim + line
+
+ if line.endswith(b'\r\n'):
+ delim = b'\r\n'
+ line = line[:-2]
+ prev_lf = True
+ elif line.endswith(b'\n'):
+ delim = b'\n'
+ line = line[:-1]
+ prev_lf = True
+ else:
+ delim = b''
+ prev_lf = False
+
+ if fp_out is None:
+ lines.append(line)
+ seen += len(line)
+ if seen > self.maxrambytes:
+ fp_out = self.make_file()
+ for line in lines:
+ fp_out.write(line)
+ else:
+ fp_out.write(line)
+
+ if fp_out is None:
+ result = b''.join(lines)
+ return result
+ else:
+ fp_out.seek(0)
+ return fp_out
+
+ def default_proc(self):
+ """Called if a more-specific processor is not found for the
+ ``Content-Type``.
+ """
+ if self.filename:
+ # Always read into a file if a .filename was given.
+ self.file = self.read_into_file()
+ else:
+ result = self.read_lines_to_boundary()
+ if isinstance(result, bytes):
+ self.value = result
+ else:
+ self.file = result
+
+ def read_into_file(self, fp_out=None):
+ """Read the request body into fp_out (or make_file() if None).
+
+ Return fp_out.
+ """
+ if fp_out is None:
+ fp_out = self.make_file()
+ self.read_lines_to_boundary(fp_out=fp_out)
+ return fp_out
+
+
+Entity.part_class = Part
+
+inf = float('inf')
+
+
+class SizedReader:
+
+ def __init__(self, fp, length, maxbytes, bufsize=DEFAULT_BUFFER_SIZE,
+ has_trailers=False):
+ # Wrap our fp in a buffer so peek() works
+ self.fp = fp
+ self.length = length
+ self.maxbytes = maxbytes
+ self.buffer = b''
+ self.bufsize = bufsize
+ self.bytes_read = 0
+ self.done = False
+ self.has_trailers = has_trailers
+
+ def read(self, size=None, fp_out=None):
+ """Read bytes from the request body and return or write them to a file.
+
+ A number of bytes less than or equal to the 'size' argument are read
+ off the socket. The actual number of bytes read are tracked in
+ self.bytes_read. The number may be smaller than 'size' when 1) the
+ client sends fewer bytes, 2) the 'Content-Length' request header
+ specifies fewer bytes than requested, or 3) the number of bytes read
+ exceeds self.maxbytes (in which case, 413 is raised).
+
+ If the 'fp_out' argument is None (the default), all bytes read are
+ returned in a single byte string.
+
+ If the 'fp_out' argument is not None, it must be a file-like
+ object that supports the 'write' method; all bytes read will be
+ written to the fp, and None is returned.
+ """
+
+ if self.length is None:
+ if size is None:
+ remaining = inf
+ else:
+ remaining = size
+ else:
+ remaining = self.length - self.bytes_read
+ if size and size < remaining:
+ remaining = size
+ if remaining == 0:
+ self.finish()
+ if fp_out is None:
+ return b''
+ else:
+ return None
+
+ chunks = []
+
+ # Read bytes from the buffer.
+ if self.buffer:
+ if remaining is inf:
+ data = self.buffer
+ self.buffer = b''
+ else:
+ data = self.buffer[:remaining]
+ self.buffer = self.buffer[remaining:]
+ datalen = len(data)
+ remaining -= datalen
+
+ # Check lengths.
+ self.bytes_read += datalen
+ if self.maxbytes and self.bytes_read > self.maxbytes:
+ raise cherrypy.HTTPError(413)
+
+ # Store the data.
+ if fp_out is None:
+ chunks.append(data)
+ else:
+ fp_out.write(data)
+
+ # Read bytes from the socket.
+ while remaining > 0:
+ chunksize = min(remaining, self.bufsize)
+ try:
+ data = self.fp.read(chunksize)
+ except Exception:
+ e = sys.exc_info()[1]
+ if e.__class__.__name__ == 'MaxSizeExceeded':
+ # Post data is too big
+ raise cherrypy.HTTPError(
+ 413, 'Maximum request length: %r' % e.args[1])
+ else:
+ raise
+ if not data:
+ self.finish()
+ break
+ datalen = len(data)
+ remaining -= datalen
+
+ # Check lengths.
+ self.bytes_read += datalen
+ if self.maxbytes and self.bytes_read > self.maxbytes:
+ raise cherrypy.HTTPError(413)
+
+ # Store the data.
+ if fp_out is None:
+ chunks.append(data)
+ else:
+ fp_out.write(data)
+
+ if fp_out is None:
+ return b''.join(chunks)
+
+ def readline(self, size=None):
+ """Read a line from the request body and return it."""
+ chunks = []
+ while size is None or size > 0:
+ chunksize = self.bufsize
+ if size is not None and size < self.bufsize:
+ chunksize = size
+ data = self.read(chunksize)
+ if not data:
+ break
+ pos = data.find(b'\n') + 1
+ if pos:
+ chunks.append(data[:pos])
+ remainder = data[pos:]
+ self.buffer += remainder
+ self.bytes_read -= len(remainder)
+ break
+ else:
+ chunks.append(data)
+ return b''.join(chunks)
+
+ def readlines(self, sizehint=None):
+ """Read lines from the request body and return them."""
+ if self.length is not None:
+ if sizehint is None:
+ sizehint = self.length - self.bytes_read
+ else:
+ sizehint = min(sizehint, self.length - self.bytes_read)
+
+ lines = []
+ seen = 0
+ while True:
+ line = self.readline()
+ if not line:
+ break
+ lines.append(line)
+ seen += len(line)
+ if seen >= sizehint:
+ break
+ return lines
+
+ def finish(self):
+ self.done = True
+ if self.has_trailers and hasattr(self.fp, 'read_trailer_lines'):
+ self.trailers = {}
+
+ try:
+ for line in self.fp.read_trailer_lines():
+ if line[0] in b' \t':
+ # It's a continuation line.
+ v = line.strip()
+ else:
+ try:
+ k, v = line.split(b':', 1)
+ except ValueError:
+ raise ValueError('Illegal header line.')
+ k = k.strip().title()
+ v = v.strip()
+
+ if k in cheroot.server.comma_separated_headers:
+ existing = self.trailers.get(k)
+ if existing:
+ v = b', '.join((existing, v))
+ self.trailers[k] = v
+ except Exception:
+ e = sys.exc_info()[1]
+ if e.__class__.__name__ == 'MaxSizeExceeded':
+ # Post data is too big
+ raise cherrypy.HTTPError(
+ 413, 'Maximum request length: %r' % e.args[1])
+ else:
+ raise
+
+
+class RequestBody(Entity):
+
+ """The entity of the HTTP request."""
+
+ bufsize = 8 * 1024
+ """The buffer size used when reading the socket."""
+
+ # Don't parse the request body at all if the client didn't provide
+ # a Content-Type header. See
+ # https://github.com/cherrypy/cherrypy/issues/790
+ default_content_type = ''
+ """This defines a default ``Content-Type`` to use if no Content-Type header
+ is given. The empty string is used for RequestBody, which results in the
+ request body not being read or parsed at all. This is by design; a missing
+ ``Content-Type`` header in the HTTP request entity is an error at best,
+ and a security hole at worst. For multipart parts, however, the MIME spec
+ declares that a part with no Content-Type defaults to "text/plain"
+ (see :class:`Part<cherrypy._cpreqbody.Part>`).
+ """
+
+ maxbytes = None
+ """Raise ``MaxSizeExceeded`` if more bytes than this are read from
+ the socket.
+ """
+
+ def __init__(self, fp, headers, params=None, request_params=None):
+ Entity.__init__(self, fp, headers, params)
+
+ # http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1
+ # When no explicit charset parameter is provided by the
+ # sender, media subtypes of the "text" type are defined
+ # to have a default charset value of "ISO-8859-1" when
+ # received via HTTP.
+ if self.content_type.value.startswith('text/'):
+ for c in ('ISO-8859-1', 'iso-8859-1', 'Latin-1', 'latin-1'):
+ if c in self.attempt_charsets:
+ break
+ else:
+ self.attempt_charsets.append('ISO-8859-1')
+
+ # Temporary fix while deprecating passing .parts as .params.
+ self.processors['multipart'] = _old_process_multipart
+
+ if request_params is None:
+ request_params = {}
+ self.request_params = request_params
+
+ def process(self):
+ """Process the request entity based on its Content-Type."""
+ # "The presence of a message-body in a request is signaled by the
+ # inclusion of a Content-Length or Transfer-Encoding header field in
+ # the request's message-headers."
+ # It is possible to send a POST request with no body, for example;
+ # however, app developers are responsible in that case to set
+ # cherrypy.request.process_body to False so this method isn't called.
+ h = cherrypy.serving.request.headers
+ if 'Content-Length' not in h and 'Transfer-Encoding' not in h:
+ raise cherrypy.HTTPError(411)
+
+ self.fp = SizedReader(self.fp, self.length,
+ self.maxbytes, bufsize=self.bufsize,
+ has_trailers='Trailer' in h)
+ super(RequestBody, self).process()
+
+ # Body params should also be a part of the request_params
+ # add them in here.
+ request_params = self.request_params
+ for key, value in self.params.items():
+ if key in request_params:
+ if not isinstance(request_params[key], list):
+ request_params[key] = [request_params[key]]
+ request_params[key].append(value)
+ else:
+ request_params[key] = value
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/_cprequest.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/_cprequest.py
new file mode 100644
index 0000000000000000000000000000000000000000..9b86bd674d742df6f2a0a118d72a637a973b7fac
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/_cprequest.py
@@ -0,0 +1,932 @@
+import sys
+import time
+import collections
+import operator
+from http.cookies import SimpleCookie, CookieError
+
+import uuid
+
+from more_itertools import consume
+
+import cherrypy
+from cherrypy._cpcompat import ntob
+from cherrypy import _cpreqbody
+from cherrypy._cperror import format_exc, bare_error
+from cherrypy.lib import httputil, reprconf, encoding
+
+
+class Hook(object):
+
+ """A callback and its metadata: failsafe, priority, and kwargs."""
+
+ callback = None
+ """
+ The bare callable that this Hook object is wrapping, which will
+ be called when the Hook is called."""
+
+ failsafe = False
+ """
+ If True, the callback is guaranteed to run even if other callbacks
+ from the same call point raise exceptions."""
+
+ priority = 50
+ """
+ Defines the order of execution for a list of Hooks. Priority numbers
+ should be limited to the closed interval [0, 100], but values outside
+ this range are acceptable, as are fractional values."""
+
+ kwargs = {}
+ """
+ A set of keyword arguments that will be passed to the
+ callable on each call."""
+
+ def __init__(self, callback, failsafe=None, priority=None, **kwargs):
+ self.callback = callback
+
+ if failsafe is None:
+ failsafe = getattr(callback, 'failsafe', False)
+ self.failsafe = failsafe
+
+ if priority is None:
+ priority = getattr(callback, 'priority', 50)
+ self.priority = priority
+
+ self.kwargs = kwargs
+
+ def __lt__(self, other):
+ """
+ Hooks sort by priority, ascending, such that
+ hooks of lower priority are run first.
+ """
+ return self.priority < other.priority
+
+ def __call__(self):
+ """Run self.callback(**self.kwargs)."""
+ return self.callback(**self.kwargs)
+
+ def __repr__(self):
+ cls = self.__class__
+ return ('%s.%s(callback=%r, failsafe=%r, priority=%r, %s)'
+ % (cls.__module__, cls.__name__, self.callback,
+ self.failsafe, self.priority,
+ ', '.join(['%s=%r' % (k, v)
+ for k, v in self.kwargs.items()])))
+
+
+class HookMap(dict):
+
+ """A map of call points to lists of callbacks (Hook objects)."""
+
+ def __new__(cls, points=None):
+ d = dict.__new__(cls)
+ for p in points or []:
+ d[p] = []
+ return d
+
+ def __init__(self, *a, **kw):
+ pass
+
+ def attach(self, point, callback, failsafe=None, priority=None, **kwargs):
+ """Append a new Hook made from the supplied arguments."""
+ self[point].append(Hook(callback, failsafe, priority, **kwargs))
+
+ def run(self, point):
+ """Execute all registered Hooks (callbacks) for the given point."""
+ self.run_hooks(iter(sorted(self[point])))
+
+ @classmethod
+ def run_hooks(cls, hooks):
+ """Execute the indicated hooks, trapping errors.
+
+ Hooks with ``.failsafe == True`` are guaranteed to run
+ even if others at the same hookpoint fail. In this case,
+ log the failure and proceed on to the next hook. The only
+ way to stop all processing from one of these hooks is
+ to raise a BaseException like SystemExit or
+ KeyboardInterrupt and stop the whole server.
+ """
+ assert isinstance(hooks, collections.abc.Iterator)
+ quiet_errors = (
+ cherrypy.HTTPError,
+ cherrypy.HTTPRedirect,
+ cherrypy.InternalRedirect,
+ )
+ safe = filter(operator.attrgetter('failsafe'), hooks)
+ for hook in hooks:
+ try:
+ hook()
+ except quiet_errors:
+ cls.run_hooks(safe)
+ raise
+ except Exception:
+ cherrypy.log(traceback=True, severity=40)
+ cls.run_hooks(safe)
+ raise
+
+ def __copy__(self):
+ newmap = self.__class__()
+ # We can't just use 'update' because we want copies of the
+ # mutable values (each is a list) as well.
+ for k, v in self.items():
+ newmap[k] = v[:]
+ return newmap
+ copy = __copy__
+
+ def __repr__(self):
+ cls = self.__class__
+ return '%s.%s(points=%r)' % (
+ cls.__module__,
+ cls.__name__,
+ list(self)
+ )
+
+
+# Config namespace handlers
+
+def hooks_namespace(k, v):
+ """Attach bare hooks declared in config."""
+ # Use split again to allow multiple hooks for a single
+ # hookpoint per path (e.g. "hooks.before_handler.1").
+ # Little-known fact you only get from reading source ;)
+ hookpoint = k.split('.', 1)[0]
+ if isinstance(v, str):
+ v = cherrypy.lib.reprconf.attributes(v)
+ if not isinstance(v, Hook):
+ v = Hook(v)
+ cherrypy.serving.request.hooks[hookpoint].append(v)
+
+
+def request_namespace(k, v):
+ """Attach request attributes declared in config."""
+ # Provides config entries to set request.body attrs (like
+ # attempt_charsets).
+ if k[:5] == 'body.':
+ setattr(cherrypy.serving.request.body, k[5:], v)
+ else:
+ setattr(cherrypy.serving.request, k, v)
+
+
+def response_namespace(k, v):
+ """Attach response attributes declared in config."""
+ # Provides config entries to set default response headers
+ # http://cherrypy.org/ticket/889
+ if k[:8] == 'headers.':
+ cherrypy.serving.response.headers[k.split('.', 1)[1]] = v
+ else:
+ setattr(cherrypy.serving.response, k, v)
+
+
+def error_page_namespace(k, v):
+ """Attach error pages declared in config."""
+ if k != 'default':
+ k = int(k)
+ cherrypy.serving.request.error_page[k] = v
+
+
+hookpoints = ['on_start_resource', 'before_request_body',
+ 'before_handler', 'before_finalize',
+ 'on_end_resource', 'on_end_request',
+ 'before_error_response', 'after_error_response']
+
+
+class Request(object):
+
+ """An HTTP request.
+
+ This object represents the metadata of an HTTP request message;
+ that is, it contains attributes which describe the environment
+ in which the request URL, headers, and body were sent (if you
+ want tools to interpret the headers and body, those are elsewhere,
+ mostly in Tools). This 'metadata' consists of socket data,
+ transport characteristics, and the Request-Line. This object
+ also contains data regarding the configuration in effect for
+ the given URL, and the execution plan for generating a response.
+ """
+
+ prev = None
+ """
+ The previous Request object (if any). This should be None
+ unless we are processing an InternalRedirect."""
+
+ # Conversation/connection attributes
+ local = httputil.Host('127.0.0.1', 80)
+ 'An httputil.Host(ip, port, hostname) object for the server socket.'
+
+ remote = httputil.Host('127.0.0.1', 1111)
+ 'An httputil.Host(ip, port, hostname) object for the client socket.'
+
+ scheme = 'http'
+ """
+ The protocol used between client and server. In most cases,
+ this will be either 'http' or 'https'."""
+
+ server_protocol = 'HTTP/1.1'
+ """
+ The HTTP version for which the HTTP server is at least
+ conditionally compliant."""
+
+ base = ''
+ """The (scheme://host) portion of the requested URL.
+ In some cases (e.g. when proxying via mod_rewrite), this may contain
+ path segments which cherrypy.url uses when constructing url's, but
+ which otherwise are ignored by CherryPy. Regardless, this value
+ MUST NOT end in a slash."""
+
+ # Request-Line attributes
+ request_line = ''
+ """
+ The complete Request-Line received from the client. This is a
+ single string consisting of the request method, URI, and protocol
+ version (joined by spaces). Any final CRLF is removed."""
+
+ method = 'GET'
+ """
+ Indicates the HTTP method to be performed on the resource identified
+ by the Request-URI. Common methods include GET, HEAD, POST, PUT, and
+ DELETE. CherryPy allows any extension method; however, various HTTP
+ servers and gateways may restrict the set of allowable methods.
+ CherryPy applications SHOULD restrict the set (on a per-URI basis)."""
+
+ query_string = ''
+ """
+ The query component of the Request-URI, a string of information to be
+ interpreted by the resource. The query portion of a URI follows the
+ path component, and is separated by a '?'. For example, the URI
+ 'http://www.cherrypy.org/wiki?a=3&b=4' has the query component,
+ 'a=3&b=4'."""
+
+ query_string_encoding = 'utf8'
+ """
+ The encoding expected for query string arguments after % HEX HEX decoding).
+ If a query string is provided that cannot be decoded with this encoding,
+ 404 is raised (since technically it's a different URI). If you want
+ arbitrary encodings to not error, set this to 'Latin-1'; you can then
+ encode back to bytes and re-decode to whatever encoding you like later.
+ """
+
+ protocol = (1, 1)
+ """The HTTP protocol version corresponding to the set
+ of features which should be allowed in the response. If BOTH
+ the client's request message AND the server's level of HTTP
+ compliance is HTTP/1.1, this attribute will be the tuple (1, 1).
+ If either is 1.0, this attribute will be the tuple (1, 0).
+ Lower HTTP protocol versions are not explicitly supported."""
+
+ params = {}
+ """
+ A dict which combines query string (GET) and request entity (POST)
+ variables. This is populated in two stages: GET params are added
+ before the 'on_start_resource' hook, and POST params are added
+ between the 'before_request_body' and 'before_handler' hooks."""
+
+ # Message attributes
+ header_list = []
+ """
+ A list of the HTTP request headers as (name, value) tuples.
+ In general, you should use request.headers (a dict) instead."""
+
+ headers = httputil.HeaderMap()
+ """
+ A dict-like object containing the request headers. Keys are header
+ names (in Title-Case format); however, you may get and set them in
+ a case-insensitive manner. That is, headers['Content-Type'] and
+ headers['content-type'] refer to the same value. Values are header
+ values (decoded according to :rfc:`2047` if necessary). See also:
+ httputil.HeaderMap, httputil.HeaderElement."""
+
+ cookie = SimpleCookie()
+ """See help(Cookie)."""
+
+ rfile = None
+ """
+ If the request included an entity (body), it will be available
+ as a stream in this attribute. However, the rfile will normally
+ be read for you between the 'before_request_body' hook and the
+ 'before_handler' hook, and the resulting string is placed into
+ either request.params or the request.body attribute.
+
+ You may disable the automatic consumption of the rfile by setting
+ request.process_request_body to False, either in config for the desired
+ path, or in an 'on_start_resource' or 'before_request_body' hook.
+
+ WARNING: In almost every case, you should not attempt to read from the
+ rfile stream after CherryPy's automatic mechanism has read it. If you
+ turn off the automatic parsing of rfile, you should read exactly the
+ number of bytes specified in request.headers['Content-Length'].
+ Ignoring either of these warnings may result in a hung request thread
+ or in corruption of the next (pipelined) request.
+ """
+
+ process_request_body = True
+ """
+ If True, the rfile (if any) is automatically read and parsed,
+ and the result placed into request.params or request.body."""
+
+ methods_with_bodies = ('POST', 'PUT', 'PATCH')
+ """
+ A sequence of HTTP methods for which CherryPy will automatically
+ attempt to read a body from the rfile. If you are going to change
+ this property, modify it on the configuration (recommended)
+ or on the "hook point" `on_start_resource`.
+ """
+
+ body = None
+ """
+ If the request Content-Type is 'application/x-www-form-urlencoded'
+ or multipart, this will be None. Otherwise, this will be an instance
+ of :class:`RequestBody<cherrypy._cpreqbody.RequestBody>` (which you
+ can .read()); this value is set between the 'before_request_body' and
+ 'before_handler' hooks (assuming that process_request_body is True)."""
+
+ # Dispatch attributes
+ dispatch = cherrypy.dispatch.Dispatcher()
+ """
+ The object which looks up the 'page handler' callable and collects
+ config for the current request based on the path_info, other
+ request attributes, and the application architecture. The core
+ calls the dispatcher as early as possible, passing it a 'path_info'
+ argument.
+
+ The default dispatcher discovers the page handler by matching path_info
+ to a hierarchical arrangement of objects, starting at request.app.root.
+ See help(cherrypy.dispatch) for more information."""
+
+ script_name = ''
+ """
+ The 'mount point' of the application which is handling this request.
+
+ This attribute MUST NOT end in a slash. If the script_name refers to
+ the root of the URI, it MUST be an empty string (not "/").
+ """
+
+ path_info = '/'
+ """
+ The 'relative path' portion of the Request-URI. This is relative
+ to the script_name ('mount point') of the application which is
+ handling this request."""
+
+ login = None
+ """
+ When authentication is used during the request processing this is
+ set to 'False' if it failed and to the 'username' value if it succeeded.
+ The default 'None' implies that no authentication happened."""
+
+ # Note that cherrypy.url uses "if request.app:" to determine whether
+ # the call is during a real HTTP request or not. So leave this None.
+ app = None
+ """The cherrypy.Application object which is handling this request."""
+
+ handler = None
+ """
+ The function, method, or other callable which CherryPy will call to
+ produce the response. The discovery of the handler and the arguments
+ it will receive are determined by the request.dispatch object.
+ By default, the handler is discovered by walking a tree of objects
+ starting at request.app.root, and is then passed all HTTP params
+ (from the query string and POST body) as keyword arguments."""
+
+ toolmaps = {}
+ """
+ A nested dict of all Toolboxes and Tools in effect for this request,
+ of the form: {Toolbox.namespace: {Tool.name: config dict}}."""
+
+ config = None
+ """
+ A flat dict of all configuration entries which apply to the
+ current request. These entries are collected from global config,
+ application config (based on request.path_info), and from handler
+ config (exactly how is governed by the request.dispatch object in
+ effect for this request; by default, handler config can be attached
+ anywhere in the tree between request.app.root and the final handler,
+ and inherits downward)."""
+
+ is_index = None
+ """
+ This will be True if the current request is mapped to an 'index'
+ resource handler (also, a 'default' handler if path_info ends with
+ a slash). The value may be used to automatically redirect the
+ user-agent to a 'more canonical' URL which either adds or removes
+ the trailing slash. See cherrypy.tools.trailing_slash."""
+
+ hooks = HookMap(hookpoints)
+ """
+ A HookMap (dict-like object) of the form: {hookpoint: [hook, ...]}.
+ Each key is a str naming the hook point, and each value is a list
+ of hooks which will be called at that hook point during this request.
+ The list of hooks is generally populated as early as possible (mostly
+ from Tools specified in config), but may be extended at any time.
+ See also: _cprequest.Hook, _cprequest.HookMap, and cherrypy.tools."""
+
+ error_response = cherrypy.HTTPError(500).set_response
+ """
+ The no-arg callable which will handle unexpected, untrapped errors
+ during request processing. This is not used for expected exceptions
+ (like NotFound, HTTPError, or HTTPRedirect) which are raised in
+ response to expected conditions (those should be customized either
+ via request.error_page or by overriding HTTPError.set_response).
+ By default, error_response uses HTTPError(500) to return a generic
+ error response to the user-agent."""
+
+ error_page = {}
+ """
+ A dict of {error code: response filename or callable} pairs.
+
+ The error code must be an int representing a given HTTP error code,
+ or the string 'default', which will be used if no matching entry
+ is found for a given numeric code.
+
+ If a filename is provided, the file should contain a Python string-
+ formatting template, and can expect by default to receive format
+ values with the mapping keys %(status)s, %(message)s, %(traceback)s,
+ and %(version)s. The set of format mappings can be extended by
+ overriding HTTPError.set_response.
+
+ If a callable is provided, it will be called by default with keyword
+ arguments 'status', 'message', 'traceback', and 'version', as for a
+ string-formatting template. The callable must return a string or
+ iterable of strings which will be set to response.body. It may also
+ override headers or perform any other processing.
+
+ If no entry is given for an error code, and no 'default' entry exists,
+ a default template will be used.
+ """
+
+ show_tracebacks = True
+ """
+ If True, unexpected errors encountered during request processing will
+ include a traceback in the response body."""
+
+ show_mismatched_params = True
+ """
+ If True, mismatched parameters encountered during PageHandler invocation
+ processing will be included in the response body."""
+
+ throws = (KeyboardInterrupt, SystemExit, cherrypy.InternalRedirect)
+ """The sequence of exceptions which Request.run does not trap."""
+
+ throw_errors = False
+ """
+ If True, Request.run will not trap any errors (except HTTPRedirect and
+ HTTPError, which are more properly called 'exceptions', not errors)."""
+
+ closed = False
+ """True once the close method has been called, False otherwise."""
+
+ stage = None
+ """
+ A string containing the stage reached in the request-handling process.
+ This is useful when debugging a live server with hung requests."""
+
+ unique_id = None
+ """A lazy object generating and memorizing UUID4 on ``str()`` render."""
+
+ namespaces = reprconf.NamespaceSet(
+ **{'hooks': hooks_namespace,
+ 'request': request_namespace,
+ 'response': response_namespace,
+ 'error_page': error_page_namespace,
+ 'tools': cherrypy.tools,
+ })
+
+ def __init__(self, local_host, remote_host, scheme='http',
+ server_protocol='HTTP/1.1'):
+ """Populate a new Request object.
+
+ local_host should be an httputil.Host object with the server info.
+ remote_host should be an httputil.Host object with the client info.
+ scheme should be a string, either "http" or "https".
+ """
+ self.local = local_host
+ self.remote = remote_host
+ self.scheme = scheme
+ self.server_protocol = server_protocol
+
+ self.closed = False
+
+ # Put a *copy* of the class error_page into self.
+ self.error_page = self.error_page.copy()
+
+ # Put a *copy* of the class namespaces into self.
+ self.namespaces = self.namespaces.copy()
+
+ self.stage = None
+
+ self.unique_id = LazyUUID4()
+
+ def close(self):
+ """Run cleanup code. (Core)"""
+ if not self.closed:
+ self.closed = True
+ self.stage = 'on_end_request'
+ self.hooks.run('on_end_request')
+ self.stage = 'close'
+
+ def run(self, method, path, query_string, req_protocol, headers, rfile):
+ r"""Process the Request. (Core)
+
+ method, path, query_string, and req_protocol should be pulled directly
+ from the Request-Line (e.g. "GET /path?key=val HTTP/1.0").
+
+ path
+ This should be %XX-unquoted, but query_string should not be.
+
+ When using Python 2, they both MUST be byte strings,
+ not unicode strings.
+
+ When using Python 3, they both MUST be unicode strings,
+ not byte strings, and preferably not bytes \x00-\xFF
+ disguised as unicode.
+
+ headers
+ A list of (name, value) tuples.
+
+ rfile
+ A file-like object containing the HTTP request entity.
+
+ When run() is done, the returned object should have 3 attributes:
+
+ * status, e.g. "200 OK"
+ * header_list, a list of (name, value) tuples
+ * body, an iterable yielding strings
+
+ Consumer code (HTTP servers) should then access these response
+ attributes to build the outbound stream.
+
+ """
+ response = cherrypy.serving.response
+ self.stage = 'run'
+ try:
+ self.error_response = cherrypy.HTTPError(500).set_response
+
+ self.method = method
+ path = path or '/'
+ self.query_string = query_string or ''
+ self.params = {}
+
+ # Compare request and server HTTP protocol versions, in case our
+ # server does not support the requested protocol. Limit our output
+ # to min(req, server). We want the following output:
+ # request server actual written supported response
+ # protocol protocol response protocol feature set
+ # a 1.0 1.0 1.0 1.0
+ # b 1.0 1.1 1.1 1.0
+ # c 1.1 1.0 1.0 1.0
+ # d 1.1 1.1 1.1 1.1
+ # Notice that, in (b), the response will be "HTTP/1.1" even though
+ # the client only understands 1.0. RFC 2616 10.5.6 says we should
+ # only return 505 if the _major_ version is different.
+ rp = int(req_protocol[5]), int(req_protocol[7])
+ sp = int(self.server_protocol[5]), int(self.server_protocol[7])
+ self.protocol = min(rp, sp)
+ response.headers.protocol = self.protocol
+
+ # Rebuild first line of the request (e.g. "GET /path HTTP/1.0").
+ url = path
+ if query_string:
+ url += '?' + query_string
+ self.request_line = '%s %s %s' % (method, url, req_protocol)
+
+ self.header_list = list(headers)
+ self.headers = httputil.HeaderMap()
+
+ self.rfile = rfile
+ self.body = None
+
+ self.cookie = SimpleCookie()
+ self.handler = None
+
+ # path_info should be the path from the
+ # app root (script_name) to the handler.
+ self.script_name = self.app.script_name
+ self.path_info = pi = path[len(self.script_name):]
+
+ self.stage = 'respond'
+ self.respond(pi)
+
+ except self.throws:
+ raise
+ except Exception:
+ if self.throw_errors:
+ raise
+ else:
+ # Failure in setup, error handler or finalize. Bypass them.
+ # Can't use handle_error because we may not have hooks yet.
+ cherrypy.log(traceback=True, severity=40)
+ if self.show_tracebacks:
+ body = format_exc()
+ else:
+ body = ''
+ r = bare_error(body)
+ response.output_status, response.header_list, response.body = r
+
+ if self.method == 'HEAD':
+ # HEAD requests MUST NOT return a message-body in the response.
+ response.body = []
+
+ try:
+ cherrypy.log.access()
+ except Exception:
+ cherrypy.log.error(traceback=True)
+
+ return response
+
+ def respond(self, path_info):
+ """Generate a response for the resource at self.path_info. (Core)"""
+ try:
+ try:
+ try:
+ self._do_respond(path_info)
+ except (cherrypy.HTTPRedirect, cherrypy.HTTPError):
+ inst = sys.exc_info()[1]
+ inst.set_response()
+ self.stage = 'before_finalize (HTTPError)'
+ self.hooks.run('before_finalize')
+ cherrypy.serving.response.finalize()
+ finally:
+ self.stage = 'on_end_resource'
+ self.hooks.run('on_end_resource')
+ except self.throws:
+ raise
+ except Exception:
+ if self.throw_errors:
+ raise
+ self.handle_error()
+
+ def _do_respond(self, path_info):
+ response = cherrypy.serving.response
+
+ if self.app is None:
+ raise cherrypy.NotFound()
+
+ self.hooks = self.__class__.hooks.copy()
+ self.toolmaps = {}
+
+ # Get the 'Host' header, so we can HTTPRedirect properly.
+ self.stage = 'process_headers'
+ self.process_headers()
+
+ self.stage = 'get_resource'
+ self.get_resource(path_info)
+
+ self.body = _cpreqbody.RequestBody(
+ self.rfile, self.headers, request_params=self.params)
+
+ self.namespaces(self.config)
+
+ self.stage = 'on_start_resource'
+ self.hooks.run('on_start_resource')
+
+ # Parse the querystring
+ self.stage = 'process_query_string'
+ self.process_query_string()
+
+ # Process the body
+ if self.process_request_body:
+ if self.method not in self.methods_with_bodies:
+ self.process_request_body = False
+ self.stage = 'before_request_body'
+ self.hooks.run('before_request_body')
+ if self.process_request_body:
+ self.body.process()
+
+ # Run the handler
+ self.stage = 'before_handler'
+ self.hooks.run('before_handler')
+ if self.handler:
+ self.stage = 'handler'
+ response.body = self.handler()
+
+ # Finalize
+ self.stage = 'before_finalize'
+ self.hooks.run('before_finalize')
+ response.finalize()
+
+ def process_query_string(self):
+ """Parse the query string into Python structures. (Core)"""
+ try:
+ p = httputil.parse_query_string(
+ self.query_string, encoding=self.query_string_encoding)
+ except UnicodeDecodeError:
+ raise cherrypy.HTTPError(
+ 404, 'The given query string could not be processed. Query '
+ 'strings for this resource must be encoded with %r.' %
+ self.query_string_encoding)
+
+ self.params.update(p)
+
+ def process_headers(self):
+ """Parse HTTP header data into Python structures. (Core)"""
+ # Process the headers into self.headers
+ headers = self.headers
+ for name, value in self.header_list:
+ # Call title() now (and use dict.__method__(headers))
+ # so title doesn't have to be called twice.
+ name = name.title()
+ value = value.strip()
+
+ headers[name] = httputil.decode_TEXT_maybe(value)
+
+ # Some clients, notably Konquoror, supply multiple
+ # cookies on different lines with the same key. To
+ # handle this case, store all cookies in self.cookie.
+ if name == 'Cookie':
+ try:
+ self.cookie.load(value)
+ except CookieError as exc:
+ raise cherrypy.HTTPError(400, str(exc))
+
+ if not dict.__contains__(headers, 'Host'):
+ # All Internet-based HTTP/1.1 servers MUST respond with a 400
+ # (Bad Request) status code to any HTTP/1.1 request message
+ # which lacks a Host header field.
+ if self.protocol >= (1, 1):
+ msg = "HTTP/1.1 requires a 'Host' request header."
+ raise cherrypy.HTTPError(400, msg)
+ host = dict.get(headers, 'Host')
+ if not host:
+ host = self.local.name or self.local.ip
+ self.base = '%s://%s' % (self.scheme, host)
+
+ def get_resource(self, path):
+ """Call a dispatcher (which sets self.handler and .config). (Core)"""
+ # First, see if there is a custom dispatch at this URI. Custom
+ # dispatchers can only be specified in app.config, not in _cp_config
+ # (since custom dispatchers may not even have an app.root).
+ dispatch = self.app.find_config(
+ path, 'request.dispatch', self.dispatch)
+
+ # dispatch() should set self.handler and self.config
+ dispatch(path)
+
+ def handle_error(self):
+ """Handle the last unanticipated exception. (Core)"""
+ try:
+ self.hooks.run('before_error_response')
+ if self.error_response:
+ self.error_response()
+ self.hooks.run('after_error_response')
+ cherrypy.serving.response.finalize()
+ except cherrypy.HTTPRedirect:
+ inst = sys.exc_info()[1]
+ inst.set_response()
+ cherrypy.serving.response.finalize()
+
+
+class ResponseBody(object):
+
+ """The body of the HTTP response (the response entity)."""
+
+ unicode_err = ('Page handlers MUST return bytes. Use tools.encode '
+ 'if you wish to return unicode.')
+
+ def __get__(self, obj, objclass=None):
+ if obj is None:
+ # When calling on the class instead of an instance...
+ return self
+ else:
+ return obj._body
+
+ def __set__(self, obj, value):
+ # Convert the given value to an iterable object.
+ if isinstance(value, str):
+ raise ValueError(self.unicode_err)
+ elif isinstance(value, list):
+ # every item in a list must be bytes...
+ if any(isinstance(item, str) for item in value):
+ raise ValueError(self.unicode_err)
+
+ obj._body = encoding.prepare_iter(value)
+
+
+class Response(object):
+
+ """An HTTP Response, including status, headers, and body."""
+
+ status = ''
+ """The HTTP Status-Code and Reason-Phrase."""
+
+ header_list = []
+ """
+ A list of the HTTP response headers as (name, value) tuples.
+ In general, you should use response.headers (a dict) instead. This
+ attribute is generated from response.headers and is not valid until
+ after the finalize phase."""
+
+ headers = httputil.HeaderMap()
+ """
+ A dict-like object containing the response headers. Keys are header
+ names (in Title-Case format); however, you may get and set them in
+ a case-insensitive manner. That is, headers['Content-Type'] and
+ headers['content-type'] refer to the same value. Values are header
+ values (decoded according to :rfc:`2047` if necessary).
+
+ .. seealso:: classes :class:`HeaderMap`, :class:`HeaderElement`
+ """
+
+ cookie = SimpleCookie()
+ """See help(Cookie)."""
+
+ body = ResponseBody()
+ """The body (entity) of the HTTP response."""
+
+ time = None
+ """The value of time.time() when created. Use in HTTP dates."""
+
+ stream = False
+ """If False, buffer the response body."""
+
+ def __init__(self):
+ self.status = None
+ self.header_list = None
+ self._body = []
+ self.time = time.time()
+
+ self.headers = httputil.HeaderMap()
+ # Since we know all our keys are titled strings, we can
+ # bypass HeaderMap.update and get a big speed boost.
+ dict.update(self.headers, {
+ 'Content-Type': 'text/html',
+ 'Server': 'CherryPy/' + cherrypy.__version__,
+ 'Date': httputil.HTTPDate(self.time),
+ })
+ self.cookie = SimpleCookie()
+
+ def collapse_body(self):
+ """Collapse self.body to a single string; replace it and return it."""
+ new_body = b''.join(self.body)
+ self.body = new_body
+ return new_body
+
+ def _flush_body(self):
+ """
+ Discard self.body but consume any generator such that
+ any finalization can occur, such as is required by
+ caching.tee_output().
+ """
+ consume(iter(self.body))
+
+ def finalize(self):
+ """Transform headers (and cookies) into self.header_list. (Core)"""
+ try:
+ code, reason, _ = httputil.valid_status(self.status)
+ except ValueError:
+ raise cherrypy.HTTPError(500, sys.exc_info()[1].args[0])
+
+ headers = self.headers
+
+ self.status = '%s %s' % (code, reason)
+ self.output_status = ntob(str(code), 'ascii') + \
+ b' ' + headers.encode(reason)
+
+ if self.stream:
+ # The upshot: wsgiserver will chunk the response if
+ # you pop Content-Length (or set it explicitly to None).
+ # Note that lib.static sets C-L to the file's st_size.
+ if dict.get(headers, 'Content-Length') is None:
+ dict.pop(headers, 'Content-Length', None)
+ elif code < 200 or code in (204, 205, 304):
+ # "All 1xx (informational), 204 (no content),
+ # and 304 (not modified) responses MUST NOT
+ # include a message-body."
+ dict.pop(headers, 'Content-Length', None)
+ self._flush_body()
+ self.body = b''
+ else:
+ # Responses which are not streamed should have a Content-Length,
+ # but allow user code to set Content-Length if desired.
+ if dict.get(headers, 'Content-Length') is None:
+ content = self.collapse_body()
+ dict.__setitem__(headers, 'Content-Length', len(content))
+
+ # Transform our header dict into a list of tuples.
+ self.header_list = h = headers.output()
+
+ cookie = self.cookie.output()
+ if cookie:
+ for line in cookie.split('\r\n'):
+ name, value = line.split(': ', 1)
+ if isinstance(name, str):
+ name = name.encode('ISO-8859-1')
+ if isinstance(value, str):
+ value = headers.encode(value)
+ h.append((name, value))
+
+
+class LazyUUID4(object):
+ def __str__(self):
+ """Return UUID4 and keep it for future calls."""
+ return str(self.uuid4)
+
+ @property
+ def uuid4(self):
+ """Provide unique id on per-request basis using UUID4.
+
+ It's evaluated lazily on render.
+ """
+ try:
+ self._uuid4
+ except AttributeError:
+ # evaluate on first access
+ self._uuid4 = uuid.uuid4()
+
+ return self._uuid4
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/_cpserver.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/_cpserver.py
new file mode 100644
index 0000000000000000000000000000000000000000..5f8d98fa3eb0a9bb79dd2e184ff70180e7fd147a
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/_cpserver.py
@@ -0,0 +1,241 @@
+"""Manage HTTP servers with CherryPy."""
+
+import cherrypy
+from cherrypy.lib.reprconf import attributes
+from cherrypy._cpcompat import text_or_bytes
+from cherrypy.process.servers import ServerAdapter
+
+
+__all__ = ('Server', )
+
+
+class Server(ServerAdapter):
+ """An adapter for an HTTP server.
+
+ You can set attributes (like socket_host and socket_port)
+ on *this* object (which is probably cherrypy.server), and call
+ quickstart. For example::
+
+ cherrypy.server.socket_port = 80
+ cherrypy.quickstart()
+ """
+
+ socket_port = 8080
+ """The TCP port on which to listen for connections."""
+
+ _socket_host = '127.0.0.1'
+
+ @property
+ def socket_host(self): # noqa: D401; irrelevant for properties
+ """The hostname or IP address on which to listen for connections.
+
+ Host values may be any IPv4 or IPv6 address, or any valid hostname.
+ The string 'localhost' is a synonym for '127.0.0.1' (or '::1', if
+ your hosts file prefers IPv6). The string '0.0.0.0' is a special
+ IPv4 entry meaning "any active interface" (INADDR_ANY), and '::'
+ is the similar IN6ADDR_ANY for IPv6. The empty string or None are
+ not allowed.
+ """
+ return self._socket_host
+
+ @socket_host.setter
+ def socket_host(self, value):
+ if value == '':
+ raise ValueError("The empty string ('') is not an allowed value. "
+ "Use '0.0.0.0' instead to listen on all active "
+ 'interfaces (INADDR_ANY).')
+ self._socket_host = value
+
+ socket_file = None
+ """If given, the name of the UNIX socket to use instead of TCP/IP.
+
+ When this option is not None, the `socket_host` and `socket_port` options
+ are ignored."""
+
+ socket_queue_size = 5
+ """The 'backlog' argument to socket.listen(); specifies the maximum number
+ of queued connections (default 5)."""
+
+ socket_timeout = 10
+ """The timeout in seconds for accepted connections (default 10)."""
+
+ accepted_queue_size = -1
+ """The maximum number of requests which will be queued up before
+ the server refuses to accept it (default -1, meaning no limit)."""
+
+ accepted_queue_timeout = 10
+ """The timeout in seconds for attempting to add a request to the
+ queue when the queue is full (default 10)."""
+
+ shutdown_timeout = 5
+ """The time to wait for HTTP worker threads to clean up."""
+
+ protocol_version = 'HTTP/1.1'
+ """The version string to write in the Status-Line of all HTTP responses,
+ for example, "HTTP/1.1" (the default). Depending on the HTTP server used,
+ this should also limit the supported features used in the response."""
+
+ thread_pool = 10
+ """The number of worker threads to start up in the pool."""
+
+ thread_pool_max = -1
+ """The maximum size of the worker-thread pool. Use -1 to indicate no limit.
+ """
+
+ max_request_header_size = 500 * 1024
+ """The maximum number of bytes allowable in the request headers.
+ If exceeded, the HTTP server should return "413 Request Entity Too Large".
+ """
+
+ max_request_body_size = 100 * 1024 * 1024
+ """The maximum number of bytes allowable in the request body. If exceeded,
+ the HTTP server should return "413 Request Entity Too Large"."""
+
+ instance = None
+ """If not None, this should be an HTTP server instance (such as
+ cheroot.wsgi.Server) which cherrypy.server will control.
+ Use this when you need
+ more control over object instantiation than is available in the various
+ configuration options."""
+
+ ssl_context = None
+ """When using PyOpenSSL, an instance of SSL.Context."""
+
+ ssl_certificate = None
+ """The filename of the SSL certificate to use."""
+
+ ssl_certificate_chain = None
+ """When using PyOpenSSL, the certificate chain to pass to
+ Context.load_verify_locations."""
+
+ ssl_private_key = None
+ """The filename of the private key to use with SSL."""
+
+ ssl_ciphers = None
+ """The ciphers list of SSL."""
+
+ ssl_module = 'builtin'
+ """The name of a registered SSL adaptation module to use with
+ the builtin WSGI server. Builtin options are: 'builtin' (to
+ use the SSL library built into recent versions of Python).
+ You may also register your own classes in the
+ cheroot.server.ssl_adapters dict."""
+
+ statistics = False
+ """Turns statistics-gathering on or off for aware HTTP servers."""
+
+ nodelay = True
+ """If True (the default since 3.1), sets the TCP_NODELAY socket option."""
+
+ wsgi_version = (1, 0)
+ """The WSGI version tuple to use with the builtin WSGI server.
+ The provided options are (1, 0) [which includes support for PEP 3333,
+ which declares it covers WSGI version 1.0.1 but still mandates the
+ wsgi.version (1, 0)] and ('u', 0), an experimental unicode version.
+ You may create and register your own experimental versions of the WSGI
+ protocol by adding custom classes to the cheroot.server.wsgi_gateways dict.
+ """
+
+ peercreds = False
+ """If True, peer cred lookup for UNIX domain socket will put to WSGI env.
+
+ This information will then be available through WSGI env vars:
+ * X_REMOTE_PID
+ * X_REMOTE_UID
+ * X_REMOTE_GID
+ """
+
+ peercreds_resolve = False
+ """If True, username/group will be looked up in the OS from peercreds.
+
+ This information will then be available through WSGI env vars:
+ * REMOTE_USER
+ * X_REMOTE_USER
+ * X_REMOTE_GROUP
+ """
+
+ def __init__(self):
+ """Initialize Server instance."""
+ self.bus = cherrypy.engine
+ self.httpserver = None
+ self.interrupt = None
+ self.running = False
+
+ def httpserver_from_self(self, httpserver=None):
+ """Return a (httpserver, bind_addr) pair based on self attributes."""
+ if httpserver is None:
+ httpserver = self.instance
+ if httpserver is None:
+ from cherrypy import _cpwsgi_server
+ httpserver = _cpwsgi_server.CPWSGIServer(self)
+ if isinstance(httpserver, text_or_bytes):
+ # Is anyone using this? Can I add an arg?
+ httpserver = attributes(httpserver)(self)
+ return httpserver, self.bind_addr
+
+ def start(self):
+ """Start the HTTP server."""
+ if not self.httpserver:
+ self.httpserver, self.bind_addr = self.httpserver_from_self()
+ super(Server, self).start()
+ start.priority = 75
+
+ @property
+ def bind_addr(self):
+ """Return bind address.
+
+ A (host, port) tuple for TCP sockets or a str for Unix domain sockts.
+ """
+ if self.socket_file:
+ return self.socket_file
+ if self.socket_host is None and self.socket_port is None:
+ return None
+ return (self.socket_host, self.socket_port)
+
+ @bind_addr.setter
+ def bind_addr(self, value):
+ if value is None:
+ self.socket_file = None
+ self.socket_host = None
+ self.socket_port = None
+ elif isinstance(value, text_or_bytes):
+ self.socket_file = value
+ self.socket_host = None
+ self.socket_port = None
+ else:
+ try:
+ self.socket_host, self.socket_port = value
+ self.socket_file = None
+ except ValueError:
+ raise ValueError('bind_addr must be a (host, port) tuple '
+ '(for TCP sockets) or a string (for Unix '
+ 'domain sockets), not %r' % value)
+
+ def base(self):
+ """Return the base for this server.
+
+ e.i. scheme://host[:port] or sock file
+ """
+ if self.socket_file:
+ return self.socket_file
+
+ host = self.socket_host
+ if host in ('0.0.0.0', '::'):
+ # 0.0.0.0 is INADDR_ANY and :: is IN6ADDR_ANY.
+ # Look up the host name, which should be the
+ # safest thing to spit out in a URL.
+ import socket
+ host = socket.gethostname()
+
+ port = self.socket_port
+
+ if self.ssl_certificate:
+ scheme = 'https'
+ if port != 443:
+ host += ':%s' % port
+ else:
+ scheme = 'http'
+ if port != 80:
+ host += ':%s' % port
+
+ return '%s://%s' % (scheme, host)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/_cptools.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/_cptools.py
new file mode 100644
index 0000000000000000000000000000000000000000..716f99a49f47d065473b9f0e272225605e17c9e8
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/_cptools.py
@@ -0,0 +1,502 @@
+"""CherryPy tools. A "tool" is any helper, adapted to CP.
+
+Tools are usually designed to be used in a variety of ways (although some
+may only offer one if they choose):
+
+ Library calls
+ All tools are callables that can be used wherever needed.
+ The arguments are straightforward and should be detailed within the
+ docstring.
+
+ Function decorators
+ All tools, when called, may be used as decorators which configure
+ individual CherryPy page handlers (methods on the CherryPy tree).
+ That is, "@tools.anytool()" should "turn on" the tool via the
+ decorated function's _cp_config attribute.
+
+ CherryPy config
+ If a tool exposes a "_setup" callable, it will be called
+ once per Request (if the feature is "turned on" via config).
+
+Tools may be implemented as any object with a namespace. The builtins
+are generally either modules or instances of the tools.Tool class.
+"""
+
+import cherrypy
+from cherrypy._helper import expose
+
+from cherrypy.lib import cptools, encoding, static, jsontools
+from cherrypy.lib import sessions as _sessions, xmlrpcutil as _xmlrpc
+from cherrypy.lib import caching as _caching
+from cherrypy.lib import auth_basic, auth_digest
+
+
+def _getargs(func):
+ """Return the names of all static arguments to the given function."""
+ # Use this instead of importing inspect for less mem overhead.
+ import types
+ if isinstance(func, types.MethodType):
+ func = func.__func__
+ co = func.__code__
+ return co.co_varnames[:co.co_argcount]
+
+
+_attr_error = (
+ 'CherryPy Tools cannot be turned on directly. Instead, turn them '
+ 'on via config, or use them as decorators on your page handlers.'
+)
+
+
+class Tool(object):
+
+ """A registered function for use with CherryPy request-processing hooks.
+
+ help(tool.callable) should give you more information about this Tool.
+ """
+
+ namespace = 'tools'
+
+ def __init__(self, point, callable, name=None, priority=50):
+ self._point = point
+ self.callable = callable
+ self._name = name
+ self._priority = priority
+ self.__doc__ = self.callable.__doc__
+ self._setargs()
+
+ @property
+ def on(self):
+ raise AttributeError(_attr_error)
+
+ @on.setter
+ def on(self, value):
+ raise AttributeError(_attr_error)
+
+ def _setargs(self):
+ """Copy func parameter names to obj attributes."""
+ try:
+ for arg in _getargs(self.callable):
+ setattr(self, arg, None)
+ except (TypeError, AttributeError):
+ if hasattr(self.callable, '__call__'):
+ for arg in _getargs(self.callable.__call__):
+ setattr(self, arg, None)
+ # IronPython 1.0 raises NotImplementedError because
+ # inspect.getargspec tries to access Python bytecode
+ # in co_code attribute.
+ except NotImplementedError:
+ pass
+ # IronPython 1B1 may raise IndexError in some cases,
+ # but if we trap it here it doesn't prevent CP from working.
+ except IndexError:
+ pass
+
+ def _merged_args(self, d=None):
+ """Return a dict of configuration entries for this Tool."""
+ if d:
+ conf = d.copy()
+ else:
+ conf = {}
+
+ tm = cherrypy.serving.request.toolmaps[self.namespace]
+ if self._name in tm:
+ conf.update(tm[self._name])
+
+ if 'on' in conf:
+ del conf['on']
+
+ return conf
+
+ def __call__(self, *args, **kwargs):
+ """Compile-time decorator (turn on the tool in config).
+
+ For example::
+
+ @expose
+ @tools.proxy()
+ def whats_my_base(self):
+ return cherrypy.request.base
+ """
+ if args:
+ raise TypeError('The %r Tool does not accept positional '
+ 'arguments; you must use keyword arguments.'
+ % self._name)
+
+ def tool_decorator(f):
+ if not hasattr(f, '_cp_config'):
+ f._cp_config = {}
+ subspace = self.namespace + '.' + self._name + '.'
+ f._cp_config[subspace + 'on'] = True
+ for k, v in kwargs.items():
+ f._cp_config[subspace + k] = v
+ return f
+ return tool_decorator
+
+ def _setup(self):
+ """Hook this tool into cherrypy.request.
+
+ The standard CherryPy request object will automatically call this
+ method when the tool is "turned on" in config.
+ """
+ conf = self._merged_args()
+ p = conf.pop('priority', None)
+ if p is None:
+ p = getattr(self.callable, 'priority', self._priority)
+ cherrypy.serving.request.hooks.attach(self._point, self.callable,
+ priority=p, **conf)
+
+
+class HandlerTool(Tool):
+
+ """Tool which is called 'before main', that may skip normal handlers.
+
+ If the tool successfully handles the request (by setting response.body),
+ if should return True. This will cause CherryPy to skip any 'normal' page
+ handler. If the tool did not handle the request, it should return False
+ to tell CherryPy to continue on and call the normal page handler. If the
+ tool is declared AS a page handler (see the 'handler' method), returning
+ False will raise NotFound.
+ """
+
+ def __init__(self, callable, name=None):
+ Tool.__init__(self, 'before_handler', callable, name)
+
+ def handler(self, *args, **kwargs):
+ """Use this tool as a CherryPy page handler.
+
+ For example::
+
+ class Root:
+ nav = tools.staticdir.handler(section="/nav", dir="nav",
+ root=absDir)
+ """
+ @expose
+ def handle_func(*a, **kw):
+ handled = self.callable(*args, **self._merged_args(kwargs))
+ if not handled:
+ raise cherrypy.NotFound()
+ return cherrypy.serving.response.body
+ return handle_func
+
+ def _wrapper(self, **kwargs):
+ if self.callable(**kwargs):
+ cherrypy.serving.request.handler = None
+
+ def _setup(self):
+ """Hook this tool into cherrypy.request.
+
+ The standard CherryPy request object will automatically call this
+ method when the tool is "turned on" in config.
+ """
+ conf = self._merged_args()
+ p = conf.pop('priority', None)
+ if p is None:
+ p = getattr(self.callable, 'priority', self._priority)
+ cherrypy.serving.request.hooks.attach(self._point, self._wrapper,
+ priority=p, **conf)
+
+
+class HandlerWrapperTool(Tool):
+
+ """Tool which wraps request.handler in a provided wrapper function.
+
+ The 'newhandler' arg must be a handler wrapper function that takes a
+ 'next_handler' argument, plus ``*args`` and ``**kwargs``. Like all
+ page handler
+ functions, it must return an iterable for use as cherrypy.response.body.
+
+ For example, to allow your 'inner' page handlers to return dicts
+ which then get interpolated into a template::
+
+ def interpolator(next_handler, *args, **kwargs):
+ filename = cherrypy.request.config.get('template')
+ cherrypy.response.template = env.get_template(filename)
+ response_dict = next_handler(*args, **kwargs)
+ return cherrypy.response.template.render(**response_dict)
+ cherrypy.tools.jinja = HandlerWrapperTool(interpolator)
+ """
+
+ def __init__(self, newhandler, point='before_handler', name=None,
+ priority=50):
+ self.newhandler = newhandler
+ self._point = point
+ self._name = name
+ self._priority = priority
+
+ def callable(self, *args, **kwargs):
+ innerfunc = cherrypy.serving.request.handler
+
+ def wrap(*args, **kwargs):
+ return self.newhandler(innerfunc, *args, **kwargs)
+ cherrypy.serving.request.handler = wrap
+
+
+class ErrorTool(Tool):
+
+ """Tool which is used to replace the default request.error_response."""
+
+ def __init__(self, callable, name=None):
+ Tool.__init__(self, None, callable, name)
+
+ def _wrapper(self):
+ self.callable(**self._merged_args())
+
+ def _setup(self):
+ """Hook this tool into cherrypy.request.
+
+ The standard CherryPy request object will automatically call this
+ method when the tool is "turned on" in config.
+ """
+ cherrypy.serving.request.error_response = self._wrapper
+
+
+# Builtin tools #
+
+
+class SessionTool(Tool):
+
+ """Session Tool for CherryPy.
+
+ sessions.locking
+ When 'implicit' (the default), the session will be locked for you,
+ just before running the page handler.
+
+ When 'early', the session will be locked before reading the request
+ body. This is off by default for safety reasons; for example,
+ a large upload would block the session, denying an AJAX
+ progress meter
+ (`issue <https://github.com/cherrypy/cherrypy/issues/630>`_).
+
+ When 'explicit' (or any other value), you need to call
+ cherrypy.session.acquire_lock() yourself before using
+ session data.
+ """
+
+ def __init__(self):
+ # _sessions.init must be bound after headers are read
+ Tool.__init__(self, 'before_request_body', _sessions.init)
+
+ def _lock_session(self):
+ cherrypy.serving.session.acquire_lock()
+
+ def _setup(self):
+ """Hook this tool into cherrypy.request.
+
+ The standard CherryPy request object will automatically call this
+ method when the tool is "turned on" in config.
+ """
+ hooks = cherrypy.serving.request.hooks
+
+ conf = self._merged_args()
+
+ p = conf.pop('priority', None)
+ if p is None:
+ p = getattr(self.callable, 'priority', self._priority)
+
+ hooks.attach(self._point, self.callable, priority=p, **conf)
+
+ locking = conf.pop('locking', 'implicit')
+ if locking == 'implicit':
+ hooks.attach('before_handler', self._lock_session)
+ elif locking == 'early':
+ # Lock before the request body (but after _sessions.init runs!)
+ hooks.attach('before_request_body', self._lock_session,
+ priority=60)
+ else:
+ # Don't lock
+ pass
+
+ hooks.attach('before_finalize', _sessions.save)
+ hooks.attach('on_end_request', _sessions.close)
+
+ def regenerate(self):
+ """Drop the current session and make a new one (with a new id)."""
+ sess = cherrypy.serving.session
+ sess.regenerate()
+
+ # Grab cookie-relevant tool args
+ relevant = 'path', 'path_header', 'name', 'timeout', 'domain', 'secure'
+ conf = dict(
+ (k, v)
+ for k, v in self._merged_args().items()
+ if k in relevant
+ )
+ _sessions.set_response_cookie(**conf)
+
+
+class XMLRPCController(object):
+
+ """A Controller (page handler collection) for XML-RPC.
+
+ To use it, have your controllers subclass this base class (it will
+ turn on the tool for you).
+
+ You can also supply the following optional config entries::
+
+ tools.xmlrpc.encoding: 'utf-8'
+ tools.xmlrpc.allow_none: 0
+
+ XML-RPC is a rather discontinuous layer over HTTP; dispatching to the
+ appropriate handler must first be performed according to the URL, and
+ then a second dispatch step must take place according to the RPC method
+ specified in the request body. It also allows a superfluous "/RPC2"
+ prefix in the URL, supplies its own handler args in the body, and
+ requires a 200 OK "Fault" response instead of 404 when the desired
+ method is not found.
+
+ Therefore, XML-RPC cannot be implemented for CherryPy via a Tool alone.
+ This Controller acts as the dispatch target for the first half (based
+ on the URL); it then reads the RPC method from the request body and
+ does its own second dispatch step based on that method. It also reads
+ body params, and returns a Fault on error.
+
+ The XMLRPCDispatcher strips any /RPC2 prefix; if you aren't using /RPC2
+ in your URL's, you can safely skip turning on the XMLRPCDispatcher.
+ Otherwise, you need to use declare it in config::
+
+ request.dispatch: cherrypy.dispatch.XMLRPCDispatcher()
+ """
+
+ # Note we're hard-coding this into the 'tools' namespace. We could do
+ # a huge amount of work to make it relocatable, but the only reason why
+ # would be if someone actually disabled the default_toolbox. Meh.
+ _cp_config = {'tools.xmlrpc.on': True}
+
+ @expose
+ def default(self, *vpath, **params):
+ rpcparams, rpcmethod = _xmlrpc.process_body()
+
+ subhandler = self
+ for attr in str(rpcmethod).split('.'):
+ subhandler = getattr(subhandler, attr, None)
+
+ if subhandler and getattr(subhandler, 'exposed', False):
+ body = subhandler(*(vpath + rpcparams), **params)
+
+ else:
+ # https://github.com/cherrypy/cherrypy/issues/533
+ # if a method is not found, an xmlrpclib.Fault should be returned
+ # raising an exception here will do that; see
+ # cherrypy.lib.xmlrpcutil.on_error
+ raise Exception('method "%s" is not supported' % attr)
+
+ conf = cherrypy.serving.request.toolmaps['tools'].get('xmlrpc', {})
+ _xmlrpc.respond(body,
+ conf.get('encoding', 'utf-8'),
+ conf.get('allow_none', 0))
+ return cherrypy.serving.response.body
+
+
+class SessionAuthTool(HandlerTool):
+ pass
+
+
+class CachingTool(Tool):
+
+ """Caching Tool for CherryPy."""
+
+ def _wrapper(self, **kwargs):
+ request = cherrypy.serving.request
+ if _caching.get(**kwargs):
+ request.handler = None
+ else:
+ if request.cacheable:
+ # Note the devious technique here of adding hooks on the fly
+ request.hooks.attach('before_finalize', _caching.tee_output,
+ priority=100)
+ _wrapper.priority = 90
+
+ def _setup(self):
+ """Hook caching into cherrypy.request."""
+ conf = self._merged_args()
+
+ p = conf.pop('priority', None)
+ cherrypy.serving.request.hooks.attach('before_handler', self._wrapper,
+ priority=p, **conf)
+
+
+class Toolbox(object):
+
+ """A collection of Tools.
+
+ This object also functions as a config namespace handler for itself.
+ Custom toolboxes should be added to each Application's toolboxes dict.
+ """
+
+ def __init__(self, namespace):
+ self.namespace = namespace
+
+ def __setattr__(self, name, value):
+ # If the Tool._name is None, supply it from the attribute name.
+ if isinstance(value, Tool):
+ if value._name is None:
+ value._name = name
+ value.namespace = self.namespace
+ object.__setattr__(self, name, value)
+
+ def __enter__(self):
+ """Populate request.toolmaps from tools specified in config."""
+ cherrypy.serving.request.toolmaps[self.namespace] = map = {}
+
+ def populate(k, v):
+ toolname, arg = k.split('.', 1)
+ bucket = map.setdefault(toolname, {})
+ bucket[arg] = v
+ return populate
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ """Run tool._setup() for each tool in our toolmap."""
+ map = cherrypy.serving.request.toolmaps.get(self.namespace)
+ if map:
+ for name, settings in map.items():
+ if settings.get('on', False):
+ tool = getattr(self, name)
+ tool._setup()
+
+ def register(self, point, **kwargs):
+ """
+ Return a decorator which registers the function
+ at the given hook point.
+ """
+ def decorator(func):
+ attr_name = kwargs.get('name', func.__name__)
+ tool = Tool(point, func, **kwargs)
+ setattr(self, attr_name, tool)
+ return func
+ return decorator
+
+
+default_toolbox = _d = Toolbox('tools')
+_d.session_auth = SessionAuthTool(cptools.session_auth)
+_d.allow = Tool('on_start_resource', cptools.allow)
+_d.proxy = Tool('before_request_body', cptools.proxy, priority=30)
+_d.response_headers = Tool('on_start_resource', cptools.response_headers)
+_d.log_tracebacks = Tool('before_error_response', cptools.log_traceback)
+_d.log_headers = Tool('before_error_response', cptools.log_request_headers)
+_d.log_hooks = Tool('on_end_request', cptools.log_hooks, priority=100)
+_d.err_redirect = ErrorTool(cptools.redirect)
+_d.etags = Tool('before_finalize', cptools.validate_etags, priority=75)
+_d.decode = Tool('before_request_body', encoding.decode)
+# the order of encoding, gzip, caching is important
+_d.encode = Tool('before_handler', encoding.ResponseEncoder, priority=70)
+_d.gzip = Tool('before_finalize', encoding.gzip, priority=80)
+_d.staticdir = HandlerTool(static.staticdir)
+_d.staticfile = HandlerTool(static.staticfile)
+_d.sessions = SessionTool()
+_d.xmlrpc = ErrorTool(_xmlrpc.on_error)
+_d.caching = CachingTool('before_handler', _caching.get, 'caching')
+_d.expires = Tool('before_finalize', _caching.expires)
+_d.ignore_headers = Tool('before_request_body', cptools.ignore_headers)
+_d.referer = Tool('before_request_body', cptools.referer)
+_d.trailing_slash = Tool('before_handler', cptools.trailing_slash, priority=60)
+_d.flatten = Tool('before_finalize', cptools.flatten)
+_d.accept = Tool('on_start_resource', cptools.accept)
+_d.redirect = Tool('on_start_resource', cptools.redirect)
+_d.autovary = Tool('on_start_resource', cptools.autovary, priority=0)
+_d.json_in = Tool('before_request_body', jsontools.json_in, priority=30)
+_d.json_out = Tool('before_handler', jsontools.json_out, priority=30)
+_d.auth_basic = Tool('before_handler', auth_basic.basic_auth, priority=1)
+_d.auth_digest = Tool('before_handler', auth_digest.digest_auth, priority=1)
+_d.params = Tool('before_handler', cptools.convert_params, priority=15)
+
+del _d, cptools, encoding, static
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/_cptree.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/_cptree.py
new file mode 100644
index 0000000000000000000000000000000000000000..917c5b1aa4cd308a79b5ed5d63f5abd4594ed7da
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/_cptree.py
@@ -0,0 +1,302 @@
+"""CherryPy Application and Tree objects."""
+
+import os
+
+import cherrypy
+from cherrypy import _cpconfig, _cplogging, _cprequest, _cpwsgi, tools
+from cherrypy.lib import httputil, reprconf
+
+
+class Application(object):
+ """A CherryPy Application.
+
+ Servers and gateways should not instantiate Request objects directly.
+ Instead, they should ask an Application object for a request object.
+
+ An instance of this class may also be used as a WSGI callable
+ (WSGI application object) for itself.
+ """
+
+ root = None
+ """The top-most container of page handlers for this app. Handlers should
+ be arranged in a hierarchy of attributes, matching the expected URI
+ hierarchy; the default dispatcher then searches this hierarchy for a
+ matching handler. When using a dispatcher other than the default,
+ this value may be None."""
+
+ config = {}
+ """A dict of {path: pathconf} pairs, where 'pathconf' is itself a dict
+ of {key: value} pairs."""
+
+ namespaces = reprconf.NamespaceSet()
+ toolboxes = {'tools': cherrypy.tools}
+
+ log = None
+ """A LogManager instance. See _cplogging."""
+
+ wsgiapp = None
+ """A CPWSGIApp instance. See _cpwsgi."""
+
+ request_class = _cprequest.Request
+ response_class = _cprequest.Response
+
+ relative_urls = False
+
+ def __init__(self, root, script_name='', config=None):
+ """Initialize Application with given root."""
+ self.log = _cplogging.LogManager(id(self), cherrypy.log.logger_root)
+ self.root = root
+ self.script_name = script_name
+ self.wsgiapp = _cpwsgi.CPWSGIApp(self)
+
+ self.namespaces = self.namespaces.copy()
+ self.namespaces['log'] = lambda k, v: setattr(self.log, k, v)
+ self.namespaces['wsgi'] = self.wsgiapp.namespace_handler
+
+ self.config = self.__class__.config.copy()
+ if config:
+ self.merge(config)
+
+ def __repr__(self):
+ """Generate a representation of the Application instance."""
+ return '%s.%s(%r, %r)' % (self.__module__, self.__class__.__name__,
+ self.root, self.script_name)
+
+ script_name_doc = """The URI "mount point" for this app. A mount point
+ is that portion of the URI which is constant for all URIs that are
+ serviced by this application; it does not include scheme, host, or proxy
+ ("virtual host") portions of the URI.
+
+ For example, if script_name is "/my/cool/app", then the URL
+ "http://www.example.com/my/cool/app/page1" might be handled by a
+ "page1" method on the root object.
+
+ The value of script_name MUST NOT end in a slash. If the script_name
+ refers to the root of the URI, it MUST be an empty string (not "/").
+
+ If script_name is explicitly set to None, then the script_name will be
+ provided for each call from request.wsgi_environ['SCRIPT_NAME'].
+ """
+
+ @property
+ def script_name(self): # noqa: D401; irrelevant for properties
+ """The URI "mount point" for this app.
+
+ A mount point is that portion of the URI which is constant for all URIs
+ that are serviced by this application; it does not include scheme,
+ host, or proxy ("virtual host") portions of the URI.
+
+ For example, if script_name is "/my/cool/app", then the URL
+ "http://www.example.com/my/cool/app/page1" might be handled by a
+ "page1" method on the root object.
+
+ The value of script_name MUST NOT end in a slash. If the script_name
+ refers to the root of the URI, it MUST be an empty string (not "/").
+
+ If script_name is explicitly set to None, then the script_name will be
+ provided for each call from request.wsgi_environ['SCRIPT_NAME'].
+ """
+ if self._script_name is not None:
+ return self._script_name
+
+ # A `_script_name` with a value of None signals that the script name
+ # should be pulled from WSGI environ.
+ return cherrypy.serving.request.wsgi_environ['SCRIPT_NAME'].rstrip('/')
+
+ @script_name.setter
+ def script_name(self, value):
+ if value:
+ value = value.rstrip('/')
+ self._script_name = value
+
+ def merge(self, config):
+ """Merge the given config into self.config."""
+ _cpconfig.merge(self.config, config)
+
+ # Handle namespaces specified in config.
+ self.namespaces(self.config.get('/', {}))
+
+ def find_config(self, path, key, default=None):
+ """Return the most-specific value for key along path, or default."""
+ trail = path or '/'
+ while trail:
+ nodeconf = self.config.get(trail, {})
+
+ if key in nodeconf:
+ return nodeconf[key]
+
+ lastslash = trail.rfind('/')
+ if lastslash == -1:
+ break
+ elif lastslash == 0 and trail != '/':
+ trail = '/'
+ else:
+ trail = trail[:lastslash]
+
+ return default
+
+ def get_serving(self, local, remote, scheme, sproto):
+ """Create and return a Request and Response object."""
+ req = self.request_class(local, remote, scheme, sproto)
+ req.app = self
+
+ for name, toolbox in self.toolboxes.items():
+ req.namespaces[name] = toolbox
+
+ resp = self.response_class()
+ cherrypy.serving.load(req, resp)
+ cherrypy.engine.publish('acquire_thread')
+ cherrypy.engine.publish('before_request')
+
+ return req, resp
+
+ def release_serving(self):
+ """Release the current serving (request and response)."""
+ req = cherrypy.serving.request
+
+ cherrypy.engine.publish('after_request')
+
+ try:
+ req.close()
+ except Exception:
+ cherrypy.log(traceback=True, severity=40)
+
+ cherrypy.serving.clear()
+
+ def __call__(self, environ, start_response):
+ """Call a WSGI-callable."""
+ return self.wsgiapp(environ, start_response)
+
+
+class Tree(object):
+ """A registry of CherryPy applications, mounted at diverse points.
+
+ An instance of this class may also be used as a WSGI callable
+ (WSGI application object), in which case it dispatches to all
+ mounted apps.
+ """
+
+ apps = {}
+ """
+ A dict of the form {script name: application}, where "script name"
+ is a string declaring the URI mount point (no trailing slash), and
+ "application" is an instance of cherrypy.Application (or an arbitrary
+ WSGI callable if you happen to be using a WSGI server)."""
+
+ def __init__(self):
+ """Initialize registry Tree."""
+ self.apps = {}
+
+ def mount(self, root, script_name='', config=None):
+ """Mount a new app from a root object, script_name, and config.
+
+ root
+ An instance of a "controller class" (a collection of page
+ handler methods) which represents the root of the application.
+ This may also be an Application instance, or None if using
+ a dispatcher other than the default.
+
+ script_name
+ A string containing the "mount point" of the application.
+ This should start with a slash, and be the path portion of the
+ URL at which to mount the given root. For example, if root.index()
+ will handle requests to "http://www.example.com:8080/dept/app1/",
+ then the script_name argument would be "/dept/app1".
+
+ It MUST NOT end in a slash. If the script_name refers to the
+ root of the URI, it MUST be an empty string (not "/").
+
+ config
+ A file or dict containing application config.
+ """
+ if script_name is None:
+ raise TypeError(
+ "The 'script_name' argument may not be None. Application "
+ 'objects may, however, possess a script_name of None (in '
+ 'order to inpect the WSGI environ for SCRIPT_NAME upon each '
+ 'request). You cannot mount such Applications on this Tree; '
+ 'you must pass them to a WSGI server interface directly.')
+
+ # Next line both 1) strips trailing slash and 2) maps "/" -> "".
+ script_name = script_name.rstrip('/')
+
+ if isinstance(root, Application):
+ app = root
+ if script_name != '' and script_name != app.script_name:
+ raise ValueError(
+ 'Cannot specify a different script name and pass an '
+ 'Application instance to cherrypy.mount')
+ script_name = app.script_name
+ else:
+ app = Application(root, script_name)
+
+ # If mounted at "", add favicon.ico
+ needs_favicon = (
+ script_name == ''
+ and root is not None
+ and not hasattr(root, 'favicon_ico')
+ )
+ if needs_favicon:
+ favicon = os.path.join(
+ os.getcwd(),
+ os.path.dirname(__file__),
+ 'favicon.ico',
+ )
+ root.favicon_ico = tools.staticfile.handler(favicon)
+
+ if config:
+ app.merge(config)
+
+ self.apps[script_name] = app
+
+ return app
+
+ def graft(self, wsgi_callable, script_name=''):
+ """Mount a wsgi callable at the given script_name."""
+ # Next line both 1) strips trailing slash and 2) maps "/" -> "".
+ script_name = script_name.rstrip('/')
+ self.apps[script_name] = wsgi_callable
+
+ def script_name(self, path=None):
+ """Return the script_name of the app at the given path, or None.
+
+ If path is None, cherrypy.request is used.
+ """
+ if path is None:
+ try:
+ request = cherrypy.serving.request
+ path = httputil.urljoin(request.script_name,
+ request.path_info)
+ except AttributeError:
+ return None
+
+ while True:
+ if path in self.apps:
+ return path
+
+ if path == '':
+ return None
+
+ # Move one node up the tree and try again.
+ path = path[:path.rfind('/')]
+
+ def __call__(self, environ, start_response):
+ """Pre-initialize WSGI env and call WSGI-callable."""
+ # If you're calling this, then you're probably setting SCRIPT_NAME
+ # to '' (some WSGI servers always set SCRIPT_NAME to '').
+ # Try to look up the app using the full path.
+ env1x = environ
+ path = httputil.urljoin(env1x.get('SCRIPT_NAME', ''),
+ env1x.get('PATH_INFO', ''))
+ sn = self.script_name(path or '/')
+ if sn is None:
+ start_response('404 Not Found', [])
+ return []
+
+ app = self.apps[sn]
+
+ # Correct the SCRIPT_NAME and PATH_INFO environ entries.
+ environ = environ.copy()
+ environ['SCRIPT_NAME'] = sn
+ environ['PATH_INFO'] = path[len(sn.rstrip('/')):]
+ return app(environ, start_response)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/_cpwsgi.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/_cpwsgi.py
new file mode 100644
index 0000000000000000000000000000000000000000..b4f55fd6ad169b64b7c7cd23b007885e022c7360
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/_cpwsgi.py
@@ -0,0 +1,451 @@
+"""WSGI interface (see PEP 333 and 3333).
+
+Note that WSGI environ keys and values are 'native strings'; that is,
+whatever the type of "" is. For Python 2, that's a byte string; for Python 3,
+it's a unicode string. But PEP 3333 says: "even if Python's str type is
+actually Unicode "under the hood", the content of native strings must
+still be translatable to bytes via the Latin-1 encoding!"
+"""
+
+import sys as _sys
+import io
+
+import cherrypy as _cherrypy
+from cherrypy._cpcompat import ntou
+from cherrypy import _cperror
+from cherrypy.lib import httputil
+from cherrypy.lib import is_closable_iterator
+
+
+def downgrade_wsgi_ux_to_1x(environ):
+ """Return a new environ dict for WSGI 1.x from the given WSGI u.x environ.
+ """
+ env1x = {}
+
+ url_encoding = environ[ntou('wsgi.url_encoding')]
+ for k, v in environ.copy().items():
+ if k in [ntou('PATH_INFO'), ntou('SCRIPT_NAME'), ntou('QUERY_STRING')]:
+ v = v.encode(url_encoding)
+ elif isinstance(v, str):
+ v = v.encode('ISO-8859-1')
+ env1x[k.encode('ISO-8859-1')] = v
+
+ return env1x
+
+
+class VirtualHost(object):
+
+ """Select a different WSGI application based on the Host header.
+
+ This can be useful when running multiple sites within one CP server.
+ It allows several domains to point to different applications. For example::
+
+ root = Root()
+ RootApp = cherrypy.Application(root)
+ Domain2App = cherrypy.Application(root)
+ SecureApp = cherrypy.Application(Secure())
+
+ vhost = cherrypy._cpwsgi.VirtualHost(
+ RootApp,
+ domains={
+ 'www.domain2.example': Domain2App,
+ 'www.domain2.example:443': SecureApp,
+ },
+ )
+
+ cherrypy.tree.graft(vhost)
+ """
+ default = None
+ """Required. The default WSGI application."""
+
+ use_x_forwarded_host = True
+ """If True (the default), any "X-Forwarded-Host"
+ request header will be used instead of the "Host" header. This
+ is commonly added by HTTP servers (such as Apache) when proxying."""
+
+ domains = {}
+ """A dict of {host header value: application} pairs.
+ The incoming "Host" request header is looked up in this dict,
+ and, if a match is found, the corresponding WSGI application
+ will be called instead of the default. Note that you often need
+ separate entries for "example.com" and "www.example.com".
+ In addition, "Host" headers may contain the port number.
+ """
+
+ def __init__(self, default, domains=None, use_x_forwarded_host=True):
+ self.default = default
+ self.domains = domains or {}
+ self.use_x_forwarded_host = use_x_forwarded_host
+
+ def __call__(self, environ, start_response):
+ domain = environ.get('HTTP_HOST', '')
+ if self.use_x_forwarded_host:
+ domain = environ.get('HTTP_X_FORWARDED_HOST', domain)
+
+ nextapp = self.domains.get(domain)
+ if nextapp is None:
+ nextapp = self.default
+ return nextapp(environ, start_response)
+
+
+class InternalRedirector(object):
+
+ """WSGI middleware that handles raised cherrypy.InternalRedirect."""
+
+ def __init__(self, nextapp, recursive=False):
+ self.nextapp = nextapp
+ self.recursive = recursive
+
+ def __call__(self, environ, start_response):
+ redirections = []
+ while True:
+ environ = environ.copy()
+ try:
+ return self.nextapp(environ, start_response)
+ except _cherrypy.InternalRedirect:
+ ir = _sys.exc_info()[1]
+ sn = environ.get('SCRIPT_NAME', '')
+ path = environ.get('PATH_INFO', '')
+ qs = environ.get('QUERY_STRING', '')
+
+ # Add the *previous* path_info + qs to redirections.
+ old_uri = sn + path
+ if qs:
+ old_uri += '?' + qs
+ redirections.append(old_uri)
+
+ if not self.recursive:
+ # Check to see if the new URI has been redirected to
+ # already
+ new_uri = sn + ir.path
+ if ir.query_string:
+ new_uri += '?' + ir.query_string
+ if new_uri in redirections:
+ ir.request.close()
+ tmpl = (
+ 'InternalRedirector visited the same URL twice: %r'
+ )
+ raise RuntimeError(tmpl % new_uri)
+
+ # Munge the environment and try again.
+ environ['REQUEST_METHOD'] = 'GET'
+ environ['PATH_INFO'] = ir.path
+ environ['QUERY_STRING'] = ir.query_string
+ environ['wsgi.input'] = io.BytesIO()
+ environ['CONTENT_LENGTH'] = '0'
+ environ['cherrypy.previous_request'] = ir.request
+
+
+class ExceptionTrapper(object):
+
+ """WSGI middleware that traps exceptions."""
+
+ def __init__(self, nextapp, throws=(KeyboardInterrupt, SystemExit)):
+ self.nextapp = nextapp
+ self.throws = throws
+
+ def __call__(self, environ, start_response):
+ return _TrappedResponse(
+ self.nextapp,
+ environ,
+ start_response,
+ self.throws
+ )
+
+
+class _TrappedResponse(object):
+
+ response = iter([])
+
+ def __init__(self, nextapp, environ, start_response, throws):
+ self.nextapp = nextapp
+ self.environ = environ
+ self.start_response = start_response
+ self.throws = throws
+ self.started_response = False
+ self.response = self.trap(
+ self.nextapp, self.environ, self.start_response,
+ )
+ self.iter_response = iter(self.response)
+
+ def __iter__(self):
+ self.started_response = True
+ return self
+
+ def __next__(self):
+ return self.trap(next, self.iter_response)
+
+ def close(self):
+ if hasattr(self.response, 'close'):
+ self.response.close()
+
+ def trap(self, func, *args, **kwargs):
+ try:
+ return func(*args, **kwargs)
+ except self.throws:
+ raise
+ except StopIteration:
+ raise
+ except Exception:
+ tb = _cperror.format_exc()
+ _cherrypy.log(tb, severity=40)
+ if not _cherrypy.request.show_tracebacks:
+ tb = ''
+ s, h, b = _cperror.bare_error(tb)
+ if True:
+ # What fun.
+ s = s.decode('ISO-8859-1')
+ h = [
+ (k.decode('ISO-8859-1'), v.decode('ISO-8859-1'))
+ for k, v in h
+ ]
+ if self.started_response:
+ # Empty our iterable (so future calls raise StopIteration)
+ self.iter_response = iter([])
+ else:
+ self.iter_response = iter(b)
+
+ try:
+ self.start_response(s, h, _sys.exc_info())
+ except Exception:
+ # "The application must not trap any exceptions raised by
+ # start_response, if it called start_response with exc_info.
+ # Instead, it should allow such exceptions to propagate
+ # back to the server or gateway."
+ # But we still log and call close() to clean up ourselves.
+ _cherrypy.log(traceback=True, severity=40)
+ raise
+
+ if self.started_response:
+ return b''.join(b)
+ else:
+ return b
+
+
+# WSGI-to-CP Adapter #
+
+
+class AppResponse(object):
+
+ """WSGI response iterable for CherryPy applications."""
+
+ def __init__(self, environ, start_response, cpapp):
+ self.cpapp = cpapp
+ try:
+ self.environ = environ
+ self.run()
+
+ r = _cherrypy.serving.response
+
+ outstatus = r.output_status
+ if not isinstance(outstatus, bytes):
+ raise TypeError('response.output_status is not a byte string.')
+
+ outheaders = []
+ for k, v in r.header_list:
+ if not isinstance(k, bytes):
+ tmpl = 'response.header_list key %r is not a byte string.'
+ raise TypeError(tmpl % k)
+ if not isinstance(v, bytes):
+ tmpl = (
+ 'response.header_list value %r is not a byte string.'
+ )
+ raise TypeError(tmpl % v)
+ outheaders.append((k, v))
+
+ if True:
+ # According to PEP 3333, when using Python 3, the response
+ # status and headers must be bytes masquerading as unicode;
+ # that is, they must be of type "str" but are restricted to
+ # code points in the "latin-1" set.
+ outstatus = outstatus.decode('ISO-8859-1')
+ outheaders = [
+ (k.decode('ISO-8859-1'), v.decode('ISO-8859-1'))
+ for k, v in outheaders
+ ]
+
+ self.iter_response = iter(r.body)
+ self.write = start_response(outstatus, outheaders)
+ except BaseException:
+ self.close()
+ raise
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ return next(self.iter_response)
+
+ def close(self):
+ """Close and de-reference the current request and response. (Core)"""
+ streaming = _cherrypy.serving.response.stream
+ self.cpapp.release_serving()
+
+ # We avoid the expense of examining the iterator to see if it's
+ # closable unless we are streaming the response, as that's the
+ # only situation where we are going to have an iterator which
+ # may not have been exhausted yet.
+ if streaming and is_closable_iterator(self.iter_response):
+ iter_close = self.iter_response.close
+ try:
+ iter_close()
+ except Exception:
+ _cherrypy.log(traceback=True, severity=40)
+
+ def run(self):
+ """Create a Request object using environ."""
+ env = self.environ.get
+
+ local = httputil.Host(
+ '',
+ int(env('SERVER_PORT', 80) or -1),
+ env('SERVER_NAME', ''),
+ )
+ remote = httputil.Host(
+ env('REMOTE_ADDR', ''),
+ int(env('REMOTE_PORT', -1) or -1),
+ env('REMOTE_HOST', ''),
+ )
+ scheme = env('wsgi.url_scheme')
+ sproto = env('ACTUAL_SERVER_PROTOCOL', 'HTTP/1.1')
+ request, resp = self.cpapp.get_serving(local, remote, scheme, sproto)
+
+ # LOGON_USER is served by IIS, and is the name of the
+ # user after having been mapped to a local account.
+ # Both IIS and Apache set REMOTE_USER, when possible.
+ request.login = env('LOGON_USER') or env('REMOTE_USER') or None
+ request.multithread = self.environ['wsgi.multithread']
+ request.multiprocess = self.environ['wsgi.multiprocess']
+ request.wsgi_environ = self.environ
+ request.prev = env('cherrypy.previous_request', None)
+
+ meth = self.environ['REQUEST_METHOD']
+
+ path = httputil.urljoin(
+ self.environ.get('SCRIPT_NAME', ''),
+ self.environ.get('PATH_INFO', ''),
+ )
+ qs = self.environ.get('QUERY_STRING', '')
+
+ path, qs = self.recode_path_qs(path, qs) or (path, qs)
+
+ rproto = self.environ.get('SERVER_PROTOCOL')
+ headers = self.translate_headers(self.environ)
+ rfile = self.environ['wsgi.input']
+ request.run(meth, path, qs, rproto, headers, rfile)
+
+ headerNames = {
+ 'HTTP_CGI_AUTHORIZATION': 'Authorization',
+ 'CONTENT_LENGTH': 'Content-Length',
+ 'CONTENT_TYPE': 'Content-Type',
+ 'REMOTE_HOST': 'Remote-Host',
+ 'REMOTE_ADDR': 'Remote-Addr',
+ }
+
+ def recode_path_qs(self, path, qs):
+ # This isn't perfect; if the given PATH_INFO is in the
+ # wrong encoding, it may fail to match the appropriate config
+ # section URI. But meh.
+ old_enc = self.environ.get('wsgi.url_encoding', 'ISO-8859-1')
+ new_enc = self.cpapp.find_config(
+ self.environ.get('PATH_INFO', ''),
+ 'request.uri_encoding', 'utf-8',
+ )
+ if new_enc.lower() == old_enc.lower():
+ return
+
+ # Even though the path and qs are unicode, the WSGI server
+ # is required by PEP 3333 to coerce them to ISO-8859-1
+ # masquerading as unicode. So we have to encode back to
+ # bytes and then decode again using the "correct" encoding.
+ try:
+ return (
+ path.encode(old_enc).decode(new_enc),
+ qs.encode(old_enc).decode(new_enc),
+ )
+ except (UnicodeEncodeError, UnicodeDecodeError):
+ # Just pass them through without transcoding and hope.
+ pass
+
+ def translate_headers(self, environ):
+ """Translate CGI-environ header names to HTTP header names."""
+ for cgiName in environ:
+ # We assume all incoming header keys are uppercase already.
+ if cgiName in self.headerNames:
+ yield self.headerNames[cgiName], environ[cgiName]
+ elif cgiName[:5] == 'HTTP_':
+ # Hackish attempt at recovering original header names.
+ translatedHeader = cgiName[5:].replace('_', '-')
+ yield translatedHeader, environ[cgiName]
+
+
+class CPWSGIApp(object):
+
+ """A WSGI application object for a CherryPy Application."""
+
+ pipeline = [
+ ('ExceptionTrapper', ExceptionTrapper),
+ ('InternalRedirector', InternalRedirector),
+ ]
+ """A list of (name, wsgiapp) pairs. Each 'wsgiapp' MUST be a
+ constructor that takes an initial, positional 'nextapp' argument,
+ plus optional keyword arguments, and returns a WSGI application
+ (that takes environ and start_response arguments). The 'name' can
+ be any you choose, and will correspond to keys in self.config."""
+
+ head = None
+ """Rather than nest all apps in the pipeline on each call, it's only
+ done the first time, and the result is memoized into self.head. Set
+ this to None again if you change self.pipeline after calling self."""
+
+ config = {}
+ """A dict whose keys match names listed in the pipeline. Each
+ value is a further dict which will be passed to the corresponding
+ named WSGI callable (from the pipeline) as keyword arguments."""
+
+ response_class = AppResponse
+ """The class to instantiate and return as the next app in the WSGI chain.
+ """
+
+ def __init__(self, cpapp, pipeline=None):
+ self.cpapp = cpapp
+ self.pipeline = self.pipeline[:]
+ if pipeline:
+ self.pipeline.extend(pipeline)
+ self.config = self.config.copy()
+
+ def tail(self, environ, start_response):
+ """WSGI application callable for the actual CherryPy application.
+
+ You probably shouldn't call this; call self.__call__ instead,
+ so that any WSGI middleware in self.pipeline can run first.
+ """
+ return self.response_class(environ, start_response, self.cpapp)
+
+ def __call__(self, environ, start_response):
+ head = self.head
+ if head is None:
+ # Create and nest the WSGI apps in our pipeline (in reverse order).
+ # Then memoize the result in self.head.
+ head = self.tail
+ for name, callable in self.pipeline[::-1]:
+ conf = self.config.get(name, {})
+ head = callable(head, **conf)
+ self.head = head
+ return head(environ, start_response)
+
+ def namespace_handler(self, k, v):
+ """Config handler for the 'wsgi' namespace."""
+ if k == 'pipeline':
+ # Note this allows multiple 'wsgi.pipeline' config entries
+ # (but each entry will be processed in a 'random' order).
+ # It should also allow developers to set default middleware
+ # in code (passed to self.__init__) that deployers can add to
+ # (but not remove) via config.
+ self.pipeline.extend(v)
+ elif k == 'response_class':
+ self.response_class = v
+ else:
+ name, arg = k.split('.', 1)
+ bucket = self.config.setdefault(name, {})
+ bucket[arg] = v
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/_cpwsgi_server.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/_cpwsgi_server.py
new file mode 100644
index 0000000000000000000000000000000000000000..11dd846af4dec04c1ec6800cf9d4607d753e297c
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/_cpwsgi_server.py
@@ -0,0 +1,110 @@
+"""
+WSGI server interface (see PEP 333).
+
+This adds some CP-specific bits to the framework-agnostic cheroot package.
+"""
+import sys
+
+import cheroot.wsgi
+import cheroot.server
+
+import cherrypy
+
+
+class CPWSGIHTTPRequest(cheroot.server.HTTPRequest):
+ """Wrapper for cheroot.server.HTTPRequest.
+
+ This is a layer which preserves the URI parsing mode that was
+ used before Cheroot v5.8.0.
+ """
+
+ def __init__(self, server, conn):
+ """Initialize HTTP request container instance.
+
+ Args:
+ server (cheroot.server.HTTPServer):
+ web server object receiving this request
+ conn (cheroot.server.HTTPConnection):
+ HTTP connection object for this request
+ """
+ super(CPWSGIHTTPRequest, self).__init__(
+ server, conn, proxy_mode=True
+ )
+
+
+class CPWSGIServer(cheroot.wsgi.Server):
+ """Wrapper for cheroot.wsgi.Server.
+
+ cheroot has been designed to not reference CherryPy in any way,
+ so that it can be used in other frameworks and applications. Therefore,
+ we wrap it here, so we can set our own mount points from cherrypy.tree
+ and apply some attributes from config -> cherrypy.server -> wsgi.Server.
+ """
+
+ fmt = 'CherryPy/{cherrypy.__version__} {cheroot.wsgi.Server.version}'
+ version = fmt.format(**globals())
+
+ def __init__(self, server_adapter=cherrypy.server):
+ """Initialize CPWSGIServer instance.
+
+ Args:
+ server_adapter (cherrypy._cpserver.Server): ...
+ """
+ self.server_adapter = server_adapter
+ self.max_request_header_size = (
+ self.server_adapter.max_request_header_size or 0
+ )
+ self.max_request_body_size = (
+ self.server_adapter.max_request_body_size or 0
+ )
+
+ server_name = (self.server_adapter.socket_host or
+ self.server_adapter.socket_file or
+ None)
+
+ self.wsgi_version = self.server_adapter.wsgi_version
+
+ super(CPWSGIServer, self).__init__(
+ server_adapter.bind_addr, cherrypy.tree,
+ self.server_adapter.thread_pool,
+ server_name,
+ max=self.server_adapter.thread_pool_max,
+ request_queue_size=self.server_adapter.socket_queue_size,
+ timeout=self.server_adapter.socket_timeout,
+ shutdown_timeout=self.server_adapter.shutdown_timeout,
+ accepted_queue_size=self.server_adapter.accepted_queue_size,
+ accepted_queue_timeout=self.server_adapter.accepted_queue_timeout,
+ peercreds_enabled=self.server_adapter.peercreds,
+ peercreds_resolve_enabled=self.server_adapter.peercreds_resolve,
+ )
+ self.ConnectionClass.RequestHandlerClass = CPWSGIHTTPRequest
+
+ self.protocol = self.server_adapter.protocol_version
+ self.nodelay = self.server_adapter.nodelay
+
+ if sys.version_info >= (3, 0):
+ ssl_module = self.server_adapter.ssl_module or 'builtin'
+ else:
+ ssl_module = self.server_adapter.ssl_module or 'pyopenssl'
+ if self.server_adapter.ssl_context:
+ adapter_class = cheroot.server.get_ssl_adapter_class(ssl_module)
+ self.ssl_adapter = adapter_class(
+ self.server_adapter.ssl_certificate,
+ self.server_adapter.ssl_private_key,
+ self.server_adapter.ssl_certificate_chain,
+ self.server_adapter.ssl_ciphers)
+ self.ssl_adapter.context = self.server_adapter.ssl_context
+ elif self.server_adapter.ssl_certificate:
+ adapter_class = cheroot.server.get_ssl_adapter_class(ssl_module)
+ self.ssl_adapter = adapter_class(
+ self.server_adapter.ssl_certificate,
+ self.server_adapter.ssl_private_key,
+ self.server_adapter.ssl_certificate_chain,
+ self.server_adapter.ssl_ciphers)
+
+ self.stats['Enabled'] = getattr(
+ self.server_adapter, 'statistics', False)
+
+ def error_log(self, msg='', level=20, traceback=False):
+ """Write given message to the error log."""
+ cherrypy.engine.log(msg, level, traceback)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/_helper.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/_helper.py
new file mode 100644
index 0000000000000000000000000000000000000000..d57cd1f962e6323bce518cd1d586f55c00ad0c02
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/_helper.py
@@ -0,0 +1,348 @@
+"""Helper functions for CP apps."""
+
+import urllib.parse
+
+from cherrypy._cpcompat import text_or_bytes
+
+import cherrypy
+
+
+def expose(func=None, alias=None):
+ """Expose the function or class.
+
+ Optionally provide an alias or set of aliases.
+ """
+ def expose_(func):
+ func.exposed = True
+ if alias is not None:
+ if isinstance(alias, text_or_bytes):
+ parents[alias.replace('.', '_')] = func
+ else:
+ for a in alias:
+ parents[a.replace('.', '_')] = func
+ return func
+
+ import sys
+ import types
+ decoratable_types = types.FunctionType, types.MethodType, type,
+ if isinstance(func, decoratable_types):
+ if alias is None:
+ # @expose
+ func.exposed = True
+ return func
+ else:
+ # func = expose(func, alias)
+ parents = sys._getframe(1).f_locals
+ return expose_(func)
+ elif func is None:
+ if alias is None:
+ # @expose()
+ parents = sys._getframe(1).f_locals
+ return expose_
+ else:
+ # @expose(alias="alias") or
+ # @expose(alias=["alias1", "alias2"])
+ parents = sys._getframe(1).f_locals
+ return expose_
+ else:
+ # @expose("alias") or
+ # @expose(["alias1", "alias2"])
+ parents = sys._getframe(1).f_locals
+ alias = func
+ return expose_
+
+
+def popargs(*args, **kwargs):
+ """Decorate _cp_dispatch.
+
+ (cherrypy.dispatch.Dispatcher.dispatch_method_name)
+
+ Optional keyword argument: handler=(Object or Function)
+
+ Provides a _cp_dispatch function that pops off path segments into
+ cherrypy.request.params under the names specified. The dispatch
+ is then forwarded on to the next vpath element.
+
+ Note that any existing (and exposed) member function of the class that
+ popargs is applied to will override that value of the argument. For
+ instance, if you have a method named "list" on the class decorated with
+ popargs, then accessing "/list" will call that function instead of popping
+ it off as the requested parameter. This restriction applies to all
+ _cp_dispatch functions. The only way around this restriction is to create
+ a "blank class" whose only function is to provide _cp_dispatch.
+
+ If there are path elements after the arguments, or more arguments
+ are requested than are available in the vpath, then the 'handler'
+ keyword argument specifies the next object to handle the parameterized
+ request. If handler is not specified or is None, then self is used.
+ If handler is a function rather than an instance, then that function
+ will be called with the args specified and the return value from that
+ function used as the next object INSTEAD of adding the parameters to
+ cherrypy.request.args.
+
+ This decorator may be used in one of two ways:
+
+ As a class decorator:
+
+ .. code-block:: python
+
+ @cherrypy.popargs('year', 'month', 'day')
+ class Blog:
+ def index(self, year=None, month=None, day=None):
+ #Process the parameters here; any url like
+ #/, /2009, /2009/12, or /2009/12/31
+ #will fill in the appropriate parameters.
+
+ def create(self):
+ #This link will still be available at /create.
+ #Defined functions take precedence over arguments.
+
+ Or as a member of a class:
+
+ .. code-block:: python
+
+ class Blog:
+ _cp_dispatch = cherrypy.popargs('year', 'month', 'day')
+ #...
+
+ The handler argument may be used to mix arguments with built in functions.
+ For instance, the following setup allows different activities at the
+ day, month, and year level:
+
+ .. code-block:: python
+
+ class DayHandler:
+ def index(self, year, month, day):
+ #Do something with this day; probably list entries
+
+ def delete(self, year, month, day):
+ #Delete all entries for this day
+
+ @cherrypy.popargs('day', handler=DayHandler())
+ class MonthHandler:
+ def index(self, year, month):
+ #Do something with this month; probably list entries
+
+ def delete(self, year, month):
+ #Delete all entries for this month
+
+ @cherrypy.popargs('month', handler=MonthHandler())
+ class YearHandler:
+ def index(self, year):
+ #Do something with this year
+
+ #...
+
+ @cherrypy.popargs('year', handler=YearHandler())
+ class Root:
+ def index(self):
+ #...
+
+ """
+ # Since keyword arg comes after *args, we have to process it ourselves
+ # for lower versions of python.
+
+ handler = None
+ handler_call = False
+ for k, v in kwargs.items():
+ if k == 'handler':
+ handler = v
+ else:
+ tm = "cherrypy.popargs() got an unexpected keyword argument '{0}'"
+ raise TypeError(tm.format(k))
+
+ import inspect
+
+ if handler is not None \
+ and (hasattr(handler, '__call__') or inspect.isclass(handler)):
+ handler_call = True
+
+ def decorated(cls_or_self=None, vpath=None):
+ if inspect.isclass(cls_or_self):
+ # cherrypy.popargs is a class decorator
+ cls = cls_or_self
+ name = cherrypy.dispatch.Dispatcher.dispatch_method_name
+ setattr(cls, name, decorated)
+ return cls
+
+ # We're in the actual function
+ self = cls_or_self
+ parms = {}
+ for arg in args:
+ if not vpath:
+ break
+ parms[arg] = vpath.pop(0)
+
+ if handler is not None:
+ if handler_call:
+ return handler(**parms)
+ else:
+ cherrypy.request.params.update(parms)
+ return handler
+
+ cherrypy.request.params.update(parms)
+
+ # If we are the ultimate handler, then to prevent our _cp_dispatch
+ # from being called again, we will resolve remaining elements through
+ # getattr() directly.
+ if vpath:
+ return getattr(self, vpath.pop(0), None)
+ else:
+ return self
+
+ return decorated
+
+
+def url(path='', qs='', script_name=None, base=None, relative=None):
+ """Create an absolute URL for the given path.
+
+ If 'path' starts with a slash ('/'), this will return
+ (base + script_name + path + qs).
+ If it does not start with a slash, this returns
+ (base + script_name [+ request.path_info] + path + qs).
+
+ If script_name is None, cherrypy.request will be used
+ to find a script_name, if available.
+
+ If base is None, cherrypy.request.base will be used (if available).
+ Note that you can use cherrypy.tools.proxy to change this.
+
+ Finally, note that this function can be used to obtain an absolute URL
+ for the current request path (minus the querystring) by passing no args.
+ If you call url(qs=cherrypy.request.query_string), you should get the
+ original browser URL (assuming no internal redirections).
+
+ If relative is None or not provided, request.app.relative_urls will
+ be used (if available, else False). If False, the output will be an
+ absolute URL (including the scheme, host, vhost, and script_name).
+ If True, the output will instead be a URL that is relative to the
+ current request path, perhaps including '..' atoms. If relative is
+ the string 'server', the output will instead be a URL that is
+ relative to the server root; i.e., it will start with a slash.
+ """
+ if isinstance(qs, (tuple, list, dict)):
+ qs = urllib.parse.urlencode(qs)
+ if qs:
+ qs = '?' + qs
+
+ if cherrypy.request.app:
+ if not path.startswith('/'):
+ # Append/remove trailing slash from path_info as needed
+ # (this is to support mistyped URL's without redirecting;
+ # if you want to redirect, use tools.trailing_slash).
+ pi = cherrypy.request.path_info
+ if cherrypy.request.is_index is True:
+ if not pi.endswith('/'):
+ pi = pi + '/'
+ elif cherrypy.request.is_index is False:
+ if pi.endswith('/') and pi != '/':
+ pi = pi[:-1]
+
+ if path == '':
+ path = pi
+ else:
+ path = urllib.parse.urljoin(pi, path)
+
+ if script_name is None:
+ script_name = cherrypy.request.script_name
+ if base is None:
+ base = cherrypy.request.base
+
+ newurl = base + script_name + normalize_path(path) + qs
+ else:
+ # No request.app (we're being called outside a request).
+ # We'll have to guess the base from server.* attributes.
+ # This will produce very different results from the above
+ # if you're using vhosts or tools.proxy.
+ if base is None:
+ base = cherrypy.server.base()
+
+ path = (script_name or '') + path
+ newurl = base + normalize_path(path) + qs
+
+ # At this point, we should have a fully-qualified absolute URL.
+
+ if relative is None:
+ relative = getattr(cherrypy.request.app, 'relative_urls', False)
+
+ # See http://www.ietf.org/rfc/rfc2396.txt
+ if relative == 'server':
+ # "A relative reference beginning with a single slash character is
+ # termed an absolute-path reference, as defined by <abs_path>..."
+ # This is also sometimes called "server-relative".
+ newurl = '/' + '/'.join(newurl.split('/', 3)[3:])
+ elif relative:
+ # "A relative reference that does not begin with a scheme name
+ # or a slash character is termed a relative-path reference."
+ old = url(relative=False).split('/')[:-1]
+ new = newurl.split('/')
+ while old and new:
+ a, b = old[0], new[0]
+ if a != b:
+ break
+ old.pop(0)
+ new.pop(0)
+ new = (['..'] * len(old)) + new
+ newurl = '/'.join(new)
+
+ return newurl
+
+
+def normalize_path(path):
+ """Resolve given path from relative into absolute form."""
+ if './' not in path:
+ return path
+
+ # Normalize the URL by removing ./ and ../
+ atoms = []
+ for atom in path.split('/'):
+ if atom == '.':
+ pass
+ elif atom == '..':
+ # Don't pop from empty list
+ # (i.e. ignore redundant '..')
+ if atoms:
+ atoms.pop()
+ elif atom:
+ atoms.append(atom)
+
+ newpath = '/'.join(atoms)
+ # Preserve leading '/'
+ if path.startswith('/'):
+ newpath = '/' + newpath
+
+ return newpath
+
+
+####
+# Inlined from jaraco.classes 1.4.3
+# Ref #1673
+class _ClassPropertyDescriptor(object):
+ """Descriptor for a read-only class-based property.
+
+ Turns a classmethod-decorated func into a read-only property of that class
+ type (means the value cannot be set).
+ """
+
+ def __init__(self, fget, fset=None):
+ """Initialize a class property descriptor.
+
+ Instantiated by ``_helper.classproperty``.
+ """
+ self.fget = fget
+ self.fset = fset
+
+ def __get__(self, obj, klass=None):
+ """Return property value."""
+ if klass is None:
+ klass = type(obj)
+ return self.fget.__get__(obj, klass)()
+
+
+def classproperty(func): # noqa: D401; irrelevant for properties
+ """Decorator like classmethod to implement a static class property."""
+ if not isinstance(func, (classmethod, staticmethod)):
+ func = classmethod(func)
+
+ return _ClassPropertyDescriptor(func)
+####
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/_json.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/_json.py
new file mode 100644
index 0000000000000000000000000000000000000000..0c2a0f0e0a127292b37bfb70cd184b8fd25fc2dd
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/_json.py
@@ -0,0 +1,25 @@
+"""
+JSON support.
+
+Expose preferred json module as json and provide encode/decode
+convenience functions.
+"""
+
+try:
+ # Prefer simplejson
+ import simplejson as json
+except ImportError:
+ import json
+
+
+__all__ = ['json', 'encode', 'decode']
+
+
+decode = json.JSONDecoder().decode
+_encode = json.JSONEncoder().iterencode
+
+
+def encode(value):
+ """Encode to bytes."""
+ for chunk in _encode(value):
+ yield chunk.encode('utf-8')
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/daemon.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/daemon.py
new file mode 100644
index 0000000000000000000000000000000000000000..74488c06b565e0ea3a2939c66634ac4a23dd5b0f
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/daemon.py
@@ -0,0 +1,107 @@
+"""The CherryPy daemon."""
+
+import sys
+
+import cherrypy
+from cherrypy.process import plugins, servers
+from cherrypy import Application
+
+
+def start(configfiles=None, daemonize=False, environment=None,
+ fastcgi=False, scgi=False, pidfile=None, imports=None,
+ cgi=False):
+ """Subscribe all engine plugins and start the engine."""
+ sys.path = [''] + sys.path
+ for i in imports or []:
+ exec('import %s' % i)
+
+ for c in configfiles or []:
+ cherrypy.config.update(c)
+ # If there's only one app mounted, merge config into it.
+ if len(cherrypy.tree.apps) == 1:
+ for app in cherrypy.tree.apps.values():
+ if isinstance(app, Application):
+ app.merge(c)
+
+ engine = cherrypy.engine
+
+ if environment is not None:
+ cherrypy.config.update({'environment': environment})
+
+ # Only daemonize if asked to.
+ if daemonize:
+ # Don't print anything to stdout/sterr.
+ cherrypy.config.update({'log.screen': False})
+ plugins.Daemonizer(engine).subscribe()
+
+ if pidfile:
+ plugins.PIDFile(engine, pidfile).subscribe()
+
+ if hasattr(engine, 'signal_handler'):
+ engine.signal_handler.subscribe()
+ if hasattr(engine, 'console_control_handler'):
+ engine.console_control_handler.subscribe()
+
+ if (fastcgi and (scgi or cgi)) or (scgi and cgi):
+ cherrypy.log.error('You may only specify one of the cgi, fastcgi, and '
+ 'scgi options.', 'ENGINE')
+ sys.exit(1)
+ elif fastcgi or scgi or cgi:
+ # Turn off autoreload when using *cgi.
+ cherrypy.config.update({'engine.autoreload.on': False})
+ # Turn off the default HTTP server (which is subscribed by default).
+ cherrypy.server.unsubscribe()
+
+ addr = cherrypy.server.bind_addr
+ cls = (
+ servers.FlupFCGIServer if fastcgi else
+ servers.FlupSCGIServer if scgi else
+ servers.FlupCGIServer
+ )
+ f = cls(application=cherrypy.tree, bindAddress=addr)
+ s = servers.ServerAdapter(engine, httpserver=f, bind_addr=addr)
+ s.subscribe()
+
+ # Always start the engine; this will start all other services
+ try:
+ engine.start()
+ except Exception:
+ # Assume the error has been logged already via bus.log.
+ sys.exit(1)
+ else:
+ engine.block()
+
+
+def run():
+ """Run cherryd CLI."""
+ from optparse import OptionParser
+
+ p = OptionParser()
+ p.add_option('-c', '--config', action='append', dest='config',
+ help='specify config file(s)')
+ p.add_option('-d', action='store_true', dest='daemonize',
+ help='run the server as a daemon')
+ p.add_option('-e', '--environment', dest='environment', default=None,
+ help='apply the given config environment')
+ p.add_option('-f', action='store_true', dest='fastcgi',
+ help='start a fastcgi server instead of the default HTTP '
+ 'server')
+ p.add_option('-s', action='store_true', dest='scgi',
+ help='start a scgi server instead of the default HTTP server')
+ p.add_option('-x', action='store_true', dest='cgi',
+ help='start a cgi server instead of the default HTTP server')
+ p.add_option('-i', '--import', action='append', dest='imports',
+ help='specify modules to import')
+ p.add_option('-p', '--pidfile', dest='pidfile', default=None,
+ help='store the process id in the given file')
+ p.add_option('-P', '--Path', action='append', dest='Path',
+ help='add the given paths to sys.path')
+ options, args = p.parse_args()
+
+ if options.Path:
+ for p in options.Path:
+ sys.path.insert(0, p)
+
+ start(options.config, options.daemonize,
+ options.environment, options.fastcgi, options.scgi,
+ options.pidfile, options.imports, options.cgi)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/favicon.ico b/monEnvTP/lib/python3.8/site-packages/cherrypy/favicon.ico
new file mode 100644
index 0000000000000000000000000000000000000000..f0d7e61badad3f332cf1e663efb97c0b5be80f5e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/favicon.ico differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__init__.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..0edaaf20c73864b3242fdb6243b4d02b9283cf6f
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__init__.py
@@ -0,0 +1,101 @@
+"""CherryPy Library."""
+
+
+def is_iterator(obj):
+ """Detect if the object provided implements the iterator protocol.
+
+ (i.e. like a generator).
+
+ This will return False for objects which are iterable,
+ but not iterators themselves.
+ """
+ from types import GeneratorType
+ if isinstance(obj, GeneratorType):
+ return True
+ elif not hasattr(obj, '__iter__'):
+ return False
+ else:
+ # Types which implement the protocol must return themselves when
+ # invoking 'iter' upon them.
+ return iter(obj) is obj
+
+
+def is_closable_iterator(obj):
+ """Detect if the given object is both closable and iterator."""
+ # Not an iterator.
+ if not is_iterator(obj):
+ return False
+
+ # A generator - the easiest thing to deal with.
+ import inspect
+ if inspect.isgenerator(obj):
+ return True
+
+ # A custom iterator. Look for a close method...
+ if not (hasattr(obj, 'close') and callable(obj.close)):
+ return False
+
+ # ... which doesn't require any arguments.
+ try:
+ inspect.getcallargs(obj.close)
+ except TypeError:
+ return False
+ else:
+ return True
+
+
+class file_generator(object):
+ """Yield the given input (a file object) in chunks (default 64k).
+
+ (Core)
+ """
+
+ def __init__(self, input, chunkSize=65536):
+ """Initialize file_generator with file ``input`` for chunked access."""
+ self.input = input
+ self.chunkSize = chunkSize
+
+ def __iter__(self):
+ """Return iterator."""
+ return self
+
+ def __next__(self):
+ """Return next chunk of file."""
+ chunk = self.input.read(self.chunkSize)
+ if chunk:
+ return chunk
+ else:
+ if hasattr(self.input, 'close'):
+ self.input.close()
+ raise StopIteration()
+ next = __next__
+
+ def __del__(self):
+        """Close input on destruction."""
+ if hasattr(self.input, 'close'):
+ self.input.close()
+
+
+def file_generator_limited(fileobj, count, chunk_size=65536):
+ """Yield the given file object in chunks.
+
+    Stops after `count` bytes have been emitted.
+ Default chunk size is 64kB. (Core)
+ """
+ remaining = count
+ while remaining > 0:
+ chunk = fileobj.read(min(chunk_size, remaining))
+ chunklen = len(chunk)
+ if chunklen == 0:
+ return
+ remaining -= chunklen
+ yield chunk
+
+
+def set_vary_header(response, header_name):
+ """Add a Vary header to a response."""
+ varies = response.headers.get('Vary', '')
+ varies = [x.strip() for x in varies.split(',') if x.strip()]
+ if header_name not in varies:
+ varies.append(header_name)
+ response.headers['Vary'] = ', '.join(varies)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..124361c746ab9b546acf6f9cb68c4356075bed8b
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/auth_basic.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/auth_basic.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c0c028074d503676d9e21cfff58714cd56928e63
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/auth_basic.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/auth_digest.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/auth_digest.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..53a4f5b2ae75dd80bef9e67a63765f9bd51d8878
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/auth_digest.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/caching.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/caching.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e0382c4492bca13732b7a586d62c45e2e67994ae
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/caching.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/covercp.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/covercp.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6e229799bfd344d4542428d6feb225bdfe96b0d0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/covercp.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/cpstats.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/cpstats.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..18ea06c96d53e8bbf64591908d176153aefdafb0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/cpstats.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/cptools.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/cptools.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ef5b2fbf6e0f5cbc53f9866a663f196349b329b5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/cptools.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/encoding.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/encoding.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..625334b32d7bc3cb5ebab006edf133b16a292578
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/encoding.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/gctools.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/gctools.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..06e8a5a6f87765a0690be8ac0ae278e10c072113
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/gctools.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/httputil.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/httputil.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ceaf5e69a08dbe34b20c603cabdb5c4e3ebadd11
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/httputil.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/jsontools.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/jsontools.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8d0f90f8532488464311176c8a2c8bc53280ca82
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/jsontools.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/locking.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/locking.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ff491936d5990669f134ea9751126a92bcc6bb90
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/locking.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/profiler.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/profiler.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bf97d7c1a209ba79f000bb9d24daf180d978548e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/profiler.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/reprconf.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/reprconf.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f236b5b6079383d14442d7227d3497f0014a4b73
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/reprconf.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/sessions.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/sessions.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..fb01ebb557e1259b5287c08aee1d257501a3ae85
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/sessions.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/static.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/static.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..dba39533d81a4d1ceaa4a3a06a73ee9397ee330f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/static.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/xmlrpcutil.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/xmlrpcutil.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..66bba0b71c03ec08117644b9b529f859c8f75752
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/__pycache__/xmlrpcutil.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/auth_basic.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/auth_basic.py
new file mode 100644
index 0000000000000000000000000000000000000000..ad379a2601765fdee0809c4174b144d7331d71a2
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/auth_basic.py
@@ -0,0 +1,120 @@
+# This file is part of CherryPy <http://www.cherrypy.org/>
+# -*- coding: utf-8 -*-
+# vim:ts=4:sw=4:expandtab:fileencoding=utf-8
+"""HTTP Basic Authentication tool.
+
+This module provides a CherryPy 3.x tool which implements
+the server-side of HTTP Basic Access Authentication, as described in
+:rfc:`2617`.
+
+Example usage, using the built-in checkpassword_dict function which uses a dict
+as the credentials store::
+
+ userpassdict = {'bird' : 'bebop', 'ornette' : 'wayout'}
+ checkpassword = cherrypy.lib.auth_basic.checkpassword_dict(userpassdict)
+ basic_auth = {'tools.auth_basic.on': True,
+ 'tools.auth_basic.realm': 'earth',
+ 'tools.auth_basic.checkpassword': checkpassword,
+ 'tools.auth_basic.accept_charset': 'UTF-8',
+ }
+ app_config = { '/' : basic_auth }
+
+"""
+
+import binascii
+import unicodedata
+import base64
+
+import cherrypy
+from cherrypy._cpcompat import ntou, tonative
+
+
+__author__ = 'visteya'
+__date__ = 'April 2009'
+
+
+def checkpassword_dict(user_password_dict):
+ """Returns a checkpassword function which checks credentials
+ against a dictionary of the form: {username : password}.
+
+ If you want a simple dictionary-based authentication scheme, use
+ checkpassword_dict(my_credentials_dict) as the value for the
+ checkpassword argument to basic_auth().
+ """
+ def checkpassword(realm, user, password):
+ p = user_password_dict.get(user)
+ return p and p == password or False
+
+ return checkpassword
+
+
+def basic_auth(realm, checkpassword, debug=False, accept_charset='utf-8'):
+ """A CherryPy tool which hooks at before_handler to perform
+ HTTP Basic Access Authentication, as specified in :rfc:`2617`
+ and :rfc:`7617`.
+
+ If the request has an 'authorization' header with a 'Basic' scheme, this
+ tool attempts to authenticate the credentials supplied in that header. If
+ the request has no 'authorization' header, or if it does but the scheme is
+ not 'Basic', or if authentication fails, the tool sends a 401 response with
+ a 'WWW-Authenticate' Basic header.
+
+ realm
+ A string containing the authentication realm.
+
+ checkpassword
+ A callable which checks the authentication credentials.
+ Its signature is checkpassword(realm, username, password). where
+ username and password are the values obtained from the request's
+ 'authorization' header. If authentication succeeds, checkpassword
+ returns True, else it returns False.
+
+ """
+
+ fallback_charset = 'ISO-8859-1'
+
+ if '"' in realm:
+ raise ValueError('Realm cannot contain the " (quote) character.')
+ request = cherrypy.serving.request
+
+ auth_header = request.headers.get('authorization')
+ if auth_header is not None:
+        # split() error, base64.b64decode() error
+ msg = 'Bad Request'
+ with cherrypy.HTTPError.handle((ValueError, binascii.Error), 400, msg):
+ scheme, params = auth_header.split(' ', 1)
+ if scheme.lower() == 'basic':
+ charsets = accept_charset, fallback_charset
+ decoded_params = base64.b64decode(params.encode('ascii'))
+ decoded_params = _try_decode(decoded_params, charsets)
+ decoded_params = ntou(decoded_params)
+ decoded_params = unicodedata.normalize('NFC', decoded_params)
+ decoded_params = tonative(decoded_params)
+ username, password = decoded_params.split(':', 1)
+ if checkpassword(realm, username, password):
+ if debug:
+ cherrypy.log('Auth succeeded', 'TOOLS.AUTH_BASIC')
+ request.login = username
+ return # successful authentication
+
+ charset = accept_charset.upper()
+ charset_declaration = (
+ (', charset="%s"' % charset)
+ if charset != fallback_charset
+ else ''
+ )
+ # Respond with 401 status and a WWW-Authenticate header
+ cherrypy.serving.response.headers['www-authenticate'] = (
+ 'Basic realm="%s"%s' % (realm, charset_declaration)
+ )
+ raise cherrypy.HTTPError(
+ 401, 'You are not authorized to access that resource')
+
+
+def _try_decode(subject, charsets):
+ for charset in charsets[:-1]:
+ try:
+ return tonative(subject, charset)
+ except ValueError:
+ pass
+ return tonative(subject, charsets[-1])
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/auth_digest.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/auth_digest.py
new file mode 100644
index 0000000000000000000000000000000000000000..fbb5df64a6f0b3adc7dcfa304b9cbc8803efbec3
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/auth_digest.py
@@ -0,0 +1,463 @@
+# This file is part of CherryPy <http://www.cherrypy.org/>
+# -*- coding: utf-8 -*-
+# vim:ts=4:sw=4:expandtab:fileencoding=utf-8
+"""HTTP Digest Authentication tool.
+
+An implementation of the server-side of HTTP Digest Access
+Authentication, which is described in :rfc:`2617`.
+
+Example usage, using the built-in get_ha1_dict_plain function which uses a dict
+of plaintext passwords as the credentials store::
+
+ userpassdict = {'alice' : '4x5istwelve'}
+ get_ha1 = cherrypy.lib.auth_digest.get_ha1_dict_plain(userpassdict)
+ digest_auth = {'tools.auth_digest.on': True,
+ 'tools.auth_digest.realm': 'wonderland',
+ 'tools.auth_digest.get_ha1': get_ha1,
+ 'tools.auth_digest.key': 'a565c27146791cfb',
+ 'tools.auth_digest.accept_charset': 'UTF-8',
+ }
+ app_config = { '/' : digest_auth }
+"""
+
+import time
+import functools
+from hashlib import md5
+from urllib.request import parse_http_list, parse_keqv_list
+
+import cherrypy
+from cherrypy._cpcompat import ntob, tonative
+
+
+__author__ = 'visteya'
+__date__ = 'April 2009'
+
+
+def md5_hex(s):
+ return md5(ntob(s, 'utf-8')).hexdigest()
+
+
+qop_auth = 'auth'
+qop_auth_int = 'auth-int'
+valid_qops = (qop_auth, qop_auth_int)
+
+valid_algorithms = ('MD5', 'MD5-sess')
+
+FALLBACK_CHARSET = 'ISO-8859-1'
+DEFAULT_CHARSET = 'UTF-8'
+
+
+def TRACE(msg):
+ cherrypy.log(msg, context='TOOLS.AUTH_DIGEST')
+
+# Three helper functions for users of the tool, providing three variants
+# of get_ha1() functions for three different kinds of credential stores.
+
+
+def get_ha1_dict_plain(user_password_dict):
+ """Returns a get_ha1 function which obtains a plaintext password from a
+ dictionary of the form: {username : password}.
+
+ If you want a simple dictionary-based authentication scheme, with plaintext
+ passwords, use get_ha1_dict_plain(my_userpass_dict) as the value for the
+ get_ha1 argument to digest_auth().
+ """
+ def get_ha1(realm, username):
+ password = user_password_dict.get(username)
+ if password:
+ return md5_hex('%s:%s:%s' % (username, realm, password))
+ return None
+
+ return get_ha1
+
+
+def get_ha1_dict(user_ha1_dict):
+ """Returns a get_ha1 function which obtains a HA1 password hash from a
+ dictionary of the form: {username : HA1}.
+
+ If you want a dictionary-based authentication scheme, but with
+ pre-computed HA1 hashes instead of plain-text passwords, use
+ get_ha1_dict(my_userha1_dict) as the value for the get_ha1
+ argument to digest_auth().
+ """
+ def get_ha1(realm, username):
+ return user_ha1_dict.get(username)
+
+ return get_ha1
+
+
+def get_ha1_file_htdigest(filename):
+ """Returns a get_ha1 function which obtains a HA1 password hash from a
+ flat file with lines of the same format as that produced by the Apache
+ htdigest utility. For example, for realm 'wonderland', username 'alice',
+ and password '4x5istwelve', the htdigest line would be::
+
+ alice:wonderland:3238cdfe91a8b2ed8e39646921a02d4c
+
+ If you want to use an Apache htdigest file as the credentials store,
+ then use get_ha1_file_htdigest(my_htdigest_file) as the value for the
+ get_ha1 argument to digest_auth(). It is recommended that the filename
+ argument be an absolute path, to avoid problems.
+ """
+ def get_ha1(realm, username):
+ result = None
+ f = open(filename, 'r')
+ for line in f:
+ u, r, ha1 = line.rstrip().split(':')
+ if u == username and r == realm:
+ result = ha1
+ break
+ f.close()
+ return result
+
+ return get_ha1
+
+
+def synthesize_nonce(s, key, timestamp=None):
+ """Synthesize a nonce value which resists spoofing and can be checked
+ for staleness. Returns a string suitable as the value for 'nonce' in
+ the www-authenticate header.
+
+ s
+ A string related to the resource, such as the hostname of the server.
+
+ key
+ A secret string known only to the server.
+
+ timestamp
+ An integer seconds-since-the-epoch timestamp
+
+ """
+ if timestamp is None:
+ timestamp = int(time.time())
+ h = md5_hex('%s:%s:%s' % (timestamp, s, key))
+ nonce = '%s:%s' % (timestamp, h)
+ return nonce
+
+
+def H(s):
+ """The hash function H"""
+ return md5_hex(s)
+
+
+def _try_decode_header(header, charset):
+ global FALLBACK_CHARSET
+
+ for enc in (charset, FALLBACK_CHARSET):
+ try:
+ return tonative(ntob(tonative(header, 'latin1'), 'latin1'), enc)
+ except ValueError as ve:
+ last_err = ve
+ else:
+ raise last_err
+
+
+class HttpDigestAuthorization(object):
+ """
+ Parses a Digest Authorization header and performs
+ re-calculation of the digest.
+ """
+
+ scheme = 'digest'
+
+ def errmsg(self, s):
+ return 'Digest Authorization header: %s' % s
+
+ @classmethod
+ def matches(cls, header):
+ scheme, _, _ = header.partition(' ')
+ return scheme.lower() == cls.scheme
+
+ def __init__(
+ self, auth_header, http_method,
+ debug=False, accept_charset=DEFAULT_CHARSET[:],
+ ):
+ self.http_method = http_method
+ self.debug = debug
+
+ if not self.matches(auth_header):
+ raise ValueError('Authorization scheme is not "Digest"')
+
+ self.auth_header = _try_decode_header(auth_header, accept_charset)
+
+ scheme, params = self.auth_header.split(' ', 1)
+
+ # make a dict of the params
+ items = parse_http_list(params)
+ paramsd = parse_keqv_list(items)
+
+ self.realm = paramsd.get('realm')
+ self.username = paramsd.get('username')
+ self.nonce = paramsd.get('nonce')
+ self.uri = paramsd.get('uri')
+ self.method = paramsd.get('method')
+ self.response = paramsd.get('response') # the response digest
+ self.algorithm = paramsd.get('algorithm', 'MD5').upper()
+ self.cnonce = paramsd.get('cnonce')
+ self.opaque = paramsd.get('opaque')
+ self.qop = paramsd.get('qop') # qop
+ self.nc = paramsd.get('nc') # nonce count
+
+ # perform some correctness checks
+ if self.algorithm not in valid_algorithms:
+ raise ValueError(
+ self.errmsg("Unsupported value for algorithm: '%s'" %
+ self.algorithm))
+
+ has_reqd = (
+ self.username and
+ self.realm and
+ self.nonce and
+ self.uri and
+ self.response
+ )
+ if not has_reqd:
+ raise ValueError(
+ self.errmsg('Not all required parameters are present.'))
+
+ if self.qop:
+ if self.qop not in valid_qops:
+ raise ValueError(
+ self.errmsg("Unsupported value for qop: '%s'" % self.qop))
+ if not (self.cnonce and self.nc):
+ raise ValueError(
+ self.errmsg('If qop is sent then '
+ 'cnonce and nc MUST be present'))
+ else:
+ if self.cnonce or self.nc:
+ raise ValueError(
+ self.errmsg('If qop is not sent, '
+ 'neither cnonce nor nc can be present'))
+
+ def __str__(self):
+ return 'authorization : %s' % self.auth_header
+
+ def validate_nonce(self, s, key):
+ """Validate the nonce.
+ Returns True if nonce was generated by synthesize_nonce() and the
+ timestamp is not spoofed, else returns False.
+
+ s
+ A string related to the resource, such as the hostname of
+ the server.
+
+ key
+ A secret string known only to the server.
+
+ Both s and key must be the same values which were used to synthesize
+ the nonce we are trying to validate.
+ """
+ try:
+ timestamp, hashpart = self.nonce.split(':', 1)
+ s_timestamp, s_hashpart = synthesize_nonce(
+ s, key, timestamp).split(':', 1)
+ is_valid = s_hashpart == hashpart
+ if self.debug:
+ TRACE('validate_nonce: %s' % is_valid)
+ return is_valid
+ except ValueError: # split() error
+ pass
+ return False
+
+ def is_nonce_stale(self, max_age_seconds=600):
+ """Returns True if a validated nonce is stale. The nonce contains a
+ timestamp in plaintext and also a secure hash of the timestamp.
+ You should first validate the nonce to ensure the plaintext
+ timestamp is not spoofed.
+ """
+ try:
+ timestamp, hashpart = self.nonce.split(':', 1)
+ if int(timestamp) + max_age_seconds > int(time.time()):
+ return False
+ except ValueError: # int() error
+ pass
+ if self.debug:
+ TRACE('nonce is stale')
+ return True
+
+ def HA2(self, entity_body=''):
+ """Returns the H(A2) string. See :rfc:`2617` section 3.2.2.3."""
+ # RFC 2617 3.2.2.3
+ # If the "qop" directive's value is "auth" or is unspecified,
+ # then A2 is:
+ # A2 = method ":" digest-uri-value
+ #
+ # If the "qop" value is "auth-int", then A2 is:
+ # A2 = method ":" digest-uri-value ":" H(entity-body)
+ if self.qop is None or self.qop == 'auth':
+ a2 = '%s:%s' % (self.http_method, self.uri)
+ elif self.qop == 'auth-int':
+ a2 = '%s:%s:%s' % (self.http_method, self.uri, H(entity_body))
+ else:
+ # in theory, this should never happen, since I validate qop in
+ # __init__()
+ raise ValueError(self.errmsg('Unrecognized value for qop!'))
+ return H(a2)
+
+ def request_digest(self, ha1, entity_body=''):
+ """Calculates the Request-Digest. See :rfc:`2617` section 3.2.2.1.
+
+ ha1
+ The HA1 string obtained from the credentials store.
+
+ entity_body
+ If 'qop' is set to 'auth-int', then A2 includes a hash
+ of the "entity body". The entity body is the part of the
+ message which follows the HTTP headers. See :rfc:`2617` section
+ 4.3. This refers to the entity the user agent sent in the
+ request which has the Authorization header. Typically GET
+ requests don't have an entity, and POST requests do.
+
+ """
+ ha2 = self.HA2(entity_body)
+ # Request-Digest -- RFC 2617 3.2.2.1
+ if self.qop:
+ req = '%s:%s:%s:%s:%s' % (
+ self.nonce, self.nc, self.cnonce, self.qop, ha2)
+ else:
+ req = '%s:%s' % (self.nonce, ha2)
+
+ # RFC 2617 3.2.2.2
+ #
+ # If the "algorithm" directive's value is "MD5" or is unspecified,
+ # then A1 is:
+ # A1 = unq(username-value) ":" unq(realm-value) ":" passwd
+ #
+ # If the "algorithm" directive's value is "MD5-sess", then A1 is
+ # calculated only once - on the first request by the client following
+ # receipt of a WWW-Authenticate challenge from the server.
+ # A1 = H( unq(username-value) ":" unq(realm-value) ":" passwd )
+ # ":" unq(nonce-value) ":" unq(cnonce-value)
+ if self.algorithm == 'MD5-sess':
+ ha1 = H('%s:%s:%s' % (ha1, self.nonce, self.cnonce))
+
+ digest = H('%s:%s' % (ha1, req))
+ return digest
+
+
+def _get_charset_declaration(charset):
+ global FALLBACK_CHARSET
+ charset = charset.upper()
+ return (
+ (', charset="%s"' % charset)
+ if charset != FALLBACK_CHARSET
+ else ''
+ )
+
+
+def www_authenticate(
+ realm, key, algorithm='MD5', nonce=None, qop=qop_auth,
+ stale=False, accept_charset=DEFAULT_CHARSET[:],
+):
+ """Constructs a WWW-Authenticate header for Digest authentication."""
+ if qop not in valid_qops:
+ raise ValueError("Unsupported value for qop: '%s'" % qop)
+ if algorithm not in valid_algorithms:
+ raise ValueError("Unsupported value for algorithm: '%s'" % algorithm)
+
+ HEADER_PATTERN = (
+ 'Digest realm="%s", nonce="%s", algorithm="%s", qop="%s"%s%s'
+ )
+
+ if nonce is None:
+ nonce = synthesize_nonce(realm, key)
+
+ stale_param = ', stale="true"' if stale else ''
+
+ charset_declaration = _get_charset_declaration(accept_charset)
+
+ return HEADER_PATTERN % (
+ realm, nonce, algorithm, qop, stale_param, charset_declaration,
+ )
+
+
+def digest_auth(realm, get_ha1, key, debug=False, accept_charset='utf-8'):
+ """A CherryPy tool that hooks at before_handler to perform
+ HTTP Digest Access Authentication, as specified in :rfc:`2617`.
+
+ If the request has an 'authorization' header with a 'Digest' scheme,
+ this tool authenticates the credentials supplied in that header.
+ If the request has no 'authorization' header, or if it does but the
+ scheme is not "Digest", or if authentication fails, the tool sends
+ a 401 response with a 'WWW-Authenticate' Digest header.
+
+ realm
+ A string containing the authentication realm.
+
+ get_ha1
+ A callable that looks up a username in a credentials store
+ and returns the HA1 string, which is defined in the RFC to be
+ MD5(username : realm : password). The function's signature is:
+ ``get_ha1(realm, username)``
+ where username is obtained from the request's 'authorization' header.
+ If username is not found in the credentials store, get_ha1() returns
+ None.
+
+ key
+ A secret string known only to the server, used in the synthesis
+ of nonces.
+
+ """
+ request = cherrypy.serving.request
+
+ auth_header = request.headers.get('authorization')
+
+ respond_401 = functools.partial(
+ _respond_401, realm, key, accept_charset, debug)
+
+ if not HttpDigestAuthorization.matches(auth_header or ''):
+ respond_401()
+
+ msg = 'The Authorization header could not be parsed.'
+ with cherrypy.HTTPError.handle(ValueError, 400, msg):
+ auth = HttpDigestAuthorization(
+ auth_header, request.method,
+ debug=debug, accept_charset=accept_charset,
+ )
+
+ if debug:
+ TRACE(str(auth))
+
+ if not auth.validate_nonce(realm, key):
+ respond_401()
+
+ ha1 = get_ha1(realm, auth.username)
+
+ if ha1 is None:
+ respond_401()
+
+ # note that for request.body to be available we need to
+ # hook in at before_handler, not on_start_resource like
+ # 3.1.x digest_auth does.
+ digest = auth.request_digest(ha1, entity_body=request.body)
+ if digest != auth.response:
+ respond_401()
+
+ # authenticated
+ if debug:
+ TRACE('digest matches auth.response')
+ # Now check if nonce is stale.
+ # The choice of ten minutes' lifetime for nonce is somewhat
+ # arbitrary
+ if auth.is_nonce_stale(max_age_seconds=600):
+ respond_401(stale=True)
+
+ request.login = auth.username
+ if debug:
+ TRACE('authentication of %s successful' % auth.username)
+
+
+def _respond_401(realm, key, accept_charset, debug, **kwargs):
+ """
+ Respond with 401 status and a WWW-Authenticate header
+ """
+ header = www_authenticate(
+ realm, key,
+ accept_charset=accept_charset,
+ **kwargs
+ )
+ if debug:
+ TRACE(header)
+ cherrypy.serving.response.headers['WWW-Authenticate'] = header
+ raise cherrypy.HTTPError(
+ 401, 'You are not authorized to access that resource')
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/caching.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/caching.py
new file mode 100644
index 0000000000000000000000000000000000000000..08d2d8e4a52648879dbbbcc4d7714d9ce4c5718f
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/caching.py
@@ -0,0 +1,478 @@
+"""
+CherryPy implements a simple caching system as a pluggable Tool. This tool
+tries to be an (in-process) HTTP/1.1-compliant cache. It's not quite there
+yet, but it's probably good enough for most sites.
+
+In general, GET responses are cached (along with selecting headers) and, if
+another request arrives for the same resource, the caching Tool will return 304
+Not Modified if possible, or serve the cached response otherwise. It also sets
+request.cached to True if serving a cached representation, and sets
+request.cacheable to False (so it doesn't get cached again).
+
+If POST, PUT, or DELETE requests are made for a cached resource, they
+invalidate (delete) any cached response.
+
+Usage
+=====
+
+Configuration file example::
+
+ [/]
+ tools.caching.on = True
+ tools.caching.delay = 3600
+
+You may use a class other than the default
+:class:`MemoryCache<cherrypy.lib.caching.MemoryCache>` by supplying the config
+entry ``cache_class``; supply the full dotted name of the replacement class
+as the config value. It must implement the basic methods ``get``, ``put``,
+``delete``, and ``clear``.
+
+You may set any attribute, including overriding methods, on the cache
+instance by providing them in config. The above sets the
+:attr:`delay<cherrypy.lib.caching.MemoryCache.delay>` attribute, for example.
+"""
+
+import datetime
+import sys
+import threading
+import time
+
+import cherrypy
+from cherrypy.lib import cptools, httputil
+
+
+class Cache(object):
+
+ """Base class for Cache implementations."""
+
+ def get(self):
+ """Return the current variant if in the cache, else None."""
+ raise NotImplementedError
+
+ def put(self, obj, size):
+ """Store the current variant in the cache."""
+ raise NotImplementedError
+
+ def delete(self):
+ """Remove ALL cached variants of the current resource."""
+ raise NotImplementedError
+
+ def clear(self):
+ """Reset the cache to its initial, empty state."""
+ raise NotImplementedError
+
+
+# ------------------------------ Memory Cache ------------------------------- #
+class AntiStampedeCache(dict):
+
+ """A storage system for cached items which reduces stampede collisions."""
+
+ def wait(self, key, timeout=5, debug=False):
+ """Return the cached value for the given key, or None.
+
+ If timeout is not None, and the value is already
+ being calculated by another thread, wait until the given timeout has
+ elapsed. If the value is available before the timeout expires, it is
+ returned. If not, None is returned, and a sentinel placed in the cache
+ to signal other threads to wait.
+
+ If timeout is None, no waiting is performed nor sentinels used.
+ """
+ value = self.get(key)
+ if isinstance(value, threading.Event):
+ if timeout is None:
+ # Ignore the other thread and recalc it ourselves.
+ if debug:
+ cherrypy.log('No timeout', 'TOOLS.CACHING')
+ return None
+
+ # Wait until it's done or times out.
+ if debug:
+ cherrypy.log('Waiting up to %s seconds' %
+ timeout, 'TOOLS.CACHING')
+ value.wait(timeout)
+ if value.result is not None:
+ # The other thread finished its calculation. Use it.
+ if debug:
+ cherrypy.log('Result!', 'TOOLS.CACHING')
+ return value.result
+ # Timed out. Stick an Event in the slot so other threads wait
+ # on this one to finish calculating the value.
+ if debug:
+ cherrypy.log('Timed out', 'TOOLS.CACHING')
+ e = threading.Event()
+ e.result = None
+ dict.__setitem__(self, key, e)
+
+ return None
+ elif value is None:
+ # Stick an Event in the slot so other threads wait
+ # on this one to finish calculating the value.
+ if debug:
+ cherrypy.log('Timed out', 'TOOLS.CACHING')
+ e = threading.Event()
+ e.result = None
+ dict.__setitem__(self, key, e)
+ return value
+
+ def __setitem__(self, key, value):
+ """Set the cached value for the given key."""
+ existing = self.get(key)
+ dict.__setitem__(self, key, value)
+ if isinstance(existing, threading.Event):
+ # Set Event.result so other threads waiting on it have
+ # immediate access without needing to poll the cache again.
+ existing.result = value
+ existing.set()
+
+
+class MemoryCache(Cache):
+
+ """An in-memory cache for varying response content.
+
+ Each key in self.store is a URI, and each value is an AntiStampedeCache.
+ The response for any given URI may vary based on the values of
+ "selecting request headers"; that is, those named in the Vary
+ response header. We assume the list of header names to be constant
+ for each URI throughout the lifetime of the application, and store
+ that list in ``self.store[uri].selecting_headers``.
+
+ The items contained in ``self.store[uri]`` have keys which are tuples of
+ request header values (in the same order as the names in its
+ selecting_headers), and values which are the actual responses.
+ """
+
+ maxobjects = 1000
+ """The maximum number of cached objects; defaults to 1000."""
+
+ maxobj_size = 100000
+ """The maximum size of each cached object in bytes; defaults to 100 KB."""
+
+ maxsize = 10000000
+ """The maximum size of the entire cache in bytes; defaults to 10 MB."""
+
+ delay = 600
+ """Seconds until the cached content expires; defaults to 600 (10 minutes).
+ """
+
+ antistampede_timeout = 5
+ """Seconds to wait for other threads to release a cache lock."""
+
+ expire_freq = 0.1
+ """Seconds to sleep between cache expiration sweeps."""
+
+ debug = False
+
+ def __init__(self):
+ self.clear()
+
+ # Run self.expire_cache in a separate daemon thread.
+ t = threading.Thread(target=self.expire_cache, name='expire_cache')
+ self.expiration_thread = t
+ t.daemon = True
+ t.start()
+
+ def clear(self):
+ """Reset the cache to its initial, empty state."""
+ self.store = {}
+ self.expirations = {}
+ self.tot_puts = 0
+ self.tot_gets = 0
+ self.tot_hist = 0
+ self.tot_expires = 0
+ self.tot_non_modified = 0
+ self.cursize = 0
+
+ def expire_cache(self):
+ """Continuously examine cached objects, expiring stale ones.
+
+ This function is designed to be run in its own daemon thread,
+ referenced at ``self.expiration_thread``.
+ """
+ # It's possible that "time" will be set to None
+ # arbitrarily, so we check "while time" to avoid exceptions.
+ # See tickets #99 and #180 for more information.
+ while time:
+ now = time.time()
+ # Must make a copy of expirations so it doesn't change size
+ # during iteration
+ for expiration_time, objects in self.expirations.copy().items():
+ if expiration_time <= now:
+ for obj_size, uri, sel_header_values in objects:
+ try:
+ del self.store[uri][tuple(sel_header_values)]
+ self.tot_expires += 1
+ self.cursize -= obj_size
+ except KeyError:
+ # the key may have been deleted elsewhere
+ pass
+ del self.expirations[expiration_time]
+ time.sleep(self.expire_freq)
+
+ def get(self):
+ """Return the current variant if in the cache, else None."""
+ request = cherrypy.serving.request
+ self.tot_gets += 1
+
+ uri = cherrypy.url(qs=request.query_string)
+ uricache = self.store.get(uri)
+ if uricache is None:
+ return None
+
+ header_values = [request.headers.get(h, '')
+ for h in uricache.selecting_headers]
+ variant = uricache.wait(key=tuple(sorted(header_values)),
+ timeout=self.antistampede_timeout,
+ debug=self.debug)
+ if variant is not None:
+ self.tot_hist += 1
+ return variant
+
+ def put(self, variant, size):
+ """Store the current variant in the cache."""
+ request = cherrypy.serving.request
+ response = cherrypy.serving.response
+
+ uri = cherrypy.url(qs=request.query_string)
+ uricache = self.store.get(uri)
+ if uricache is None:
+ uricache = AntiStampedeCache()
+ uricache.selecting_headers = [
+ e.value for e in response.headers.elements('Vary')]
+ self.store[uri] = uricache
+
+ if len(self.store) < self.maxobjects:
+ total_size = self.cursize + size
+
+ # checks if there's space for the object
+ if (size < self.maxobj_size and total_size < self.maxsize):
+ # add to the expirations list
+ expiration_time = response.time + self.delay
+ bucket = self.expirations.setdefault(expiration_time, [])
+ bucket.append((size, uri, uricache.selecting_headers))
+
+ # add to the cache
+ header_values = [request.headers.get(h, '')
+ for h in uricache.selecting_headers]
+ uricache[tuple(sorted(header_values))] = variant
+ self.tot_puts += 1
+ self.cursize = total_size
+
+ def delete(self):
+ """Remove ALL cached variants of the current resource."""
+ uri = cherrypy.url(qs=cherrypy.serving.request.query_string)
+ self.store.pop(uri, None)
+
+
+def get(invalid_methods=('POST', 'PUT', 'DELETE'), debug=False, **kwargs):
+ """Try to obtain cached output. If fresh enough, raise HTTPError(304).
+
+ If POST, PUT, or DELETE:
+ * invalidates (deletes) any cached response for this resource
+ * sets request.cached = False
+ * sets request.cacheable = False
+
+ else if a cached copy exists:
+ * sets request.cached = True
+ * sets request.cacheable = False
+ * sets response.headers to the cached values
+ * checks the cached Last-Modified response header against the
+ current If-(Un)Modified-Since request headers; raises 304
+ if necessary.
+ * sets response.status and response.body to the cached values
+ * returns True
+
+ otherwise:
+ * sets request.cached = False
+ * sets request.cacheable = True
+ * returns False
+ """
+ request = cherrypy.serving.request
+ response = cherrypy.serving.response
+
+ if not hasattr(cherrypy, '_cache'):
+ # Make a process-wide Cache object.
+ cherrypy._cache = kwargs.pop('cache_class', MemoryCache)()
+
+ # Take all remaining kwargs and set them on the Cache object.
+ for k, v in kwargs.items():
+ setattr(cherrypy._cache, k, v)
+ cherrypy._cache.debug = debug
+
+ # POST, PUT, DELETE should invalidate (delete) the cached copy.
+ # See http://www.w3.org/Protocols/rfc2616/rfc2616-sec13.html#sec13.10.
+ if request.method in invalid_methods:
+ if debug:
+ cherrypy.log('request.method %r in invalid_methods %r' %
+ (request.method, invalid_methods), 'TOOLS.CACHING')
+ cherrypy._cache.delete()
+ request.cached = False
+ request.cacheable = False
+ return False
+
+ if 'no-cache' in [e.value for e in request.headers.elements('Pragma')]:
+ request.cached = False
+ request.cacheable = True
+ return False
+
+ cache_data = cherrypy._cache.get()
+ request.cached = bool(cache_data)
+ request.cacheable = not request.cached
+ if request.cached:
+ # Serve the cached copy.
+ max_age = cherrypy._cache.delay
+ for v in [e.value for e in request.headers.elements('Cache-Control')]:
+ atoms = v.split('=', 1)
+ directive = atoms.pop(0)
+ if directive == 'max-age':
+ if len(atoms) != 1 or not atoms[0].isdigit():
+ raise cherrypy.HTTPError(
+ 400, 'Invalid Cache-Control header')
+ max_age = int(atoms[0])
+ break
+ elif directive == 'no-cache':
+ if debug:
+ cherrypy.log(
+ 'Ignoring cache due to Cache-Control: no-cache',
+ 'TOOLS.CACHING')
+ request.cached = False
+ request.cacheable = True
+ return False
+
+ if debug:
+ cherrypy.log('Reading response from cache', 'TOOLS.CACHING')
+ s, h, b, create_time = cache_data
+ age = int(response.time - create_time)
+ if (age > max_age):
+ if debug:
+ cherrypy.log('Ignoring cache due to age > %d' % max_age,
+ 'TOOLS.CACHING')
+ request.cached = False
+ request.cacheable = True
+ return False
+
+ # Copy the response headers. See
+ # https://github.com/cherrypy/cherrypy/issues/721.
+ response.headers = rh = httputil.HeaderMap()
+ for k in h:
+ dict.__setitem__(rh, k, dict.__getitem__(h, k))
+
+ # Add the required Age header
+ response.headers['Age'] = str(age)
+
+ try:
+ # Note that validate_since depends on a Last-Modified header;
+ # this was put into the cached copy, and should have been
+ # resurrected just above (response.headers = cache_data[1]).
+ cptools.validate_since()
+ except cherrypy.HTTPRedirect:
+ x = sys.exc_info()[1]
+ if x.status == 304:
+ cherrypy._cache.tot_non_modified += 1
+ raise
+
+ # serve it & get out from the request
+ response.status = s
+ response.body = b
+ else:
+ if debug:
+ cherrypy.log('request is not cached', 'TOOLS.CACHING')
+ return request.cached
+
+
+def tee_output():
+ """Tee response output to cache storage. Internal."""
+ # Used by CachingTool by attaching to request.hooks
+
+ request = cherrypy.serving.request
+ if 'no-store' in request.headers.values('Cache-Control'):
+ return
+
+ def tee(body):
+ """Tee response.body into a list."""
+ if ('no-cache' in response.headers.values('Pragma') or
+ 'no-store' in response.headers.values('Cache-Control')):
+ for chunk in body:
+ yield chunk
+ return
+
+ output = []
+ for chunk in body:
+ output.append(chunk)
+ yield chunk
+
+ # Save the cache data, but only if the body isn't empty.
+ # e.g. a 304 Not Modified on a static file response will
+ # have an empty body.
+ # If the body is empty, delete the cache because it
+ # contains a stale Threading._Event object that will
+ # stall all consecutive requests until the _Event times
+ # out
+ body = b''.join(output)
+ if not body:
+ cherrypy._cache.delete()
+ else:
+ cherrypy._cache.put((response.status, response.headers or {},
+ body, response.time), len(body))
+
+ response = cherrypy.serving.response
+ response.body = tee(response.body)
+
+
+def expires(secs=0, force=False, debug=False):
+ """Tool for influencing cache mechanisms using the 'Expires' header.
+
+ secs
+ Must be either an int or a datetime.timedelta, and indicates the
+ number of seconds between response.time and when the response should
+ expire. The 'Expires' header will be set to response.time + secs.
+ If secs is zero, the 'Expires' header is set one year in the past, and
+ the following "cache prevention" headers are also set:
+
+ * Pragma: no-cache
+ * Cache-Control': no-cache, must-revalidate
+
+ force
+ If False, the following headers are checked:
+
+ * Etag
+ * Last-Modified
+ * Age
+ * Expires
+
+ If any are already present, none of the above response headers are set.
+
+ """
+
+ response = cherrypy.serving.response
+ headers = response.headers
+
+ cacheable = False
+ if not force:
+ # some header names that indicate that the response can be cached
+ for indicator in ('Etag', 'Last-Modified', 'Age', 'Expires'):
+ if indicator in headers:
+ cacheable = True
+ break
+
+ if not cacheable and not force:
+ if debug:
+ cherrypy.log('request is not cacheable', 'TOOLS.EXPIRES')
+ else:
+ if debug:
+ cherrypy.log('request is cacheable', 'TOOLS.EXPIRES')
+ if isinstance(secs, datetime.timedelta):
+ secs = (86400 * secs.days) + secs.seconds
+
+ if secs == 0:
+ if force or ('Pragma' not in headers):
+ headers['Pragma'] = 'no-cache'
+ if cherrypy.serving.request.protocol >= (1, 1):
+ if force or 'Cache-Control' not in headers:
+ headers['Cache-Control'] = 'no-cache, must-revalidate'
+ # Set an explicit Expires date in the past.
+ expiry = httputil.HTTPDate(1169942400.0)
+ else:
+ expiry = httputil.HTTPDate(response.time + secs)
+ if force or 'Expires' not in headers:
+ headers['Expires'] = expiry
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/covercp.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/covercp.py
new file mode 100644
index 0000000000000000000000000000000000000000..3e2197137ddbf14cf5ee302cb9cd937661393fc7
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/covercp.py
@@ -0,0 +1,390 @@
+"""Code-coverage tools for CherryPy.
+
+To use this module, or the coverage tools in the test suite,
+you need to download 'coverage.py', either Gareth Rees' `original
+implementation <http://www.garethrees.org/2001/12/04/python-coverage/>`_
+or Ned Batchelder's `enhanced version:
+<http://www.nedbatchelder.com/code/modules/coverage.html>`_
+
+To turn on coverage tracing, use the following code::
+
+ cherrypy.engine.subscribe('start', covercp.start)
+
+DO NOT subscribe anything on the 'start_thread' channel, as previously
+recommended. Calling start once in the main thread should be sufficient
+to start coverage on all threads. Calling start again in each thread
+effectively clears any coverage data gathered up to that point.
+
+Run your code, then use the ``covercp.serve()`` function to browse the
+results in a web browser. If you run this module from the command line,
+it will call ``serve()`` for you.
+"""
+
+import re
+import sys
+import cgi
+import os
+import os.path
+import urllib.parse
+
+import cherrypy
+
+
+localFile = os.path.join(os.path.dirname(__file__), 'coverage.cache')
+
+the_coverage = None
+try:
+ from coverage import coverage
+ the_coverage = coverage(data_file=localFile)
+
+ def start():
+ the_coverage.start()
+except ImportError:
+ # Setting the_coverage to None will raise errors
+ # that need to be trapped downstream.
+ the_coverage = None
+
+ import warnings
+ warnings.warn(
+ 'No code coverage will be performed; '
+ 'coverage.py could not be imported.')
+
+ def start():
+ pass
+start.priority = 20
+
+TEMPLATE_MENU = """<html>
+<head>
+ <title>CherryPy Coverage Menu</title>
+ <style>
+ body {font: 9pt Arial, serif;}
+ #tree {
+ font-size: 8pt;
+ font-family: Andale Mono, monospace;
+ white-space: pre;
+ }
+ #tree a:active, a:focus {
+ background-color: black;
+ padding: 1px;
+ color: white;
+ border: 0px solid #9999FF;
+ -moz-outline-style: none;
+ }
+ .fail { color: red;}
+ .pass { color: #888;}
+ #pct { text-align: right;}
+ h3 {
+ font-size: small;
+ font-weight: bold;
+ font-style: italic;
+ margin-top: 5px;
+ }
+ input { border: 1px solid #ccc; padding: 2px; }
+ .directory {
+ color: #933;
+ font-style: italic;
+ font-weight: bold;
+ font-size: 10pt;
+ }
+ .file {
+ color: #400;
+ }
+ a { text-decoration: none; }
+ #crumbs {
+ color: white;
+ font-size: 8pt;
+ font-family: Andale Mono, monospace;
+ width: 100%;
+ background-color: black;
+ }
+ #crumbs a {
+ color: #f88;
+ }
+ #options {
+ line-height: 2.3em;
+ border: 1px solid black;
+ background-color: #eee;
+ padding: 4px;
+ }
+ #exclude {
+ width: 100%;
+ margin-bottom: 3px;
+ border: 1px solid #999;
+ }
+ #submit {
+ background-color: black;
+ color: white;
+ border: 0;
+ margin-bottom: -9px;
+ }
+ </style>
+</head>
+<body>
+<h2>CherryPy Coverage</h2>"""
+
+TEMPLATE_FORM = """
+<div id="options">
+<form action='menu' method=GET>
+ <input type='hidden' name='base' value='%(base)s' />
+ Show percentages
+ <input type='checkbox' %(showpct)s name='showpct' value='checked' /><br />
+ Hide files over
+ <input type='text' id='pct' name='pct' value='%(pct)s' size='3' />%%<br />
+ Exclude files matching<br />
+ <input type='text' id='exclude' name='exclude'
+ value='%(exclude)s' size='20' />
+ <br />
+
+ <input type='submit' value='Change view' id="submit"/>
+</form>
+</div>"""
+
+TEMPLATE_FRAMESET = """<html>
+<head><title>CherryPy coverage data</title></head>
+<frameset cols='250, 1*'>
+ <frame src='menu?base=%s' />
+ <frame name='main' src='' />
+</frameset>
+</html>
+"""
+
+TEMPLATE_COVERAGE = """<html>
+<head>
+ <title>Coverage for %(name)s</title>
+ <style>
+ h2 { margin-bottom: .25em; }
+ p { margin: .25em; }
+ .covered { color: #000; background-color: #fff; }
+ .notcovered { color: #fee; background-color: #500; }
+ .excluded { color: #00f; background-color: #fff; }
+ table .covered, table .notcovered, table .excluded
+ { font-family: Andale Mono, monospace;
+ font-size: 10pt; white-space: pre; }
+
+ .lineno { background-color: #eee;}
+ .notcovered .lineno { background-color: #000;}
+ table { border-collapse: collapse;
+ </style>
+</head>
+<body>
+<h2>%(name)s</h2>
+<p>%(fullpath)s</p>
+<p>Coverage: %(pc)s%%</p>"""
+
+TEMPLATE_LOC_COVERED = """<tr class="covered">
+ <td class="lineno">%s </td>
+ <td>%s</td>
+</tr>\n"""
+TEMPLATE_LOC_NOT_COVERED = """<tr class="notcovered">
+ <td class="lineno">%s </td>
+ <td>%s</td>
+</tr>\n"""
+TEMPLATE_LOC_EXCLUDED = """<tr class="excluded">
+ <td class="lineno">%s </td>
+ <td>%s</td>
+</tr>\n"""
+
+TEMPLATE_ITEM = (
+ "%s%s<a class='file' href='report?name=%s' target='main'>%s</a>\n"
+)
+
+
+def _percent(statements, missing):
+ s = len(statements)
+ e = s - len(missing)
+ if s > 0:
+ return int(round(100.0 * e / s))
+ return 0
+
+
+def _show_branch(root, base, path, pct=0, showpct=False, exclude='',
+ coverage=the_coverage):
+
+ # Show the directory name and any of our children
+ dirs = [k for k, v in root.items() if v]
+ dirs.sort()
+ for name in dirs:
+ newpath = os.path.join(path, name)
+
+ if newpath.lower().startswith(base):
+ relpath = newpath[len(base):]
+ yield '| ' * relpath.count(os.sep)
+ yield (
+ "<a class='directory' "
+ "href='menu?base=%s&exclude=%s'>%s</a>\n" %
+ (newpath, urllib.parse.quote_plus(exclude), name)
+ )
+
+ for chunk in _show_branch(
+ root[name], base, newpath, pct, showpct,
+ exclude, coverage=coverage
+ ):
+ yield chunk
+
+ # Now list the files
+ if path.lower().startswith(base):
+ relpath = path[len(base):]
+ files = [k for k, v in root.items() if not v]
+ files.sort()
+ for name in files:
+ newpath = os.path.join(path, name)
+
+ pc_str = ''
+ if showpct:
+ try:
+ _, statements, _, missing, _ = coverage.analysis2(newpath)
+ except Exception:
+ # Yes, we really want to pass on all errors.
+ pass
+ else:
+ pc = _percent(statements, missing)
+ pc_str = ('%3d%% ' % pc).replace(' ', ' ')
+ if pc < float(pct) or pc == -1:
+ pc_str = "<span class='fail'>%s</span>" % pc_str
+ else:
+ pc_str = "<span class='pass'>%s</span>" % pc_str
+
+ yield TEMPLATE_ITEM % ('| ' * (relpath.count(os.sep) + 1),
+ pc_str, newpath, name)
+
+
+def _skip_file(path, exclude):
+ if exclude:
+ return bool(re.search(exclude, path))
+
+
+def _graft(path, tree):
+ d = tree
+
+ p = path
+ atoms = []
+ while True:
+ p, tail = os.path.split(p)
+ if not tail:
+ break
+ atoms.append(tail)
+ atoms.append(p)
+ if p != '/':
+ atoms.append('/')
+
+ atoms.reverse()
+ for node in atoms:
+ if node:
+ d = d.setdefault(node, {})
+
+
+def get_tree(base, exclude, coverage=the_coverage):
+ """Return covered module names as a nested dict."""
+ tree = {}
+ runs = coverage.data.executed_files()
+ for path in runs:
+ if not _skip_file(path, exclude) and not os.path.isdir(path):
+ _graft(path, tree)
+ return tree
+
+
+class CoverStats(object):
+
+ def __init__(self, coverage, root=None):
+ self.coverage = coverage
+ if root is None:
+ # Guess initial depth. Files outside this path will not be
+ # reachable from the web interface.
+ root = os.path.dirname(cherrypy.__file__)
+ self.root = root
+
+ @cherrypy.expose
+ def index(self):
+ return TEMPLATE_FRAMESET % self.root.lower()
+
+ @cherrypy.expose
+ def menu(self, base='/', pct='50', showpct='',
+ exclude=r'python\d\.\d|test|tut\d|tutorial'):
+
+ # The coverage module uses all-lower-case names.
+ base = base.lower().rstrip(os.sep)
+
+ yield TEMPLATE_MENU
+ yield TEMPLATE_FORM % locals()
+
+ # Start by showing links for parent paths
+ yield "<div id='crumbs'>"
+ path = ''
+ atoms = base.split(os.sep)
+ atoms.pop()
+ for atom in atoms:
+ path += atom + os.sep
+ yield ("<a href='menu?base=%s&exclude=%s'>%s</a> %s"
+ % (path, urllib.parse.quote_plus(exclude), atom, os.sep))
+ yield '</div>'
+
+ yield "<div id='tree'>"
+
+ # Then display the tree
+ tree = get_tree(base, exclude, self.coverage)
+ if not tree:
+ yield '<p>No modules covered.</p>'
+ else:
+ for chunk in _show_branch(tree, base, '/', pct,
+ showpct == 'checked', exclude,
+ coverage=self.coverage):
+ yield chunk
+
+ yield '</div>'
+ yield '</body></html>'
+
+ def annotated_file(self, filename, statements, excluded, missing):
+ source = open(filename, 'r')
+ buffer = []
+ for lineno, line in enumerate(source.readlines()):
+ lineno += 1
+ line = line.strip('\n\r')
+ empty_the_buffer = True
+ if lineno in excluded:
+ template = TEMPLATE_LOC_EXCLUDED
+ elif lineno in missing:
+ template = TEMPLATE_LOC_NOT_COVERED
+ elif lineno in statements:
+ template = TEMPLATE_LOC_COVERED
+ else:
+ empty_the_buffer = False
+ buffer.append((lineno, line))
+ if empty_the_buffer:
+ for lno, pastline in buffer:
+ yield template % (lno, cgi.escape(pastline))
+ buffer = []
+ yield template % (lineno, cgi.escape(line))
+
+ @cherrypy.expose
+ def report(self, name):
+ filename, statements, excluded, missing, _ = self.coverage.analysis2(
+ name)
+ pc = _percent(statements, missing)
+ yield TEMPLATE_COVERAGE % dict(name=os.path.basename(name),
+ fullpath=name,
+ pc=pc)
+ yield '<table>\n'
+ for line in self.annotated_file(filename, statements, excluded,
+ missing):
+ yield line
+ yield '</table>'
+ yield '</body>'
+ yield '</html>'
+
+
+def serve(path=localFile, port=8080, root=None):
+ if coverage is None:
+ raise ImportError('The coverage module could not be imported.')
+ from coverage import coverage
+ cov = coverage(data_file=path)
+ cov.load()
+
+ cherrypy.config.update({'server.socket_port': int(port),
+ 'server.thread_pool': 10,
+ 'environment': 'production',
+ })
+ cherrypy.quickstart(CoverStats(cov, root))
+
+
+if __name__ == '__main__':
+ serve(*tuple(sys.argv[1:]))
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/cpstats.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/cpstats.py
new file mode 100644
index 0000000000000000000000000000000000000000..111af0632ce30e62d75007dd214c391c0de86175
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/cpstats.py
@@ -0,0 +1,694 @@
+"""CPStats, a package for collecting and reporting on program statistics.
+
+Overview
+========
+
+Statistics about program operation are an invaluable monitoring and debugging
+tool. Unfortunately, the gathering and reporting of these critical values is
+usually ad-hoc. This package aims to add a centralized place for gathering
+statistical performance data, a structure for recording that data which
+provides for extrapolation of that data into more useful information,
+and a method of serving that data to both human investigators and
+monitoring software. Let's examine each of those in more detail.
+
+Data Gathering
+--------------
+
+Just as Python's `logging` module provides a common importable for gathering
+and sending messages, performance statistics would benefit from a similar
+common mechanism, and one that does *not* require each package which wishes
+to collect stats to import a third-party module. Therefore, we choose to
+re-use the `logging` module by adding a `statistics` object to it.
+
+That `logging.statistics` object is a nested dict. It is not a custom class,
+because that would:
+
+ 1. require libraries and applications to import a third-party module in
+ order to participate
+ 2. inhibit innovation in extrapolation approaches and in reporting tools, and
+ 3. be slow.
+
+There are, however, some specifications regarding the structure of the dict.::
+
+ {
+ +----"SQLAlchemy": {
+ | "Inserts": 4389745,
+ | "Inserts per Second":
+ | lambda s: s["Inserts"] / (time() - s["Start"]),
+ | C +---"Table Statistics": {
+ | o | "widgets": {-----------+
+ N | l | "Rows": 1.3M, | Record
+ a | l | "Inserts": 400, |
+ m | e | },---------------------+
+ e | c | "froobles": {
+ s | t | "Rows": 7845,
+ p | i | "Inserts": 0,
+ a | o | },
+ c | n +---},
+ e | "Slow Queries":
+ | [{"Query": "SELECT * FROM widgets;",
+ | "Processing Time": 47.840923343,
+ | },
+ | ],
+ +----},
+ }
+
+The `logging.statistics` dict has four levels. The topmost level is nothing
+more than a set of names to introduce modularity, usually along the lines of
+package names. If the SQLAlchemy project wanted to participate, for example,
+it might populate the item `logging.statistics['SQLAlchemy']`, whose value
+would be a second-layer dict we call a "namespace". Namespaces help multiple
+packages to avoid collisions over key names, and make reports easier to read,
+to boot. The maintainers of SQLAlchemy should feel free to use more than one
+namespace if needed (such as 'SQLAlchemy ORM'). Note that there are no case
+or other syntax constraints on the namespace names; they should be chosen
+to be maximally readable by humans (neither too short nor too long).
+
+Each namespace, then, is a dict of named statistical values, such as
+'Requests/sec' or 'Uptime'. You should choose names which will look
+good on a report: spaces and capitalization are just fine.
+
+In addition to scalars, values in a namespace MAY be a (third-layer)
+dict, or a list, called a "collection". For example, the CherryPy
+:class:`StatsTool` keeps track of what each request is doing (or has most
+recently done) in a 'Requests' collection, where each key is a thread ID; each
+value in the subdict MUST be a fourth dict (whew!) of statistical data about
+each thread. We call each subdict in the collection a "record". Similarly,
+the :class:`StatsTool` also keeps a list of slow queries, where each record
+contains data about each slow query, in order.
+
+Values in a namespace or record may also be functions, which brings us to:
+
+Extrapolation
+-------------
+
+The collection of statistical data needs to be fast, as close to unnoticeable
+as possible to the host program. That requires us to minimize I/O, for example,
+but in Python it also means we need to minimize function calls. So when you
+are designing your namespace and record values, try to insert the most basic
+scalar values you already have on hand.
+
+When it comes time to report on the gathered data, however, we usually have
+much more freedom in what we can calculate. Therefore, whenever reporting
+tools (like the provided :class:`StatsPage` CherryPy class) fetch the contents
+of `logging.statistics` for reporting, they first call
+`extrapolate_statistics` (passing the whole `statistics` dict as the only
+argument). This makes a deep copy of the statistics dict so that the
+reporting tool can both iterate over it and even change it without harming
+the original. But it also expands any functions in the dict by calling them.
+For example, you might have a 'Current Time' entry in the namespace with the
+value "lambda scope: time.time()". The "scope" parameter is the current
+namespace dict (or record, if we're currently expanding one of those
+instead), allowing you access to existing static entries. If you're truly
+evil, you can even modify more than one entry at a time.
+
+However, don't try to calculate an entry and then use its value in further
+extrapolations; the order in which the functions are called is not guaranteed.
+This can lead to a certain amount of duplicated work (or a redesign of your
+schema), but that's better than complicating the spec.
+
+After the whole thing has been extrapolated, it's time for:
+
+Reporting
+---------
+
+The :class:`StatsPage` class grabs the `logging.statistics` dict, extrapolates
+it all, and then transforms it to HTML for easy viewing. Each namespace gets
+its own header and attribute table, plus an extra table for each collection.
+This is NOT part of the statistics specification; other tools can format how
+they like.
+
+You can control which columns are output and how they are formatted by updating
+StatsPage.formatting, which is a dict that mirrors the keys and nesting of
+`logging.statistics`. The difference is that, instead of data values, it has
+formatting values. Use None for a given key to indicate to the StatsPage that a
+given column should not be output. Use a string with formatting
+(such as '%.3f') to interpolate the value(s), or use a callable (such as
+lambda v: v.isoformat()) for more advanced formatting. Any entry which is not
+mentioned in the formatting dict is output unchanged.
+
+Monitoring
+----------
+
+Although the HTML output takes pains to assign unique id's to each <td> with
+statistical data, you're probably better off fetching /cpstats/data, which
+outputs the whole (extrapolated) `logging.statistics` dict in JSON format.
+That is probably easier to parse, and doesn't have any formatting controls,
+so you get the "original" data in a consistently-serialized format.
+Note: there's no treatment yet for datetime objects. Try time.time() instead
+for now if you can. Nagios will probably thank you.
+
+Turning Collection Off
+----------------------
+
+It is recommended each namespace have an "Enabled" item which, if False,
+stops collection (but not reporting) of statistical data. Applications
+SHOULD provide controls to pause and resume collection by setting these
+entries to False or True, if present.
+
+
+Usage
+=====
+
+To collect statistics on CherryPy applications::
+
+ from cherrypy.lib import cpstats
+ appconfig['/']['tools.cpstats.on'] = True
+
+To collect statistics on your own code::
+
+ import logging
+ # Initialize the repository
+ if not hasattr(logging, 'statistics'): logging.statistics = {}
+ # Initialize my namespace
+ mystats = logging.statistics.setdefault('My Stuff', {})
+ # Initialize my namespace's scalars and collections
+ mystats.update({
+ 'Enabled': True,
+ 'Start Time': time.time(),
+ 'Important Events': 0,
+ 'Events/Second': lambda s: (
+ (s['Important Events'] / (time.time() - s['Start Time']))),
+ })
+ ...
+ for event in events:
+ ...
+ # Collect stats
+ if mystats.get('Enabled', False):
+ mystats['Important Events'] += 1
+
+To report statistics::
+
+ root.cpstats = cpstats.StatsPage()
+
+To format statistics reports::
+
+ See 'Reporting', above.
+
+"""
+
+import logging
+import os
+import sys
+import threading
+import time
+
+import cherrypy
+from cherrypy._json import json
+
+# ------------------------------- Statistics -------------------------------- #
+
if not hasattr(logging, 'statistics'):
    # Shared cross-package stats repository; created once, on first import.
    logging.statistics = {}
+
+
def extrapolate_statistics(scope):
    """Return an extrapolated copy of the given scope."""
    expanded = {}
    for key, value in scope.copy().items():
        if isinstance(value, dict):
            # Nested namespace or record: recurse.
            expanded[key] = extrapolate_statistics(value)
        elif isinstance(value, (list, tuple)):
            # Collection of records: extrapolate each one.
            expanded[key] = [extrapolate_statistics(rec) for rec in value]
        elif hasattr(value, '__call__'):
            # Callable entry: expand it against the current scope.
            expanded[key] = value(scope)
        else:
            expanded[key] = value
    return expanded
+
+
+# -------------------- CherryPy Applications Statistics --------------------- #
+
# Master namespace for CherryPy application statistics.  Scalar entries are
# mutated in place by StatsTool; lambda entries are expanded at report time
# by extrapolate_statistics (each receives this namespace dict as ``s``).
appstats = logging.statistics.setdefault('CherryPy Applications', {})
appstats.update({
    'Enabled': True,
    'Bytes Read/Request': lambda s: (
        s['Total Requests'] and
        (s['Total Bytes Read'] / float(s['Total Requests'])) or
        0.0
    ),
    'Bytes Read/Second': lambda s: s['Total Bytes Read'] / s['Uptime'](s),
    'Bytes Written/Request': lambda s: (
        s['Total Requests'] and
        (s['Total Bytes Written'] / float(s['Total Requests'])) or
        0.0
    ),
    'Bytes Written/Second': lambda s: (
        s['Total Bytes Written'] / s['Uptime'](s)
    ),
    'Current Time': lambda s: time.time(),
    'Current Requests': 0,
    'Requests/Second': lambda s: float(s['Total Requests']) / s['Uptime'](s),
    'Server Version': cherrypy.__version__,
    'Start Time': time.time(),
    'Total Bytes Read': 0,
    'Total Bytes Written': 0,
    'Total Requests': 0,
    'Total Time': 0,
    'Uptime': lambda s: time.time() - s['Start Time'],
    # Per-thread records of in-flight/most-recent requests (see StatsTool).
    'Requests': {},
})
+
+
def proc_time(s):
    """Seconds elapsed since the record's 'Start Time' entry."""
    started = s['Start Time']
    return time.time() - started
+
+
class ByteCountWrapper(object):

    """Wraps a file-like object, counting the number of bytes read."""

    def __init__(self, rfile):
        self.rfile = rfile
        self.bytes_read = 0

    def read(self, size=-1):
        """Read up to *size* bytes, adding the amount read to the counter."""
        data = self.rfile.read(size)
        self.bytes_read += len(data)
        return data

    def readline(self, size=-1):
        """Read one line (up to *size* bytes), counting the bytes read."""
        data = self.rfile.readline(size)
        self.bytes_read += len(data)
        return data

    def readlines(self, sizehint=0):
        # Shamelessly stolen from StringIO
        total = 0
        lines = []
        line = self.readline()
        while line:
            lines.append(line)
            total += len(line)
            if 0 < sizehint <= total:
                break
            line = self.readline()
        return lines

    def close(self):
        self.rfile.close()

    def __iter__(self):
        return self

    def next(self):
        # Python 2 iterator protocol; kept for backward compatibility.
        data = self.rfile.next()
        self.bytes_read += len(data)
        return data

    def __next__(self):
        # Python 3 iterator protocol.  Without this, ``__iter__`` returned
        # an object with no ``__next__``, so iterating the wrapper raised
        # TypeError under Python 3 (only the Py2 ``next`` was defined).
        data = next(self.rfile)
        self.bytes_read += len(data)
        return data
+
+
def average_uriset_time(s):
    """Mean processing time for a URI-set record; 0 when no samples."""
    count = s['Count']
    mean = s['Sum'] / count if count else 0
    # ``or 0`` mirrors the original and/or expression, which also collapses
    # a 0.0 mean down to the int 0.
    return mean or 0
+
+
+def _get_threading_ident():
+ if sys.version_info >= (3, 3):
+ return threading.get_ident()
+ return threading._get_ident()
+
+
class StatsTool(cherrypy.Tool):

    """Record various information about the current request."""

    def __init__(self):
        # record_stop is hooked at 'on_end_request' so totals include the
        # whole response time; record_start is invoked from _setup instead.
        cherrypy.Tool.__init__(self, 'on_end_request', self.record_stop)

    def _setup(self):
        """Hook this tool into cherrypy.request.

        The standard CherryPy request object will automatically call this
        method when the tool is "turned on" in config.
        """
        # Collection is skipped entirely while appstats['Enabled'] is False.
        if appstats.get('Enabled', False):
            cherrypy.Tool._setup(self)
            self.record_start()

    def record_start(self):
        """Record the beginning of a request."""
        request = cherrypy.serving.request
        # Wrap rfile once so request-body bytes are counted as they're read.
        if not hasattr(request.rfile, 'bytes_read'):
            request.rfile = ByteCountWrapper(request.rfile)
            request.body.fp = request.rfile

        r = request.remote

        appstats['Current Requests'] += 1
        appstats['Total Requests'] += 1
        # One record per worker thread; record_stop fills in the None slots.
        appstats['Requests'][_get_threading_ident()] = {
            'Bytes Read': None,
            'Bytes Written': None,
            # Use a lambda so the ip gets updated by tools.proxy later
            'Client': lambda s: '%s:%s' % (r.ip, r.port),
            'End Time': None,
            'Processing Time': proc_time,
            'Request-Line': request.request_line,
            'Response Status': None,
            'Start Time': time.time(),
        }

    def record_stop(
            self, uriset=None, slow_queries=1.0, slow_queries_count=100,
            debug=False, **kwargs):
        """Record the end of a request."""
        resp = cherrypy.serving.response
        w = appstats['Requests'][_get_threading_ident()]

        r = cherrypy.request.rfile.bytes_read
        w['Bytes Read'] = r
        appstats['Total Bytes Read'] += r

        if resp.stream:
            # Streamed responses have no known length at this point.
            w['Bytes Written'] = 'chunked'
        else:
            cl = int(resp.headers.get('Content-Length', 0))
            w['Bytes Written'] = cl
            appstats['Total Bytes Written'] += cl

        w['Response Status'] = \
            getattr(resp, 'output_status', resp.status).decode()

        w['End Time'] = time.time()
        p = w['End Time'] - w['Start Time']
        w['Processing Time'] = p
        appstats['Total Time'] += p

        appstats['Current Requests'] -= 1

        if debug:
            cherrypy.log('Stats recorded: %s' % repr(w), 'TOOLS.CPSTATS')

        if uriset:
            # Per-URI-set aggregates: min/max/count/sum plus a lazy average.
            rs = appstats.setdefault('URI Set Tracking', {})
            r = rs.setdefault(uriset, {
                'Min': None, 'Max': None, 'Count': 0, 'Sum': 0,
                'Avg': average_uriset_time})
            if r['Min'] is None or p < r['Min']:
                r['Min'] = p
            if r['Max'] is None or p > r['Max']:
                r['Max'] = p
            r['Count'] += 1
            r['Sum'] += p

        if slow_queries and p > slow_queries:
            # Bounded FIFO of request records slower than `slow_queries` sec.
            sq = appstats.setdefault('Slow Queries', [])
            sq.append(w.copy())
            if len(sq) > slow_queries_count:
                sq.pop(0)
+
+
# Register the tool so config can enable it via ``tools.cpstats.on``.
cherrypy.tools.cpstats = StatsTool()
+
+
+# ---------------------- CherryPy Statistics Reporting ---------------------- #
+
thisdir = os.path.abspath(os.path.dirname(__file__))

# Sentinel distinguishing "no formatting entry" from an explicit None.
missing = object()
+
+
def locale_date(v):
    """Format epoch seconds *v* as a locale-dependent date string (UTC)."""
    parts = time.gmtime(v)
    return time.strftime('%c', parts)
+
+
def iso_format(v):
    """Format epoch seconds *v* as 'YYYY-MM-DD HH:MM:SS' (UTC)."""
    stamp = time.gmtime(v)
    return time.strftime('%Y-%m-%d %H:%M:%S', stamp)
+
+
def pause_resume(ns):
    """Return a formatter producing Pause/Resume forms for namespace *ns*.

    The returned callable takes the namespace's current ``Enabled`` flag
    and disables whichever button does not apply.
    """
    def _pause_resume(enabled):
        if enabled:
            pause_disabled = ''
            resume_disabled = 'disabled="disabled" '
        else:
            pause_disabled = 'disabled="disabled" '
            resume_disabled = ''
        return """
            <form action="pause" method="POST" style="display:inline">
            <input type="hidden" name="namespace" value="%s" />
            <input type="submit" value="Pause" %s/>
            </form>
            <form action="resume" method="POST" style="display:inline">
            <input type="hidden" name="namespace" value="%s" />
            <input type="submit" value="Resume" %s/>
            </form>
            """ % (ns, pause_disabled, ns, resume_disabled)
    return _pause_resume
+
+
class StatsPage(object):

    """Expose ``logging.statistics`` as HTML (index) and JSON (data)."""

    # Display formats per namespace/key: None hides a column, a %-string
    # interpolates the value, and a callable transforms it.
    formatting = {
        'CherryPy Applications': {
            'Enabled': pause_resume('CherryPy Applications'),
            'Bytes Read/Request': '%.3f',
            'Bytes Read/Second': '%.3f',
            'Bytes Written/Request': '%.3f',
            'Bytes Written/Second': '%.3f',
            'Current Time': iso_format,
            'Requests/Second': '%.3f',
            'Start Time': iso_format,
            'Total Time': '%.3f',
            'Uptime': '%.3f',
            'Slow Queries': {
                'End Time': None,
                'Processing Time': '%.3f',
                'Start Time': iso_format,
            },
            'URI Set Tracking': {
                'Avg': '%.3f',
                'Max': '%.3f',
                'Min': '%.3f',
                'Sum': '%.3f',
            },
            'Requests': {
                'Bytes Read': '%s',
                'Bytes Written': '%s',
                'End Time': None,
                'Processing Time': '%.3f',
                'Start Time': None,
            },
        },
        'CherryPy WSGIServer': {
            'Enabled': pause_resume('CherryPy WSGIServer'),
            'Connections/second': '%.3f',
            'Start time': iso_format,
        },
    }

    @cherrypy.expose
    def index(self):
        """Render every namespace as an HTML page of tables."""
        # Transform the raw data into pretty output for HTML
        yield """
<html>
<head>
    <title>Statistics</title>
<style>

th, td {
    padding: 0.25em 0.5em;
    border: 1px solid #666699;
}

table {
    border-collapse: collapse;
}

table.stats1 {
    width: 100%;
}

table.stats1 th {
    font-weight: bold;
    text-align: right;
    background-color: #CCD5DD;
}

table.stats2, h2 {
    margin-left: 50px;
}

table.stats2 th {
    font-weight: bold;
    text-align: center;
    background-color: #CCD5DD;
}

</style>
</head>
<body>
"""
        for title, scalars, collections in self.get_namespaces():
            yield """
<h1>%s</h1>

<table class='stats1'>
    <tbody>
""" % title
            # Scalars are laid out three key/value pairs per table row.
            for i, (key, value) in enumerate(scalars):
                colnum = i % 3
                if colnum == 0:
                    yield """
        <tr>"""
                yield (
                    """
            <th>%(key)s</th><td id='%(title)s-%(key)s'>%(value)s</td>""" %
                    vars()
                )
                if colnum == 2:
                    yield """
        </tr>"""

            # Pad the final, partially-filled row with empty cells.
            # NOTE(review): ``colnum`` is unbound here when ``scalars`` is
            # empty -- confirm namespaces always carry at least one scalar.
            if colnum == 0:
                yield """
            <th></th><td></td>
            <th></th><td></td>
        </tr>"""
            elif colnum == 1:
                yield """
            <th></th><td></td>
        </tr>"""
            yield """
    </tbody>
</table>"""

            # One secondary table per collection (dict or list of records).
            for subtitle, headers, subrows in collections:
                yield """
<h2>%s</h2>
<table class='stats2'>
    <thead>
        <tr>""" % subtitle
                for key in headers:
                    yield """
            <th>%s</th>""" % key
                yield """
        </tr>
    </thead>
    <tbody>"""
                for subrow in subrows:
                    yield """
        <tr>"""
                    for value in subrow:
                        yield """
            <td>%s</td>""" % value
                    yield """
        </tr>"""
                yield """
    </tbody>
</table>"""
        yield """
</body>
</html>
"""

    def get_namespaces(self):
        """Yield (title, scalars, collections) for each namespace."""
        # Work on an extrapolated deep copy so callables are expanded and
        # the live statistics dict is never mutated by reporting.
        s = extrapolate_statistics(logging.statistics)
        for title, ns in sorted(s.items()):
            scalars = []
            collections = []
            ns_fmt = self.formatting.get(title, {})
            for k, v in sorted(ns.items()):
                fmt = ns_fmt.get(k, {})
                if isinstance(v, dict):
                    headers, subrows = self.get_dict_collection(v, fmt)
                    collections.append((k, ['ID'] + headers, subrows))
                elif isinstance(v, (list, tuple)):
                    headers, subrows = self.get_list_collection(v, fmt)
                    collections.append((k, headers, subrows))
                else:
                    format = ns_fmt.get(k, missing)
                    if format is None:
                        # Don't output this column.
                        continue
                    if hasattr(format, '__call__'):
                        v = format(v)
                    elif format is not missing:
                        v = format % v
                    scalars.append((k, v))
            yield title, scalars, collections

    def get_dict_collection(self, v, formatting):
        """Return ([headers], [rows]) for the given collection."""
        # E.g., the 'Requests' dict.
        headers = []
        vals = v.values()
        for record in vals:
            for k3 in record:
                format = formatting.get(k3, missing)
                if format is None:
                    # Don't output this column.
                    continue
                if k3 not in headers:
                    headers.append(k3)
        headers.sort()

        subrows = []
        for k2, record in sorted(v.items()):
            # Each row starts with the record's key (the 'ID' column).
            subrow = [k2]
            for k3 in headers:
                v3 = record.get(k3, '')
                format = formatting.get(k3, missing)
                if format is None:
                    # Don't output this column.
                    continue
                if hasattr(format, '__call__'):
                    v3 = format(v3)
                elif format is not missing:
                    v3 = format % v3
                subrow.append(v3)
            subrows.append(subrow)

        return headers, subrows

    def get_list_collection(self, v, formatting):
        """Return ([headers], [subrows]) for the given collection."""
        # E.g., the 'Slow Queries' list.
        headers = []
        for record in v:
            for k3 in record:
                format = formatting.get(k3, missing)
                if format is None:
                    # Don't output this column.
                    continue
                if k3 not in headers:
                    headers.append(k3)
        headers.sort()

        subrows = []
        for record in v:
            subrow = []
            for k3 in headers:
                v3 = record.get(k3, '')
                format = formatting.get(k3, missing)
                if format is None:
                    # Don't output this column.
                    continue
                if hasattr(format, '__call__'):
                    v3 = format(v3)
                elif format is not missing:
                    v3 = format % v3
                subrow.append(v3)
            subrows.append(subrow)

        return headers, subrows

    # /data is only exposed when the json codec is available.
    if json is not None:
        @cherrypy.expose
        def data(self):
            """Return the extrapolated statistics dict as JSON."""
            s = extrapolate_statistics(logging.statistics)
            cherrypy.response.headers['Content-Type'] = 'application/json'
            return json.dumps(s, sort_keys=True, indent=4).encode('utf-8')

    @cherrypy.expose
    def pause(self, namespace):
        """Disable data collection for *namespace* (POST only)."""
        logging.statistics.get(namespace, {})['Enabled'] = False
        raise cherrypy.HTTPRedirect('./')
    pause.cp_config = {'tools.allow.on': True,
                       'tools.allow.methods': ['POST']}

    @cherrypy.expose
    def resume(self, namespace):
        """Re-enable data collection for *namespace* (POST only)."""
        logging.statistics.get(namespace, {})['Enabled'] = True
        raise cherrypy.HTTPRedirect('./')
    resume.cp_config = {'tools.allow.on': True,
                        'tools.allow.methods': ['POST']}
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/cptools.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/cptools.py
new file mode 100644
index 0000000000000000000000000000000000000000..613a8995130da66edbde2e109d349755dd396b79
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/cptools.py
@@ -0,0 +1,637 @@
+"""Functions for builtin CherryPy tools."""
+
+import logging
+import re
+from hashlib import md5
+import urllib.parse
+
+import cherrypy
+from cherrypy._cpcompat import text_or_bytes
+from cherrypy.lib import httputil as _httputil
+from cherrypy.lib import is_iterator
+
+
+# Conditional HTTP request support #
+
def validate_etags(autotags=False, debug=False):
    """Validate the current ETag against If-Match, If-None-Match headers.

    If autotags is True, an ETag response-header value will be provided
    from an MD5 hash of the response body (unless some other code has
    already provided an ETag header). If False (the default), the ETag
    will not be automatic.

    WARNING: the autotags feature is not designed for URL's which allow
    methods other than GET. For example, if a POST to the same URL returns
    no content, the automatic ETag will be incorrect, breaking a fundamental
    use for entity tags in a possibly destructive fashion. Likewise, if you
    raise 304 Not Modified, the response body will be empty, the ETag hash
    will be incorrect, and your application will break.
    See :rfc:`2616` Section 14.24.
    """
    response = cherrypy.serving.response

    # Guard against being run twice.
    if hasattr(response, 'ETag'):
        return

    status, reason, msg = _httputil.valid_status(response.status)

    etag = response.headers.get('ETag')

    # Automatic ETag generation. See warning in docstring.
    if etag:
        if debug:
            cherrypy.log('ETag already set: %s' % etag, 'TOOLS.ETAGS')
    elif not autotags:
        if debug:
            cherrypy.log('Autotags off', 'TOOLS.ETAGS')
    elif status != 200:
        if debug:
            cherrypy.log('Status not 200', 'TOOLS.ETAGS')
    else:
        # Hash the (collapsed) response body into a quoted ETag value.
        etag = response.collapse_body()
        etag = '"%s"' % md5(etag).hexdigest()
        if debug:
            cherrypy.log('Setting ETag: %s' % etag, 'TOOLS.ETAGS')
        response.headers['ETag'] = etag

    # Marker attribute doubling as the run-twice guard above.
    response.ETag = etag

    # "If the request would, without the If-Match header field, result in
    # anything other than a 2xx or 412 status, then the If-Match header
    # MUST be ignored."
    if debug:
        cherrypy.log('Status: %s' % status, 'TOOLS.ETAGS')
    if status >= 200 and status <= 299:
        request = cherrypy.serving.request

        conditions = request.headers.elements('If-Match') or []
        conditions = [str(x) for x in conditions]
        if debug:
            cherrypy.log('If-Match conditions: %s' % repr(conditions),
                         'TOOLS.ETAGS')
        if conditions and not (conditions == ['*'] or etag in conditions):
            raise cherrypy.HTTPError(412, 'If-Match failed: ETag %r did '
                                     'not match %r' % (etag, conditions))

        conditions = request.headers.elements('If-None-Match') or []
        conditions = [str(x) for x in conditions]
        if debug:
            cherrypy.log('If-None-Match conditions: %s' % repr(conditions),
                         'TOOLS.ETAGS')
        if conditions == ['*'] or etag in conditions:
            if debug:
                cherrypy.log('request.method: %s' %
                             request.method, 'TOOLS.ETAGS')
            # Safe methods get 304 Not Modified; others get 412 per RFC 2616.
            if request.method in ('GET', 'HEAD'):
                raise cherrypy.HTTPRedirect([], 304)
            else:
                raise cherrypy.HTTPError(412, 'If-None-Match failed: ETag %r '
                                         'matched %r' % (etag, conditions))
+
+
def validate_since():
    """Validate the current Last-Modified against If-Modified-Since headers.

    If no code has set the Last-Modified response header, then no validation
    will be performed.
    """
    response = cherrypy.serving.response
    lastmod = response.headers.get('Last-Modified')
    if lastmod:
        status, reason, msg = _httputil.valid_status(response.status)

        request = cherrypy.serving.request

        # If-Unmodified-Since: any mismatch on a 2xx/412 response -> 412.
        since = request.headers.get('If-Unmodified-Since')
        if since and since != lastmod:
            if (status >= 200 and status <= 299) or status == 412:
                raise cherrypy.HTTPError(412)

        # If-Modified-Since: an exact match short-circuits safe methods
        # to 304 Not Modified; unsafe methods get 412.
        since = request.headers.get('If-Modified-Since')
        if since and since == lastmod:
            if (status >= 200 and status <= 299) or status == 304:
                if request.method in ('GET', 'HEAD'):
                    raise cherrypy.HTTPRedirect([], 304)
                else:
                    raise cherrypy.HTTPError(412)
+
+
+# Tool code #
+
def allow(methods=None, debug=False):
    """Raise 405 if request.method not in methods (default ['GET', 'HEAD']).

    The given methods are case-insensitive, and may be in any order.
    If only one method is allowed, you may supply a single string;
    if more than one, supply a list of strings.

    Regardless of whether the current method is allowed or not, this
    also emits an 'Allow' response header, containing the given methods.
    """
    # Normalize to an upper-cased list, dropping falsy entries.
    if isinstance(methods, (tuple, list)):
        allowed = [m.upper() for m in methods if m]
    else:
        allowed = [methods.upper()] if methods else []
    if not allowed:
        allowed = ['GET', 'HEAD']
    elif 'GET' in allowed and 'HEAD' not in allowed:
        # HEAD is implied whenever GET is allowed.
        allowed.append('HEAD')

    cherrypy.response.headers['Allow'] = ', '.join(allowed)
    method = cherrypy.request.method
    if method in allowed:
        if debug:
            cherrypy.log('request.method %r in methods %r' %
                         (method, allowed), 'TOOLS.ALLOW')
    else:
        if debug:
            cherrypy.log('request.method %r not in methods %r' %
                         (method, allowed), 'TOOLS.ALLOW')
        raise cherrypy.HTTPError(405)
+
+
def proxy(base=None, local='X-Forwarded-Host', remote='X-Forwarded-For',
          scheme='X-Forwarded-Proto', debug=False):
    """Change the base URL (scheme://host[:port][/path]).

    For running a CP server behind Apache, lighttpd, or other HTTP server.

    For Apache and lighttpd, you should leave the 'local' argument at the
    default value of 'X-Forwarded-Host'. For Squid, you probably want to set
    tools.proxy.local = 'Origin'.

    If you want the new request.base to include path info (not just the host),
    you must explicitly set base to the full base path, and ALSO set 'local'
    to '', so that the X-Forwarded-Host request header (which never includes
    path info) does not override it. Regardless, the value for 'base' MUST
    NOT end in a slash.

    cherrypy.request.remote.ip (the IP address of the client) will be
    rewritten if the header specified by the 'remote' arg is valid.
    By default, 'remote' is set to 'X-Forwarded-For'. If you do not
    want to rewrite remote.ip, set the 'remote' arg to an empty string.
    """

    request = cherrypy.serving.request

    # Resolve the scheme from the configured header, falling back to the
    # scheme of the current request.base.
    if scheme:
        s = request.headers.get(scheme, None)
        if debug:
            cherrypy.log('Testing scheme %r:%r' % (scheme, s), 'TOOLS.PROXY')
        if s == 'on' and 'ssl' in scheme.lower():
            # This handles e.g. webfaction's 'X-Forwarded-Ssl: on' header
            scheme = 'https'
        else:
            # This is for lighttpd/pound/Mongrel's 'X-Forwarded-Proto: https'
            scheme = s
    if not scheme:
        scheme = request.base[:request.base.find('://')]

    # Resolve the host: explicit 'base' arg < 'local' header < Host header.
    if local:
        lbase = request.headers.get(local, None)
        if debug:
            cherrypy.log('Testing local %r:%r' % (local, lbase), 'TOOLS.PROXY')
        if lbase is not None:
            base = lbase.split(',')[0]
    if not base:
        default = urllib.parse.urlparse(request.base).netloc
        base = request.headers.get('Host', default)

    if base.find('://') == -1:
        # add http:// or https:// if needed
        base = scheme + '://' + base

    request.base = base

    if remote:
        xff = request.headers.get(remote)
        if debug:
            cherrypy.log('Testing remote %r:%r' % (remote, xff), 'TOOLS.PROXY')
        if xff:
            if remote == 'X-Forwarded-For':
                # Grab the first IP in a comma-separated list. Ref #1268.
                xff = next(ip.strip() for ip in xff.split(','))
            request.remote.ip = xff
+
+
def ignore_headers(headers=('Range',), debug=False):
    """Delete request headers whose field names are included in 'headers'.

    This is a useful tool for working behind certain HTTP servers;
    for example, Apache duplicates the work that CP does for 'Range'
    headers, and will doubly-truncate the response.
    """
    request = cherrypy.serving.request
    for header_name in headers:
        if header_name not in request.headers:
            continue
        if debug:
            cherrypy.log('Ignoring request header %r' % header_name,
                         'TOOLS.IGNORE_HEADERS')
        del request.headers[header_name]
+
+
def response_headers(headers=None, debug=False):
    """Set headers on the response."""
    if debug:
        cherrypy.log('Setting response headers: %s' % repr(headers),
                     'TOOLS.RESPONSE_HEADERS')
    response = cherrypy.serving.response
    for name, value in (headers or []):
        response.headers[name] = value


# Run this tool even when the page handler raised an error.
response_headers.failsafe = True
+
+
def referer(pattern, accept=True, accept_missing=False, error=403,
            message='Forbidden Referer header.', debug=False):
    """Raise HTTPError if Referer header does/does not match the given pattern.

    pattern
        A regular expression pattern to test against the Referer.

    accept
        If True, the Referer must match the pattern; if False,
        the Referer must NOT match the pattern.

    accept_missing
        If True, permit requests with no Referer header.

    error
        The HTTP error code to return to the client on failure.

    message
        A string to include in the response body on failure.

    """
    try:
        # KeyError (no Referer header) deliberately routes to the
        # accept_missing handling below.
        ref = cherrypy.serving.request.headers['Referer']
        match = bool(re.match(pattern, ref))
        if debug:
            cherrypy.log('Referer %r matches %r' % (ref, pattern),
                         'TOOLS.REFERER')
        # accept==match covers both polarities: match required, or forbidden.
        if accept == match:
            return
    except KeyError:
        if debug:
            cherrypy.log('No Referer header', 'TOOLS.REFERER')
        if accept_missing:
            return

    raise cherrypy.HTTPError(error, message)
+
+
class SessionAuth(object):

    """Assert that the user is logged in."""

    # Session key under which the authenticated username is stored.
    session_key = 'username'
    # Set True to emit TOOLS.SESSAUTH debug log messages.
    debug = False

    def check_username_and_password(self, username, password):
        """Validate credentials; return an error-message string on failure.

        A falsy return means success (see do_login).  Subclasses override.
        """
        pass

    def anonymous(self):
        """Provide a temporary user name for anonymous users."""
        pass

    def on_login(self, username):
        """Hook run after a successful do_login.  Subclasses override."""
        pass

    def on_logout(self, username):
        """Hook run after do_logout.  Subclasses override."""
        pass

    def on_check(self, username):
        """Hook run after each successful do_check.  Subclasses override."""
        pass

    def login_screen(self, from_page='..', username='', error_msg='',
                     **kwargs):
        # Render a minimal HTML login form, UTF-8 encoded.
        return (str("""<html><body>
Message: %(error_msg)s
<form method="post" action="do_login">
    Login: <input type="text" name="username" value="%(username)s" size="10" />
    <br />
    Password: <input type="password" name="password" size="10" />
    <br />
    <input type="hidden" name="from_page" value="%(from_page)s" />
    <br />
    <input type="submit" />
</form>
</body></html>""") % vars()).encode('utf-8')

    def do_login(self, username, password, from_page='..', **kwargs):
        """Login. May raise redirect, or return True if request handled."""
        response = cherrypy.serving.response
        error_msg = self.check_username_and_password(username, password)
        if error_msg:
            # Re-render the login form with the error message filled in.
            body = self.login_screen(from_page, username, error_msg)
            response.body = body
            if 'Content-Length' in response.headers:
                # Delete Content-Length header so finalize() recalcs it.
                del response.headers['Content-Length']
            return True
        else:
            cherrypy.serving.request.login = username
            cherrypy.session[self.session_key] = username
            self.on_login(username)
            raise cherrypy.HTTPRedirect(from_page or '/')

    def do_logout(self, from_page='..', **kwargs):
        """Logout. May raise redirect, or return True if request handled."""
        sess = cherrypy.session
        username = sess.get(self.session_key)
        sess[self.session_key] = None
        if username:
            cherrypy.serving.request.login = None
            self.on_logout(username)
        raise cherrypy.HTTPRedirect(from_page)

    def do_check(self):
        """Assert username. Raise redirect, or return True if request handled.
        """
        sess = cherrypy.session
        request = cherrypy.serving.request
        response = cherrypy.serving.response

        username = sess.get(self.session_key)
        if not username:
            # Give anonymous() a chance to supply a stand-in username.
            sess[self.session_key] = username = self.anonymous()
            self._debug_message('No session[username], trying anonymous')
        if not username:
            url = cherrypy.url(qs=request.query_string)
            self._debug_message(
                'No username, routing to login_screen with from_page %(url)r',
                locals(),
            )
            response.body = self.login_screen(url)
            if 'Content-Length' in response.headers:
                # Delete Content-Length header so finalize() recalcs it.
                del response.headers['Content-Length']
            return True
        self._debug_message('Setting request.login to %(username)r', locals())
        request.login = username
        self.on_check(username)

    def _debug_message(self, template, context={}):
        # No-op unless self.debug is True.
        if not self.debug:
            return
        cherrypy.log(template % context, 'TOOLS.SESSAUTH')

    def run(self):
        """Dispatch on the request path: login/logout endpoints or check."""
        request = cherrypy.serving.request
        response = cherrypy.serving.response

        path = request.path_info
        if path.endswith('login_screen'):
            self._debug_message('routing %(path)r to login_screen', locals())
            response.body = self.login_screen()
            return True
        elif path.endswith('do_login'):
            if request.method != 'POST':
                response.headers['Allow'] = 'POST'
                self._debug_message('do_login requires POST')
                raise cherrypy.HTTPError(405)
            self._debug_message('routing %(path)r to do_login', locals())
            return self.do_login(**request.params)
        elif path.endswith('do_logout'):
            if request.method != 'POST':
                response.headers['Allow'] = 'POST'
                raise cherrypy.HTTPError(405)
            self._debug_message('routing %(path)r to do_logout', locals())
            return self.do_logout(**request.params)
        else:
            self._debug_message('No special path, running do_check')
            return self.do_check()
+
+
+def session_auth(**kwargs):
+ """Session authentication hook.
+
+ Any attribute of the SessionAuth class may be overridden
+ via a keyword arg to this function:
+
+ """ + '\n '.join(
+ '{!s}: {!s}'.format(k, type(getattr(SessionAuth, k)).__name__)
+ for k in dir(SessionAuth)
+ if not k.startswith('__')
+ )
+ sa = SessionAuth()
+ for k, v in kwargs.items():
+ setattr(sa, k, v)
+ return sa.run()
+
+
+def log_traceback(severity=logging.ERROR, debug=False):
+ """Write the last error's traceback to the cherrypy error log."""
+ cherrypy.log('', 'HTTP', severity=severity, traceback=True)
+
+
+def log_request_headers(debug=False):
+ """Write request headers to the cherrypy error log."""
+ h = [' %s: %s' % (k, v) for k, v in cherrypy.serving.request.header_list]
+ cherrypy.log('\nRequest Headers:\n' + '\n'.join(h), 'HTTP')
+
+
+def log_hooks(debug=False):
+ """Write request.hooks to the cherrypy error log."""
+ request = cherrypy.serving.request
+
+ msg = []
+ # Sort by the standard points if possible.
+ from cherrypy import _cprequest
+ points = _cprequest.hookpoints
+ for k in request.hooks.keys():
+ if k not in points:
+ points.append(k)
+
+ for k in points:
+ msg.append(' %s:' % k)
+ v = request.hooks.get(k, [])
+ v.sort()
+ for h in v:
+ msg.append(' %r' % h)
+ cherrypy.log('\nRequest Hooks for ' + cherrypy.url() +
+ ':\n' + '\n'.join(msg), 'HTTP')
+
+
+def redirect(url='', internal=True, debug=False):
+ """Raise InternalRedirect or HTTPRedirect to the given url."""
+ if debug:
+ cherrypy.log('Redirecting %sto: %s' %
+ ({True: 'internal ', False: ''}[internal], url),
+ 'TOOLS.REDIRECT')
+ if internal:
+ raise cherrypy.InternalRedirect(url)
+ else:
+ raise cherrypy.HTTPRedirect(url)
+
+
+def trailing_slash(missing=True, extra=False, status=None, debug=False):
+ """Redirect if path_info has (missing|extra) trailing slash."""
+ request = cherrypy.serving.request
+ pi = request.path_info
+
+ if debug:
+ cherrypy.log('is_index: %r, missing: %r, extra: %r, path_info: %r' %
+ (request.is_index, missing, extra, pi),
+ 'TOOLS.TRAILING_SLASH')
+ if request.is_index is True:
+ if missing:
+ if not pi.endswith('/'):
+ new_url = cherrypy.url(pi + '/', request.query_string)
+ raise cherrypy.HTTPRedirect(new_url, status=status or 301)
+ elif request.is_index is False:
+ if extra:
+ # If pi == '/', don't redirect to ''!
+ if pi.endswith('/') and pi != '/':
+ new_url = cherrypy.url(pi[:-1], request.query_string)
+ raise cherrypy.HTTPRedirect(new_url, status=status or 301)
+
+
+def flatten(debug=False):
+ """Wrap response.body in a generator that recursively iterates over body.
+
+ This allows cherrypy.response.body to consist of 'nested generators';
+ that is, a set of generators that yield generators.
+ """
+ def flattener(input):
+ numchunks = 0
+ for x in input:
+ if not is_iterator(x):
+ numchunks += 1
+ yield x
+ else:
+ for y in flattener(x):
+ numchunks += 1
+ yield y
+ if debug:
+ cherrypy.log('Flattened %d chunks' % numchunks, 'TOOLS.FLATTEN')
+ response = cherrypy.serving.response
+ response.body = flattener(response.body)
+
+
+def accept(media=None, debug=False):
+ """Return the client's preferred media-type (from the given Content-Types).
+
+ If 'media' is None (the default), no test will be performed.
+
+ If 'media' is provided, it should be the Content-Type value (as a string)
+ or values (as a list or tuple of strings) which the current resource
+ can emit. The client's acceptable media ranges (as declared in the
+ Accept request header) will be matched in order to these Content-Type
+ values; the first such string is returned. That is, the return value
+ will always be one of the strings provided in the 'media' arg (or None
+ if 'media' is None).
+
+ If no match is found, then HTTPError 406 (Not Acceptable) is raised.
+ Note that most web browsers send */* as a (low-quality) acceptable
+ media range, which should match any Content-Type. In addition, "...if
+ no Accept header field is present, then it is assumed that the client
+ accepts all media types."
+
+ Matching types are checked in order of client preference first,
+ and then in the order of the given 'media' values.
+
+ Note that this function does not honor accept-params (other than "q").
+ """
+ if not media:
+ return
+ if isinstance(media, text_or_bytes):
+ media = [media]
+ request = cherrypy.serving.request
+
+ # Parse the Accept request header, and try to match one
+ # of the requested media-ranges (in order of preference).
+ ranges = request.headers.elements('Accept')
+ if not ranges:
+ # Any media type is acceptable.
+ if debug:
+ cherrypy.log('No Accept header elements', 'TOOLS.ACCEPT')
+ return media[0]
+ else:
+ # Note that 'ranges' is sorted in order of preference
+ for element in ranges:
+ if element.qvalue > 0:
+ if element.value == '*/*':
+ # Matches any type or subtype
+ if debug:
+ cherrypy.log('Match due to */*', 'TOOLS.ACCEPT')
+ return media[0]
+ elif element.value.endswith('/*'):
+ # Matches any subtype
+ mtype = element.value[:-1] # Keep the slash
+ for m in media:
+ if m.startswith(mtype):
+ if debug:
+ cherrypy.log('Match due to %s' % element.value,
+ 'TOOLS.ACCEPT')
+ return m
+ else:
+ # Matches exact value
+ if element.value in media:
+ if debug:
+ cherrypy.log('Match due to %s' % element.value,
+ 'TOOLS.ACCEPT')
+ return element.value
+
+ # No suitable media-range found.
+ ah = request.headers.get('Accept')
+ if ah is None:
+ msg = 'Your client did not send an Accept header.'
+ else:
+ msg = 'Your client sent this Accept header: %s.' % ah
+ msg += (' But this resource only emits these media types: %s.' %
+ ', '.join(media))
+ raise cherrypy.HTTPError(406, msg)
+
+
+class MonitoredHeaderMap(_httputil.HeaderMap):
+
+ def transform_key(self, key):
+ self.accessed_headers.add(key)
+ return super(MonitoredHeaderMap, self).transform_key(key)
+
+ def __init__(self):
+ self.accessed_headers = set()
+ super(MonitoredHeaderMap, self).__init__()
+
+
+def autovary(ignore=None, debug=False):
+ """Auto-populate the Vary response header based on request.header access.
+ """
+ request = cherrypy.serving.request
+
+ req_h = request.headers
+ request.headers = MonitoredHeaderMap()
+ request.headers.update(req_h)
+ if ignore is None:
+ ignore = set(['Content-Disposition', 'Content-Length', 'Content-Type'])
+
+ def set_response_header():
+ resp_h = cherrypy.serving.response.headers
+ v = set([e.value for e in resp_h.elements('Vary')])
+ if debug:
+ cherrypy.log(
+ 'Accessed headers: %s' % request.headers.accessed_headers,
+ 'TOOLS.AUTOVARY')
+ v = v.union(request.headers.accessed_headers)
+ v = v.difference(ignore)
+ v = list(v)
+ v.sort()
+ resp_h['Vary'] = ', '.join(v)
+ request.hooks.attach('before_finalize', set_response_header, 95)
+
+
+def convert_params(exception=ValueError, error=400):
+ """Convert request params based on function annotations, with error handling.
+
+ exception
+ Exception class to catch.
+
+ status
+ The HTTP error code to return to the client on failure.
+ """
+ request = cherrypy.serving.request
+ types = request.handler.callable.__annotations__
+ with cherrypy.HTTPError.handle(exception, error):
+ for key in set(types).intersection(request.params):
+ request.params[key] = types[key](request.params[key])
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/encoding.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/encoding.py
new file mode 100644
index 0000000000000000000000000000000000000000..c2c478a507f1360d4880427244166b8f67acd6a8
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/encoding.py
@@ -0,0 +1,454 @@
+import struct
+import time
+import io
+
+import cherrypy
+from cherrypy._cpcompat import text_or_bytes
+from cherrypy.lib import file_generator
+from cherrypy.lib import is_closable_iterator
+from cherrypy.lib import set_vary_header
+
+
+_COMPRESSION_LEVEL_FAST = 1
+_COMPRESSION_LEVEL_BEST = 9
+
+
+def decode(encoding=None, default_encoding='utf-8'):
+ """Replace or extend the list of charsets used to decode a request entity.
+
+ Either argument may be a single string or a list of strings.
+
+ encoding
+ If not None, restricts the set of charsets attempted while decoding
+ a request entity to the given set (even if a different charset is
+ given in the Content-Type request header).
+
+ default_encoding
+ Only in effect if the 'encoding' argument is not given.
+ If given, the set of charsets attempted while decoding a request
+ entity is *extended* with the given value(s).
+
+ """
+ body = cherrypy.request.body
+ if encoding is not None:
+ if not isinstance(encoding, list):
+ encoding = [encoding]
+ body.attempt_charsets = encoding
+ elif default_encoding:
+ if not isinstance(default_encoding, list):
+ default_encoding = [default_encoding]
+ body.attempt_charsets = body.attempt_charsets + default_encoding
+
+
+class UTF8StreamEncoder:
+ def __init__(self, iterator):
+ self._iterator = iterator
+
+ def __iter__(self):
+ return self
+
+ def next(self):
+ return self.__next__()
+
+ def __next__(self):
+ res = next(self._iterator)
+ if isinstance(res, str):
+ res = res.encode('utf-8')
+ return res
+
+ def close(self):
+ if is_closable_iterator(self._iterator):
+ self._iterator.close()
+
+ def __getattr__(self, attr):
+ if attr.startswith('__'):
+ raise AttributeError(self, attr)
+ return getattr(self._iterator, attr)
+
+
+class ResponseEncoder:
+
+ default_encoding = 'utf-8'
+ failmsg = 'Response body could not be encoded with %r.'
+ encoding = None
+ errors = 'strict'
+ text_only = True
+ add_charset = True
+ debug = False
+
+ def __init__(self, **kwargs):
+ for k, v in kwargs.items():
+ setattr(self, k, v)
+
+ self.attempted_charsets = set()
+ request = cherrypy.serving.request
+ if request.handler is not None:
+ # Replace request.handler with self
+ if self.debug:
+ cherrypy.log('Replacing request.handler', 'TOOLS.ENCODE')
+ self.oldhandler = request.handler
+ request.handler = self
+
+ def encode_stream(self, encoding):
+ """Encode a streaming response body.
+
+ Use a generator wrapper, and just pray it works as the stream is
+ being written out.
+ """
+ if encoding in self.attempted_charsets:
+ return False
+ self.attempted_charsets.add(encoding)
+
+ def encoder(body):
+ for chunk in body:
+ if isinstance(chunk, str):
+ chunk = chunk.encode(encoding, self.errors)
+ yield chunk
+ self.body = encoder(self.body)
+ return True
+
+ def encode_string(self, encoding):
+ """Encode a buffered response body."""
+ if encoding in self.attempted_charsets:
+ return False
+ self.attempted_charsets.add(encoding)
+ body = []
+ for chunk in self.body:
+ if isinstance(chunk, str):
+ try:
+ chunk = chunk.encode(encoding, self.errors)
+ except (LookupError, UnicodeError):
+ return False
+ body.append(chunk)
+ self.body = body
+ return True
+
+ def find_acceptable_charset(self):
+ request = cherrypy.serving.request
+ response = cherrypy.serving.response
+
+ if self.debug:
+ cherrypy.log('response.stream %r' %
+ response.stream, 'TOOLS.ENCODE')
+ if response.stream:
+ encoder = self.encode_stream
+ else:
+ encoder = self.encode_string
+ if 'Content-Length' in response.headers:
+ # Delete Content-Length header so finalize() recalcs it.
+ # Encoded strings may be of different lengths from their
+ # unicode equivalents, and even from each other. For example:
+ # >>> t = u"\u7007\u3040"
+ # >>> len(t)
+ # 2
+ # >>> len(t.encode("UTF-8"))
+ # 6
+ # >>> len(t.encode("utf7"))
+ # 8
+ del response.headers['Content-Length']
+
+ # Parse the Accept-Charset request header, and try to provide one
+ # of the requested charsets (in order of user preference).
+ encs = request.headers.elements('Accept-Charset')
+ charsets = [enc.value.lower() for enc in encs]
+ if self.debug:
+ cherrypy.log('charsets %s' % repr(charsets), 'TOOLS.ENCODE')
+
+ if self.encoding is not None:
+ # If specified, force this encoding to be used, or fail.
+ encoding = self.encoding.lower()
+ if self.debug:
+ cherrypy.log('Specified encoding %r' %
+ encoding, 'TOOLS.ENCODE')
+ if (not charsets) or '*' in charsets or encoding in charsets:
+ if self.debug:
+ cherrypy.log('Attempting encoding %r' %
+ encoding, 'TOOLS.ENCODE')
+ if encoder(encoding):
+ return encoding
+ else:
+ if not encs:
+ if self.debug:
+ cherrypy.log('Attempting default encoding %r' %
+ self.default_encoding, 'TOOLS.ENCODE')
+ # Any character-set is acceptable.
+ if encoder(self.default_encoding):
+ return self.default_encoding
+ else:
+ raise cherrypy.HTTPError(500, self.failmsg %
+ self.default_encoding)
+ else:
+ for element in encs:
+ if element.qvalue > 0:
+ if element.value == '*':
+ # Matches any charset. Try our default.
+ if self.debug:
+ cherrypy.log('Attempting default encoding due '
+ 'to %r' % element, 'TOOLS.ENCODE')
+ if encoder(self.default_encoding):
+ return self.default_encoding
+ else:
+ encoding = element.value
+ if self.debug:
+ cherrypy.log('Attempting encoding %s (qvalue >'
+ '0)' % element, 'TOOLS.ENCODE')
+ if encoder(encoding):
+ return encoding
+
+ if '*' not in charsets:
+ # If no "*" is present in an Accept-Charset field, then all
+ # character sets not explicitly mentioned get a quality
+ # value of 0, except for ISO-8859-1, which gets a quality
+ # value of 1 if not explicitly mentioned.
+ iso = 'iso-8859-1'
+ if iso not in charsets:
+ if self.debug:
+ cherrypy.log('Attempting ISO-8859-1 encoding',
+ 'TOOLS.ENCODE')
+ if encoder(iso):
+ return iso
+
+ # No suitable encoding found.
+ ac = request.headers.get('Accept-Charset')
+ if ac is None:
+ msg = 'Your client did not send an Accept-Charset header.'
+ else:
+ msg = 'Your client sent this Accept-Charset header: %s.' % ac
+ _charsets = ', '.join(sorted(self.attempted_charsets))
+ msg += ' We tried these charsets: %s.' % (_charsets,)
+ raise cherrypy.HTTPError(406, msg)
+
+ def __call__(self, *args, **kwargs):
+ response = cherrypy.serving.response
+ self.body = self.oldhandler(*args, **kwargs)
+
+ self.body = prepare_iter(self.body)
+
+ ct = response.headers.elements('Content-Type')
+ if self.debug:
+ cherrypy.log('Content-Type: %r' % [str(h)
+ for h in ct], 'TOOLS.ENCODE')
+ if ct and self.add_charset:
+ ct = ct[0]
+ if self.text_only:
+ if ct.value.lower().startswith('text/'):
+ if self.debug:
+ cherrypy.log(
+ 'Content-Type %s starts with "text/"' % ct,
+ 'TOOLS.ENCODE')
+ do_find = True
+ else:
+ if self.debug:
+ cherrypy.log('Not finding because Content-Type %s '
+ 'does not start with "text/"' % ct,
+ 'TOOLS.ENCODE')
+ do_find = False
+ else:
+ if self.debug:
+ cherrypy.log('Finding because not text_only',
+ 'TOOLS.ENCODE')
+ do_find = True
+
+ if do_find:
+ # Set "charset=..." param on response Content-Type header
+ ct.params['charset'] = self.find_acceptable_charset()
+ if self.debug:
+ cherrypy.log('Setting Content-Type %s' % ct,
+ 'TOOLS.ENCODE')
+ response.headers['Content-Type'] = str(ct)
+
+ return self.body
+
+
+def prepare_iter(value):
+ """
+ Ensure response body is iterable and resolves to False when empty.
+ """
+ if isinstance(value, text_or_bytes):
+ # strings get wrapped in a list because iterating over a single
+ # item list is much faster than iterating over every character
+ # in a long string.
+ if value:
+ value = [value]
+ else:
+ # [''] doesn't evaluate to False, so replace it with [].
+ value = []
+ # Don't use isinstance here; io.IOBase which has an ABC takes
+ # 1000 times as long as, say, isinstance(value, str)
+ elif hasattr(value, 'read'):
+ value = file_generator(value)
+ elif value is None:
+ value = []
+ return value
+
+
+# GZIP
+
+
+def compress(body, compress_level):
+ """Compress 'body' at the given compress_level."""
+ import zlib
+
+ # See https://tools.ietf.org/html/rfc1952
+ yield b'\x1f\x8b' # ID1 and ID2: gzip marker
+ yield b'\x08' # CM: compression method
+ yield b'\x00' # FLG: none set
+ # MTIME: 4 bytes
+ yield struct.pack('<L', int(time.time()) & int('FFFFFFFF', 16))
+
+ # RFC 1952, section 2.3.1:
+ #
+ # XFL (eXtra FLags)
+ # These flags are available for use by specific compression
+ # methods. The "deflate" method (CM = 8) sets these flags as
+ # follows:
+ #
+ # XFL = 2 - compressor used maximum compression,
+ # slowest algorithm
+ # XFL = 4 - compressor used fastest algorithm
+ if compress_level == _COMPRESSION_LEVEL_BEST:
+ yield b'\x02' # XFL: max compression, slowest algo
+ elif compress_level == _COMPRESSION_LEVEL_FAST:
+ yield b'\x04' # XFL: min compression, fastest algo
+ else:
+ yield b'\x00' # XFL: compression unset/tradeoff
+ yield b'\xff' # OS: unknown
+
+ crc = zlib.crc32(b'')
+ size = 0
+ zobj = zlib.compressobj(compress_level,
+ zlib.DEFLATED, -zlib.MAX_WBITS,
+ zlib.DEF_MEM_LEVEL, 0)
+ for line in body:
+ size += len(line)
+ crc = zlib.crc32(line, crc)
+ yield zobj.compress(line)
+ yield zobj.flush()
+
+ # CRC32: 4 bytes
+ yield struct.pack('<L', crc & int('FFFFFFFF', 16))
+ # ISIZE: 4 bytes
+ yield struct.pack('<L', size & int('FFFFFFFF', 16))
+
+
+def decompress(body):
+ import gzip
+
+ zbuf = io.BytesIO()
+ zbuf.write(body)
+ zbuf.seek(0)
+ zfile = gzip.GzipFile(mode='rb', fileobj=zbuf)
+ data = zfile.read()
+ zfile.close()
+ return data
+
+
+def gzip(compress_level=5, mime_types=['text/html', 'text/plain'],
+ debug=False):
+ """Try to gzip the response body if Content-Type in mime_types.
+
+ cherrypy.response.headers['Content-Type'] must be set to one of the
+ values in the mime_types arg before calling this function.
+
+ The provided list of mime-types must be of one of the following form:
+ * `type/subtype`
+ * `type/*`
+ * `type/*+subtype`
+
+ No compression is performed if any of the following hold:
+ * The client sends no Accept-Encoding request header
+ * No 'gzip' or 'x-gzip' is present in the Accept-Encoding header
+ * No 'gzip' or 'x-gzip' with a qvalue > 0 is present
+ * The 'identity' value is given with a qvalue > 0.
+
+ """
+ request = cherrypy.serving.request
+ response = cherrypy.serving.response
+
+ set_vary_header(response, 'Accept-Encoding')
+
+ if not response.body:
+ # Response body is empty (might be a 304 for instance)
+ if debug:
+ cherrypy.log('No response body', context='TOOLS.GZIP')
+ return
+
+ # If returning cached content (which should already have been gzipped),
+ # don't re-zip.
+ if getattr(request, 'cached', False):
+ if debug:
+ cherrypy.log('Not gzipping cached response', context='TOOLS.GZIP')
+ return
+
+ acceptable = request.headers.elements('Accept-Encoding')
+ if not acceptable:
+ # If no Accept-Encoding field is present in a request,
+ # the server MAY assume that the client will accept any
+ # content coding. In this case, if "identity" is one of
+ # the available content-codings, then the server SHOULD use
+ # the "identity" content-coding, unless it has additional
+ # information that a different content-coding is meaningful
+ # to the client.
+ if debug:
+ cherrypy.log('No Accept-Encoding', context='TOOLS.GZIP')
+ return
+
+ ct = response.headers.get('Content-Type', '').split(';')[0]
+ for coding in acceptable:
+ if coding.value == 'identity' and coding.qvalue != 0:
+ if debug:
+ cherrypy.log('Non-zero identity qvalue: %s' % coding,
+ context='TOOLS.GZIP')
+ return
+ if coding.value in ('gzip', 'x-gzip'):
+ if coding.qvalue == 0:
+ if debug:
+ cherrypy.log('Zero gzip qvalue: %s' % coding,
+ context='TOOLS.GZIP')
+ return
+
+ if ct not in mime_types:
+ # If the list of provided mime-types contains tokens
+ # such as 'text/*' or 'application/*+xml',
+ # we go through them and find the most appropriate one
+ # based on the given content-type.
+ # The pattern matching is only caring about the most
+ # common cases, as stated above, and doesn't support
+ # for extra parameters.
+ found = False
+ if '/' in ct:
+ ct_media_type, ct_sub_type = ct.split('/')
+ for mime_type in mime_types:
+ if '/' in mime_type:
+ media_type, sub_type = mime_type.split('/')
+ if ct_media_type == media_type:
+ if sub_type == '*':
+ found = True
+ break
+ elif '+' in sub_type and '+' in ct_sub_type:
+ ct_left, ct_right = ct_sub_type.split('+')
+ left, right = sub_type.split('+')
+ if left == '*' and ct_right == right:
+ found = True
+ break
+
+ if not found:
+ if debug:
+ cherrypy.log('Content-Type %s not in mime_types %r' %
+ (ct, mime_types), context='TOOLS.GZIP')
+ return
+
+ if debug:
+ cherrypy.log('Gzipping', context='TOOLS.GZIP')
+ # Return a generator that compresses the page
+ response.headers['Content-Encoding'] = 'gzip'
+ response.body = compress(response.body, compress_level)
+ if 'Content-Length' in response.headers:
+ # Delete Content-Length header so finalize() recalcs it.
+ del response.headers['Content-Length']
+
+ return
+
+ if debug:
+ cherrypy.log('No acceptable encoding found.', context='GZIP')
+ cherrypy.HTTPError(406, 'identity, gzip').set_response()
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/gctools.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/gctools.py
new file mode 100644
index 0000000000000000000000000000000000000000..26746d78b8203ccc0dbd6b069ab7e60be8903ace
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/gctools.py
@@ -0,0 +1,218 @@
+import gc
+import inspect
+import sys
+import time
+
+try:
+ import objgraph
+except ImportError:
+ objgraph = None
+
+import cherrypy
+from cherrypy import _cprequest, _cpwsgi
+from cherrypy.process.plugins import SimplePlugin
+
+
+class ReferrerTree(object):
+
+ """An object which gathers all referrers of an object to a given depth."""
+
+ peek_length = 40
+
+ def __init__(self, ignore=None, maxdepth=2, maxparents=10):
+ self.ignore = ignore or []
+ self.ignore.append(inspect.currentframe().f_back)
+ self.maxdepth = maxdepth
+ self.maxparents = maxparents
+
+ def ascend(self, obj, depth=1):
+ """Return a nested list containing referrers of the given object."""
+ depth += 1
+ parents = []
+
+ # Gather all referrers in one step to minimize
+ # cascading references due to repr() logic.
+ refs = gc.get_referrers(obj)
+ self.ignore.append(refs)
+ if len(refs) > self.maxparents:
+ return [('[%s referrers]' % len(refs), [])]
+
+ try:
+ ascendcode = self.ascend.__code__
+ except AttributeError:
+ ascendcode = self.ascend.im_func.func_code
+ for parent in refs:
+ if inspect.isframe(parent) and parent.f_code is ascendcode:
+ continue
+ if parent in self.ignore:
+ continue
+ if depth <= self.maxdepth:
+ parents.append((parent, self.ascend(parent, depth)))
+ else:
+ parents.append((parent, []))
+
+ return parents
+
+ def peek(self, s):
+ """Return s, restricted to a sane length."""
+ if len(s) > (self.peek_length + 3):
+ half = self.peek_length // 2
+ return s[:half] + '...' + s[-half:]
+ else:
+ return s
+
+ def _format(self, obj, descend=True):
+ """Return a string representation of a single object."""
+ if inspect.isframe(obj):
+ filename, lineno, func, context, index = inspect.getframeinfo(obj)
+ return "<frame of function '%s'>" % func
+
+ if not descend:
+ return self.peek(repr(obj))
+
+ if isinstance(obj, dict):
+ return '{' + ', '.join(['%s: %s' % (self._format(k, descend=False),
+ self._format(v, descend=False))
+ for k, v in obj.items()]) + '}'
+ elif isinstance(obj, list):
+ return '[' + ', '.join([self._format(item, descend=False)
+ for item in obj]) + ']'
+ elif isinstance(obj, tuple):
+ return '(' + ', '.join([self._format(item, descend=False)
+ for item in obj]) + ')'
+
+ r = self.peek(repr(obj))
+ if isinstance(obj, (str, int, float)):
+ return r
+ return '%s: %s' % (type(obj), r)
+
+ def format(self, tree):
+ """Return a list of string reprs from a nested list of referrers."""
+ output = []
+
+ def ascend(branch, depth=1):
+ for parent, grandparents in branch:
+ output.append((' ' * depth) + self._format(parent))
+ if grandparents:
+ ascend(grandparents, depth + 1)
+ ascend(tree)
+ return output
+
+
+def get_instances(cls):
+ return [x for x in gc.get_objects() if isinstance(x, cls)]
+
+
+class RequestCounter(SimplePlugin):
+
+ def start(self):
+ self.count = 0
+
+ def before_request(self):
+ self.count += 1
+
+ def after_request(self):
+ self.count -= 1
+
+
+request_counter = RequestCounter(cherrypy.engine)
+request_counter.subscribe()
+
+
+def get_context(obj):
+ if isinstance(obj, _cprequest.Request):
+ return 'path=%s;stage=%s' % (obj.path_info, obj.stage)
+ elif isinstance(obj, _cprequest.Response):
+ return 'status=%s' % obj.status
+ elif isinstance(obj, _cpwsgi.AppResponse):
+ return 'PATH_INFO=%s' % obj.environ.get('PATH_INFO', '')
+ elif hasattr(obj, 'tb_lineno'):
+ return 'tb_lineno=%s' % obj.tb_lineno
+ return ''
+
+
+class GCRoot(object):
+
+ """A CherryPy page handler for testing reference leaks."""
+
+ classes = [
+ (_cprequest.Request, 2, 2,
+ 'Should be 1 in this request thread and 1 in the main thread.'),
+ (_cprequest.Response, 2, 2,
+ 'Should be 1 in this request thread and 1 in the main thread.'),
+ (_cpwsgi.AppResponse, 1, 1,
+ 'Should be 1 in this request thread only.'),
+ ]
+
+ @cherrypy.expose
+ def index(self):
+ return 'Hello, world!'
+
+ @cherrypy.expose
+ def stats(self):
+ output = ['Statistics:']
+
+ for trial in range(10):
+ if request_counter.count > 0:
+ break
+ time.sleep(0.5)
+ else:
+ output.append('\nNot all requests closed properly.')
+
+ # gc_collect isn't perfectly synchronous, because it may
+ # break reference cycles that then take time to fully
+ # finalize. Call it thrice and hope for the best.
+ gc.collect()
+ gc.collect()
+ unreachable = gc.collect()
+ if unreachable:
+ if objgraph is not None:
+ final = objgraph.by_type('Nondestructible')
+ if final:
+ objgraph.show_backrefs(final, filename='finalizers.png')
+
+ trash = {}
+ for x in gc.garbage:
+ trash[type(x)] = trash.get(type(x), 0) + 1
+ if trash:
+ output.insert(0, '\n%s unreachable objects:' % unreachable)
+ trash = [(v, k) for k, v in trash.items()]
+ trash.sort()
+ for pair in trash:
+ output.append(' ' + repr(pair))
+
+ # Check declared classes to verify uncollected instances.
+ # These don't have to be part of a cycle; they can be
+ # any objects that have unanticipated referrers that keep
+ # them from being collected.
+ allobjs = {}
+ for cls, minobj, maxobj, msg in self.classes:
+ allobjs[cls] = get_instances(cls)
+
+ for cls, minobj, maxobj, msg in self.classes:
+ objs = allobjs[cls]
+ lenobj = len(objs)
+ if lenobj < minobj or lenobj > maxobj:
+ if minobj == maxobj:
+ output.append(
+ '\nExpected %s %r references, got %s.' %
+ (minobj, cls, lenobj))
+ else:
+ output.append(
+ '\nExpected %s to %s %r references, got %s.' %
+ (minobj, maxobj, cls, lenobj))
+
+ for obj in objs:
+ if objgraph is not None:
+ ig = [id(objs), id(inspect.currentframe())]
+ fname = 'graph_%s_%s.png' % (cls.__name__, id(obj))
+ objgraph.show_backrefs(
+ obj, extra_ignore=ig, max_depth=4, too_many=20,
+ filename=fname, extra_info=get_context)
+ output.append('\nReferrers for %s (refcount=%s):' %
+ (repr(obj), sys.getrefcount(obj)))
+ t = ReferrerTree(ignore=[objs], maxdepth=3)
+ tree = t.ascend(obj)
+ output.extend(t.format(tree))
+
+ return '\n'.join(output)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/httputil.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/httputil.py
new file mode 100644
index 0000000000000000000000000000000000000000..eedf8d89cb5605c1f6bb010d5f8f343c143abe84
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/httputil.py
@@ -0,0 +1,518 @@
+"""HTTP library functions.
+
+This module contains functions for building an HTTP application
+framework: any one, not just one whose name starts with "Ch". ;) If you
+reference any modules from some popular framework inside *this* module,
+FuManChu will personally hang you up by your thumbs and submit you
+to a public caning.
+"""
+
+import functools
+import email.utils
+import re
+import builtins
+from binascii import b2a_base64
+from cgi import parse_header
+from email.header import decode_header
+from http.server import BaseHTTPRequestHandler
+from urllib.parse import unquote_plus
+
+import jaraco.collections
+
+import cherrypy
+from cherrypy._cpcompat import ntob, ntou
+
# Start from the stdlib's canonical {code: (reason, message)} mapping.
response_codes = BaseHTTPRequestHandler.responses.copy()

# From https://github.com/cherrypy/cherrypy/issues/361
# Override the stdlib wording for 500/503 with more neutral phrasing.
response_codes[500] = ('Internal Server Error',
                       'The server encountered an unexpected condition '
                       'which prevented it from fulfilling the request.')
response_codes[503] = ('Service Unavailable',
                       'The server is currently unable to handle the '
                       'request due to a temporary overloading or '
                       'maintenance of the server.')


# HTTP-date string for "now", e.g. 'Sun, 06 Nov 1994 08:49:37 GMT'.
HTTPDate = functools.partial(email.utils.formatdate, usegmt=True)
+
+
def urljoin(*atoms):
    r"""Return the given path \*atoms, joined into a single URL.

    This will correctly join a SCRIPT_NAME and PATH_INFO into the
    original URL, even if either atom is blank.
    """
    joined = '/'.join(filter(None, atoms))
    # Collapse any '//' runs produced by joining slash-adjacent atoms.
    while '//' in joined:
        joined = joined.replace('//', '/')
    # An empty result means the root URL.
    return joined if joined else '/'
+
+
def urljoin_bytes(*atoms):
    """Return the given path `*atoms`, joined into a single URL.

    This will correctly join a SCRIPT_NAME and PATH_INFO into the
    original URL, even if either atom is blank.
    """
    joined = b'/'.join(part for part in atoms if part)
    # Collapse duplicate slashes introduced by the join.
    while b'//' in joined:
        joined = joined.replace(b'//', b'/')
    # An empty result means the root URL.
    return joined if joined else b'/'
+
+
def protocol_from_http(protocol_str):
    """Return a (major, minor) int tuple from an 'HTTP/x.y' string.

    Generalized from fixed character positions so multi-digit versions
    (e.g. 'HTTP/10.2') parse correctly; 'HTTP/1.1' still yields (1, 1).
    """
    major, _, minor = protocol_str[5:].partition('.')
    return int(major), int(minor)
+
+
def get_ranges(headervalue, content_length):
    """Return a list of (start, stop) indices from a Range header, or None.

    Each (start, stop) tuple will be composed of two ints, which are suitable
    for use in a slicing operation. That is, the header "Range: bytes=3-6",
    if applied against a Python string, is requesting resource[3:7]. This
    function will return the list [(3, 7)].

    If this function returns an empty list, you should return HTTP 416.
    """
    if not headervalue:
        return None

    ranges = []
    unit, _, specs = headervalue.partition('=')
    for spec in specs.split(','):
        first, last = [edge.strip() for edge in spec.split('-', 1)]
        if first:
            # 'first-last' or open-ended 'first-' byte-range-spec.
            if not last:
                last = content_length - 1
            first, last = int(first), int(last)
            if first >= content_length:
                # From rfc 2616 sec 14.16: a byte-range-spec whose
                # first-byte-pos lies beyond the current entity length is
                # unsatisfiable; skip it so an all-unsatisfiable header
                # yields an empty list (caller should answer 416).
                continue
            if last < first:
                # Syntactically invalid spec: rfc 2616 sec 14.16 says to
                # treat the request as if the Range header did not exist
                # (normally a full 200 response).
                return None
            ranges.append((first, last + 1))
        else:
            if not last:
                # Bare '-' with no digits at all: invalid header.
                return None
            # '-N' suffix form requests the final N bytes.
            # RFC 2616 sec 14.35.1: if the entity is shorter than the
            # given suffix-length, the entire entity-body is used.
            suffix = int(last)
            if suffix > content_length:
                ranges.append((0, content_length))
            else:
                ranges.append((content_length - suffix, content_length))

    return ranges
+
+
class HeaderElement(object):

    """An element (with parameters) from an HTTP header's element list."""

    def __init__(self, value, params=None):
        # `value` is the bare token; `params` its ';key=val' parameters.
        self.value = value
        # Avoid a shared mutable default: each element gets its own dict.
        if params is None:
            params = {}
        self.params = params

    def __cmp__(self, other):
        # NOTE(review): Python 2 only — Python 3 ignores __cmp__ and
        # `builtins.cmp` does not exist there; __lt__ below is what runs.
        return builtins.cmp(self.value, other.value)

    def __lt__(self, other):
        # Order elements by their bare value; parameters are ignored.
        return self.value < other.value

    def __str__(self):
        # Re-serialize as 'value;k1=v1;k2=v2'.
        p = [';%s=%s' % (k, v) for k, v in self.params.items()]
        return str('%s%s' % (self.value, ''.join(p)))

    def __bytes__(self):
        return ntob(self.__str__())

    def __unicode__(self):
        # Python 2 compatibility hook; unused on Python 3.
        return ntou(self.__str__())

    @staticmethod
    def parse(elementstr):
        """Transform 'token;key=val' to ('token', {'key': 'val'})."""
        initial_value, params = parse_header(elementstr)
        return initial_value, params

    @classmethod
    def from_str(cls, elementstr):
        """Construct an instance from a string of the form 'token;key=val'."""
        ival, params = cls.parse(elementstr)
        return cls(ival, params)
+
+
# Matches the ';q=' separator (optional surrounding spaces) that splits a
# media-range from its quality value in Accept* headers.
q_separator = re.compile(r'; *q *=')
+
+
class AcceptElement(HeaderElement):

    """An element (with parameters) from an Accept* header's element list.

    AcceptElement objects are comparable; the more-preferred object will be
    "less than" the less-preferred object. They are also therefore sortable;
    if you sort a list of AcceptElement objects, they will be listed in
    priority order; the most preferred value will be first. Yes, it should
    have been the other way around, but it's too late to fix now.
    """

    @classmethod
    def from_str(cls, elementstr):
        """Construct an instance from 'media-range[;q=value[;ext]]' text."""
        qvalue = None
        # The first "q" parameter (if any) separates the initial
        # media-range parameter(s) (if any) from the accept-params.
        atoms = q_separator.split(elementstr, 1)
        media_range = atoms.pop(0).strip()
        if atoms:
            # The qvalue for an Accept header can have extensions. The other
            # headers cannot, but it's easier to parse them as if they did.
            qvalue = HeaderElement.from_str(atoms[0].strip())

        media_type, params = cls.parse(media_range)
        if qvalue is not None:
            # Stored as a HeaderElement so any accept-ext params survive.
            params['q'] = qvalue
        return cls(media_type, params)

    @property
    def qvalue(self):
        'The qvalue, or priority, of this value.'
        # Missing 'q' defaults to '1' (highest priority) per the RFC.
        val = self.params.get('q', '1')
        if isinstance(val, HeaderElement):
            # Unwrap the parsed 'q' element down to its bare value.
            val = val.value
        try:
            return float(val)
        except ValueError as val_err:
            """Fail client requests with invalid quality value.

            Ref: https://github.com/cherrypy/cherrypy/issues/1370
            """
            raise cherrypy.HTTPError(
                400,
                'Malformed HTTP header: `{}`'.
                format(str(self)),
            ) from val_err

    def __cmp__(self, other):
        # NOTE(review): Python 2 only; Python 3 uses __lt__ below and
        # `builtins.cmp` does not exist there.
        diff = builtins.cmp(self.qvalue, other.qvalue)
        if diff == 0:
            diff = builtins.cmp(str(self), str(other))
        return diff

    def __lt__(self, other):
        # Ties on qvalue are broken by the serialized string form.
        if self.qvalue == other.qvalue:
            return str(self) < str(other)
        else:
            return self.qvalue < other.qvalue
+
+
# Split a header value on commas, but not on commas that appear inside a
# double-quoted string (lookahead requires an even number of quotes ahead).
RE_HEADER_SPLIT = re.compile(',(?=(?:[^"]*"[^"]*")*[^"]*$)')
+
+
def header_elements(fieldname, fieldvalue):
    """Return a sorted HeaderElement list from a comma-separated header string.
    """
    if not fieldvalue:
        return []

    # Accept* and TE headers carry q-values, so their elements get the
    # richer AcceptElement parsing; everything else is a plain element.
    use_accept = fieldname.startswith('Accept') or fieldname == 'TE'
    element_cls = AcceptElement if use_accept else HeaderElement
    parsed = [element_cls.from_str(chunk)
              for chunk in RE_HEADER_SPLIT.split(fieldvalue)]
    # Most-preferred first (AcceptElement sorts preferred as "less than").
    return list(reversed(sorted(parsed)))
+
+
def decode_TEXT(value):
    r"""
    Decode :rfc:`2047` TEXT

    >>> decode_TEXT("=?utf-8?q?f=C3=BCr?=") == b'f\xfcr'.decode('latin-1')
    True
    """
    decoded_parts = []
    for fragment, charset in decode_header(value):
        # Fragments with an explicit charset arrive as bytes; decode them.
        if charset is not None:
            fragment = fragment.decode(charset)
        decoded_parts.append(fragment)
    return ''.join(decoded_parts)
+
+
def decode_TEXT_maybe(value):
    """
    Decode the text but only if '=?' appears in it.
    """
    # '=?' is the RFC 2047 encoded-word marker; plain values pass through.
    if '=?' in value:
        return decode_TEXT(value)
    return value
+
+
def valid_status(status):
    """Return legal HTTP status Code, Reason-phrase and Message.

    The status arg must be an int, a str that begins with an int
    or the constant from ``http.client`` stdlib module.

    If status has no reason-phrase is supplied, a default reason-
    phrase will be provided.

    >>> import http.client
    >>> from http.server import BaseHTTPRequestHandler
    >>> valid_status(http.client.ACCEPTED) == (
    ...     int(http.client.ACCEPTED),
    ... ) + BaseHTTPRequestHandler.responses[http.client.ACCEPTED]
    True
    """
    if not status:
        status = 200

    code, reason = status, None
    if isinstance(status, str):
        # 'NNN Reason text' -> split the code from the optional phrase.
        code, _, reason = status.partition(' ')
        reason = reason.strip() or None

    try:
        code = int(code)
    except (TypeError, ValueError):
        raise ValueError('Illegal response status from server '
                         '(%s is non-numeric).' % repr(code))

    if not 100 <= code <= 599:
        raise ValueError('Illegal response status from server '
                         '(%s is out of range).' % repr(code))

    # Unknown-but-legal codes get blank defaults.
    default_reason, message = response_codes.get(code, ('', ''))

    if reason is None:
        reason = default_reason

    return code, reason, message
+
+
# NOTE: the parse_qs functions that follow are modified versions of those
# in the python3.0 source - we need to pass through an encoding to the unquote
# method, but the default parse_qs function doesn't allow us to. These do.
+
+def _parse_qs(qs, keep_blank_values=0, strict_parsing=0, encoding='utf-8'):
+ """Parse a query given as a string argument.
+
+ Arguments:
+
+ qs: URL-encoded query string to be parsed
+
+ keep_blank_values: flag indicating whether blank values in
+ URL encoded queries should be treated as blank strings. A
+ true value indicates that blanks should be retained as blank
+ strings. The default false value indicates that blank values
+ are to be ignored and treated as if they were not included.
+
+ strict_parsing: flag indicating what to do with parsing errors. If
+ false (the default), errors are silently ignored. If true,
+ errors raise a ValueError exception.
+
+ Returns a dict, as G-d intended.
+ """
+ pairs = [s2 for s1 in qs.split('&') for s2 in s1.split(';')]
+ d = {}
+ for name_value in pairs:
+ if not name_value and not strict_parsing:
+ continue
+ nv = name_value.split('=', 1)
+ if len(nv) != 2:
+ if strict_parsing:
+ raise ValueError('bad query field: %r' % (name_value,))
+ # Handle case of a control-name with no equal sign
+ if keep_blank_values:
+ nv.append('')
+ else:
+ continue
+ if len(nv[1]) or keep_blank_values:
+ name = unquote_plus(nv[0], encoding, errors='strict')
+ value = unquote_plus(nv[1], encoding, errors='strict')
+ if name in d:
+ if not isinstance(d[name], list):
+ d[name] = [d[name]]
+ d[name].append(value)
+ else:
+ d[name] = value
+ return d
+
+
# Server-side image maps submit their click coordinates as 'x,y'.
image_map_pattern = re.compile(r'[0-9]+,[0-9]+')


def parse_query_string(query_string, keep_blank_values=True, encoding='utf-8'):
    """Build a params dictionary from a query_string.

    Duplicate key/value pairs in the provided query_string will be
    returned as {'key': [val1, val2, ...]}. Single key/values will
    be returned as strings: {'key': 'value'}.
    """
    if image_map_pattern.match(query_string):
        # Server-side image map. Map the coords to 'x' and 'y'
        # (like CGI::Request does).
        x, y = query_string.split(',')[:2]
        return {'x': int(x), 'y': int(y)}
    return _parse_qs(query_string, keep_blank_values, encoding=encoding)
+
+
class CaseInsensitiveDict(jaraco.collections.KeyTransformingDict):

    """A case-insensitive dict subclass.

    Each key is changed on entry to title case.
    """

    @staticmethod
    def transform_key(key):
        """Normalize `key` to title case so lookups are case-insensitive."""
        if key is None:
            # TODO(#1830): why?
            # NOTE(review): a None key becomes the literal string 'None';
            # see the referenced upstream issue before changing this.
            return 'None'
        return key.title()
+
+
# TEXT = <any OCTET except CTLs, but including LWS>
#
# A CRLF is allowed in the definition of TEXT only as part of a header
# field continuation. It is expected that the folding LWS will be
# replaced with a single SP before interpretation of the TEXT value."
if str == bytes:
    # Python 2 branch: str.translate needs an explicit 256-char identity
    # table plus the CTL characters (0-31, 127) to delete.
    header_translate_table = ''.join([chr(i) for i in range(256)])
    header_translate_deletechars = ''.join(
        [chr(i) for i in range(32)]) + chr(127)
else:
    # Python 3: bytes.translate accepts None as the identity mapping.
    header_translate_table = None
    header_translate_deletechars = bytes(range(32)) + bytes([127])
+
+
class HeaderMap(CaseInsensitiveDict):

    """A dict subclass for HTTP request and response headers.

    Each key is changed on entry to str(key).title(). This allows headers
    to be case-insensitive and avoid duplicates.

    Values are header values (decoded according to :rfc:`2047` if necessary).
    """

    # Protocol version assumed by the encoding rules below.
    protocol = (1, 1)
    # Encodings attempted, in order, when serializing header values.
    encodings = ['ISO-8859-1']

    # Someday, when http-bis is done, this will probably get dropped
    # since few servers, clients, or intermediaries do it. But until then,
    # we're going to obey the spec as is.
    # "Words of *TEXT MAY contain characters from character sets other than
    # ISO-8859-1 only when encoded according to the rules of RFC 2047."
    use_rfc_2047 = True

    def elements(self, key):
        """Return a sorted list of HeaderElements for the given header."""
        return header_elements(self.transform_key(key), self.get(key))

    def values(self, key):
        """Return a sorted list of HeaderElement.value for the given header."""
        # NOTE(review): shadows dict.values() with a different signature.
        return [e.value for e in self.elements(key)]

    def output(self):
        """Transform self into a list of (name, value) tuples."""
        return list(self.encode_header_items(self.items()))

    @classmethod
    def encode_header_items(cls, header_items):
        """
        Prepare the sequence of name, value tuples into a form suitable for
        transmitting on the wire for HTTP.
        """
        for k, v in header_items:
            # Coerce non-string values (ints, dates, ...) to str first.
            if not isinstance(v, str) and not isinstance(v, bytes):
                v = str(v)

            yield tuple(map(cls.encode_header_item, (k, v)))

    @classmethod
    def encode_header_item(cls, item):
        """Encode one header name or value to bytes, stripping CTL chars."""
        if isinstance(item, str):
            item = cls.encode(item)

        # See header_translate_* constants above.
        # Replace only if you really know what you're doing.
        return item.translate(
            header_translate_table, header_translate_deletechars)

    @classmethod
    def encode(cls, v):
        """Return the given header name or value, encoded for HTTP output."""
        for enc in cls.encodings:
            try:
                return v.encode(enc)
            except UnicodeEncodeError:
                continue

        if cls.protocol == (1, 1) and cls.use_rfc_2047:
            # Encode RFC-2047 TEXT
            # (e.g. u"\u8200" -> "=?utf-8?b?6IiA?=").
            # We do our own here instead of using the email module
            # because we never want to fold lines--folding has
            # been deprecated by the HTTP working group.
            v = b2a_base64(v.encode('utf-8'))
            return (b'=?utf-8?b?' + v.strip(b'\n') + b'?=')

        raise ValueError('Could not encode header part %r using '
                         'any of the encodings %r.' %
                         (v, cls.encodings))
+
+
class Host(object):

    """An internet address.

    name
        Should be the client's host name. If not available (because no DNS
        lookup is performed), the IP address should be used instead.

    """

    # Class-level fallbacks; instances always overwrite these in __init__.
    ip = '0.0.0.0'
    port = 80
    name = 'unknown.tld'

    def __init__(self, ip, port, name=None):
        self.ip = ip
        self.port = port
        # Fall back to the bare IP when no host name was resolved.
        self.name = ip if name is None else name

    def __repr__(self):
        return 'httputil.Host(%r, %r, %r)' % (self.ip, self.port, self.name)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/jsontools.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/jsontools.py
new file mode 100644
index 0000000000000000000000000000000000000000..9ca75a8f3b10538ef23e1383579da92461bf5c9c
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/jsontools.py
@@ -0,0 +1,89 @@
+import cherrypy
+from cherrypy import _json as json
+from cherrypy._cpcompat import text_or_bytes, ntou
+
+
def json_processor(entity):
    """Read application/json data into request.json."""
    # A missing or empty Content-Length means 411 Length Required.
    if not entity.headers.get(ntou('Content-Length'), ntou('')):
        raise cherrypy.HTTPError(411)

    raw_body = entity.fp.read()
    # Any ValueError while decoding becomes a 400 for the client.
    with cherrypy.HTTPError.handle(ValueError, 400, 'Invalid JSON document'):
        cherrypy.serving.request.json = json.decode(raw_body.decode('utf-8'))
+
+
def json_in(content_type=[ntou('application/json'), ntou('text/javascript')],
            force=True, debug=False, processor=json_processor):
    """Add a processor to parse JSON request entities:
    The default processor places the parsed data into request.json.

    Incoming request entities which match the given content_type(s) will
    be deserialized from JSON to the Python equivalent, and the result
    stored at cherrypy.request.json. The 'content_type' argument may
    be a Content-Type string or a list of allowable Content-Type strings.

    If the 'force' argument is True (the default), then entities of other
    content types will not be allowed; "415 Unsupported Media Type" is
    raised instead.

    Supply your own processor to use a custom decoder, or to handle the parsed
    data differently. The processor can be configured via
    tools.json_in.processor or via the decorator method.

    Note that the deserializer requires the client send a Content-Length
    request header, or it will raise "411 Length Required". If for any
    other reason the request entity cannot be deserialized from JSON,
    it will raise "400 Bad Request: Invalid JSON document".
    """
    request = cherrypy.serving.request
    # Accept a single Content-Type string as well as a list of them.
    if isinstance(content_type, text_or_bytes):
        content_type = [content_type]

    if force:
        if debug:
            cherrypy.log('Removing body processors %s' %
                         repr(request.body.processors.keys()), 'TOOLS.JSON_IN')
        # Drop every other processor so non-matching content types fall
        # through to the 415 default set just below.
        request.body.processors.clear()
        request.body.default_proc = cherrypy.HTTPError(
            415, 'Expected an entity of content type %s' %
            ', '.join(content_type))

    # Route each allowed content type to the JSON processor.
    for ct in content_type:
        if debug:
            cherrypy.log('Adding body processor for %s' % ct, 'TOOLS.JSON_IN')
        request.body.processors[ct] = processor
+
+
def json_handler(*args, **kwargs):
    """Invoke the stashed inner handler and JSON-encode its result."""
    inner_handler = cherrypy.serving.request._json_inner_handler
    return json.encode(inner_handler(*args, **kwargs))
+
+
def json_out(content_type='application/json', debug=False,
             handler=json_handler):
    """Wrap request.handler to serialize its output to JSON. Sets Content-Type.

    If the given content_type is None, the Content-Type response header
    is not set.

    Provide your own handler to use a custom encoder. For example
    cherrypy.config['tools.json_out.handler'] = <function>, or
    @json_out(handler=function).
    """
    request = cherrypy.serving.request
    if request.handler is None:
        # Another component (e.g. the caching tool) already attached a
        # response body; there is nothing left to wrap.
        return
    if debug:
        cherrypy.log('Replacing %s with JSON handler' % request.handler,
                     'TOOLS.JSON_OUT')
    # Stash the real handler; json_handler calls it and encodes the result.
    request._json_inner_handler = request.handler
    request.handler = handler
    if content_type is None:
        return
    if debug:
        cherrypy.log('Setting Content-Type to %s' %
                     content_type, 'TOOLS.JSON_OUT')
    cherrypy.serving.response.headers['Content-Type'] = content_type
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/locking.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/locking.py
new file mode 100644
index 0000000000000000000000000000000000000000..317fb58c90befb438f015fabe5ea3e3b1af21182
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/locking.py
@@ -0,0 +1,47 @@
+import datetime
+
+
class NeverExpires(object):
    """A timer stand-in whose expiration never arrives."""

    def expired(self):
        """Always report that no expiration has occurred."""
        return False
+
+
class Timer(object):
    """
    A simple timer that will indicate when an expiration time has passed.
    """

    def __init__(self, expiration):
        """Create a timer that expires at `expiration` (UTC datetime)."""
        self.expiration = expiration

    @classmethod
    def after(cls, elapsed):
        """
        Return a timer that will expire after `elapsed` passes.
        """
        deadline = datetime.datetime.utcnow() + elapsed
        return cls(deadline)

    def expired(self):
        """Return True once the current UTC time reaches the expiration."""
        now = datetime.datetime.utcnow()
        return now >= self.expiration
+
+
class LockTimeout(Exception):
    'An exception when a lock could not be acquired before a timeout period'
    # Raised by LockChecker.expired() below once its timer runs out.
+
+
class LockChecker(object):
    """
    Keep track of the time and detect if a timeout has expired
    """

    def __init__(self, session_id, timeout):
        """Track `session_id`; a falsey `timeout` means wait forever."""
        self.session_id = session_id
        self.timer = Timer.after(timeout) if timeout else NeverExpires()

    def expired(self):
        """Return False while time remains; raise LockTimeout once it is up."""
        if not self.timer.expired():
            return False
        raise LockTimeout(
            'Timeout acquiring lock for %(session_id)s' % vars(self))
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/profiler.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/profiler.py
new file mode 100644
index 0000000000000000000000000000000000000000..fccf2eb85da8780f85e7ea7fd632df7b09fa751e
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/profiler.py
@@ -0,0 +1,221 @@
+"""Profiler tools for CherryPy.
+
+CherryPy users
+==============
+
+You can profile any of your pages as follows::
+
+ from cherrypy.lib import profiler
+
+ class Root:
+ p = profiler.Profiler("/path/to/profile/dir")
+
+ @cherrypy.expose
+ def index(self):
+ self.p.run(self._index)
+
+ def _index(self):
+ return "Hello, world!"
+
+ cherrypy.tree.mount(Root())
+
+You can also turn on profiling for all requests
+using the ``make_app`` function as WSGI middleware.
+
+CherryPy developers
+===================
+
+This module can be used whenever you make changes to CherryPy,
+to get a quick sanity-check on overall CP performance. Use the
+``--profile`` flag when running the test suite. Then, use the ``serve()``
+function to browse the results in a web browser. If you run this
+module from the command line, it will call ``serve()`` for you.
+
+"""
+
+import io
+import os
+import os.path
+import sys
+import warnings
+
+import cherrypy
+
+
try:
    import profile
    import pstats

    def new_func_strip_path(func_name):
        """Make profiler output more readable by adding `__init__` modules' parents
        """
        # func_name is the pstats (filename, line, name) triple.
        filename, line, name = func_name
        if filename.endswith('__init__.py'):
            # Keep the parent package name so multiple __init__.py files
            # stay distinguishable ('pkg/__init__.py', not '__init__.py');
            # [-12:] is the length of '/__init__.py'.
            return (
                os.path.basename(filename[:-12]) + filename[-12:],
                line,
                name,
            )
        return os.path.basename(filename), line, name

    # Monkey-patch pstats so every report uses the friendlier paths.
    pstats.func_strip_path = new_func_strip_path
except ImportError:
    # Some distros ship Python without the profile/pstats modules;
    # the rest of this module degrades gracefully (see make_app/serve).
    profile = None
    pstats = None
+
+
# Module-global counter used to generate unique profile dump file names.
_count = 0
+
+
class Profiler(object):

    # Dumps one profile file per profiled call into self.path and exposes
    # small CherryPy pages (index/menu/report) to browse the results.

    def __init__(self, path=None):
        # Default dump directory: <this module's dir>/profile.
        if not path:
            path = os.path.join(os.path.dirname(__file__), 'profile')
        self.path = path
        if not os.path.exists(path):
            os.makedirs(path)

    def run(self, func, *args, **params):
        """Dump profile data into self.path."""
        global _count
        # Each run gets a unique, monotonically numbered file name.
        c = _count = _count + 1
        path = os.path.join(self.path, 'cp_%04d.prof' % c)
        prof = profile.Profile()
        result = prof.runcall(func, *args, **params)
        prof.dump_stats(path)
        return result

    def statfiles(self):
        """:rtype: list of available profiles.
        """
        return [f for f in os.listdir(self.path)
                if f.startswith('cp_') and f.endswith('.prof')]

    def stats(self, filename, sortby='cumulative'):
        """:rtype stats(index): output of print_stats() for the given profile.
        """
        sio = io.StringIO()
        if sys.version_info >= (2, 5):
            # Always true on Python 3; the else branch below is legacy.
            s = pstats.Stats(os.path.join(self.path, filename), stream=sio)
            s.strip_dirs()
            s.sort_stats(sortby)
            s.print_stats()
        else:
            # pstats.Stats before Python 2.5 didn't take a 'stream' arg,
            # but just printed to stdout. So re-route stdout.
            s = pstats.Stats(os.path.join(self.path, filename))
            s.strip_dirs()
            s.sort_stats(sortby)
            oldout = sys.stdout
            try:
                sys.stdout = sio
                s.print_stats()
            finally:
                sys.stdout = oldout
        response = sio.getvalue()
        sio.close()
        return response

    @cherrypy.expose
    def index(self):
        # Frameset shell: run list on the left, report frame on the right.
        return """<html>
        <head><title>CherryPy profile data</title></head>
        <frameset cols='200, 1*'>
            <frame src='menu' />
            <frame name='main' src='' />
        </frameset>
        </html>
        """

    @cherrypy.expose
    def menu(self):
        # Left-hand frame: one link per dumped profile file.
        yield '<h2>Profiling runs</h2>'
        yield '<p>Click on one of the runs below to see profiling data.</p>'
        runs = self.statfiles()
        runs.sort()
        for i in runs:
            yield "<a href='report?filename=%s' target='main'>%s</a><br />" % (
                i, i)

    @cherrypy.expose
    def report(self, filename):
        # Plain text so browsers render the pstats table verbatim.
        cherrypy.response.headers['Content-Type'] = 'text/plain'
        return self.stats(filename)
+
+
class ProfileAggregator(Profiler):

    # A Profiler variant that accumulates every run into one profile file.

    def __init__(self, path=None):
        Profiler.__init__(self, path)
        global _count
        # One file number and one Profile object reused for all runs.
        self.count = _count = _count + 1
        self.profiler = profile.Profile()

    def run(self, func, *args, **params):
        # Re-dump the aggregated stats after each profiled call.
        path = os.path.join(self.path, 'cp_%04d.prof' % self.count)
        result = self.profiler.runcall(func, *args, **params)
        self.profiler.dump_stats(path)
        return result
+
+
class make_app:

    # WSGI middleware that profiles each request of the wrapped app.

    def __init__(self, nextapp, path=None, aggregate=False):
        """Make a WSGI middleware app which wraps 'nextapp' with profiling.

        nextapp
            the WSGI application to wrap, usually an instance of
            cherrypy.Application.

        path
            where to dump the profiling output.

        aggregate
            if True, profile data for all HTTP requests will go in
            a single file. If False (the default), each HTTP request will
            dump its profile data into a separate file.

        """
        if profile is None or pstats is None:
            # Profiling is unavailable; warn but still serve requests.
            msg = ('Your installation of Python does not have a profile '
                   "module. If you're on Debian, try "
                   '`sudo apt-get install python-profiler`. '
                   'See http://www.cherrypy.org/wiki/ProfilingOnDebian '
                   'for details.')
            warnings.warn(msg)

        self.nextapp = nextapp
        self.aggregate = aggregate
        if aggregate:
            self.profiler = ProfileAggregator(path)
        else:
            self.profiler = Profiler(path)

    def __call__(self, environ, start_response):
        # Materialize the whole response inside the profiler so body
        # generation cost is included in the profile.
        def gather():
            result = []
            for line in self.nextapp(environ, start_response):
                result.append(line)
            return result
        return self.profiler.run(gather)
+
+
def serve(path=None, port=8080):
    """Start a CherryPy server browsing the profile dumps in `path`."""
    if profile is None or pstats is None:
        # Profiling modules missing; reports will not be generatable.
        msg = ('Your installation of Python does not have a profile module. '
               "If you're on Debian, try "
               '`sudo apt-get install python-profiler`. '
               'See http://www.cherrypy.org/wiki/ProfilingOnDebian '
               'for details.')
        warnings.warn(msg)

    cherrypy.config.update({'server.socket_port': int(port),
                            'server.thread_pool': 10,
                            'environment': 'production',
                            })
    cherrypy.quickstart(Profiler(path))
+
+
if __name__ == '__main__':
    # CLI usage: profiler.py [path [port]]
    serve(*tuple(sys.argv[1:]))
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/reprconf.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/reprconf.py
new file mode 100644
index 0000000000000000000000000000000000000000..3976652e114827ed7cc06dc583b4a3055cbb7a25
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/reprconf.py
@@ -0,0 +1,397 @@
+"""Generic configuration system using unrepr.
+
+Configuration data may be supplied as a Python dictionary, as a filename,
+or as an open file object. When you supply a filename or file, Python's
+builtin ConfigParser is used (with some extensions).
+
+Namespaces
+----------
+
+Configuration keys are separated into namespaces by the first "." in the key.
+
+The only key that cannot exist in a namespace is the "environment" entry.
+This special entry 'imports' other config entries from a template stored in
+the Config.environments dict.
+
+You can define your own namespaces to be called when new config is merged
+by adding a named handler to Config.namespaces. The name can be any string,
+and the handler must be either a callable or a context manager.
+"""
+
+import builtins
+import configparser
+import operator
+import sys
+
+from cherrypy._cpcompat import text_or_bytes
+
+
class NamespaceSet(dict):

    """A dict of config namespace names and handlers.

    Each config entry should begin with a namespace name; the corresponding
    namespace handler will be called once for each config entry in that
    namespace, and will be passed two arguments: the config key (with the
    namespace removed) and the config value.

    Namespace handlers may be any Python callable; they may also be
    context managers, in which case their __enter__
    method should return a callable to be used as the handler.
    See cherrypy.tools (the Toolbox class) for an example.
    """

    def __call__(self, config):
        """Iterate through config and pass it to each namespace handler.

        config
            A flat dict, where keys use dots to separate
            namespaces, and values are arbitrary.

        The first name in each config key is used to look up the corresponding
        namespace handler. For example, a config entry of {'tools.gzip.on': v}
        will call the 'tools' namespace handler with the args: ('gzip.on', v)
        """
        # Separate the given config into namespaces
        ns_confs = {}
        for k in config:
            if '.' in k:
                ns, name = k.split('.', 1)
                bucket = ns_confs.setdefault(ns, {})
                bucket[name] = config[k]

        # I chose __enter__ and __exit__ so someday this could be
        # rewritten using 'with' statement:
        # for ns, handler in self.items():
        #     with handler as callable:
        #         for k, v in ns_confs.get(ns, {}).items():
        #             callable(k, v)
        for ns, handler in self.items():
            exit = getattr(handler, '__exit__', None)
            if exit:
                # Context-manager protocol, hand-rolled (see note above).
                callable = handler.__enter__()
                no_exc = True
                try:
                    try:
                        for k, v in ns_confs.get(ns, {}).items():
                            callable(k, v)
                    except Exception:
                        # The exceptional case is handled here
                        no_exc = False
                        if exit is None:
                            raise
                        if not exit(*sys.exc_info()):
                            raise
                        # The exception is swallowed if exit() returns true
                finally:
                    # The normal and non-local-goto cases are handled here
                    if no_exc and exit:
                        exit(None, None, None)
            else:
                # Plain callable handler: invoke it directly per entry.
                for k, v in ns_confs.get(ns, {}).items():
                    handler(k, v)

    def __repr__(self):
        return '%s.%s(%s)' % (self.__module__, self.__class__.__name__,
                              dict.__repr__(self))

    def __copy__(self):
        newobj = self.__class__()
        newobj.update(self)
        return newobj
    copy = __copy__
+
+
class Config(dict):

    """A dict-like set of configuration data, with defaults and namespaces.

    May take a file, filename, or dict.
    """

    # Class-level registries shared by all instances (override in subclasses).
    defaults = {}
    environments = {}
    namespaces = NamespaceSet()

    def __init__(self, file=None, **kwargs):
        self.reset()
        if file is not None:
            self.update(file)
        if kwargs:
            self.update(kwargs)

    def reset(self):
        """Reset self to default values."""
        self.clear()
        # Bypass our own update() so namespace handlers do not fire
        # for the defaults.
        dict.update(self, self.defaults)

    def update(self, config):
        """Update self from a dict, file, or filename."""
        self._apply(Parser.load(config))

    def _apply(self, config):
        """Update self from a dict."""
        which_env = config.get('environment')
        if which_env:
            # Import entries from the named environment template without
            # overriding keys given explicitly in `config`.
            env = self.environments[which_env]
            for k in env:
                if k not in config:
                    config[k] = env[k]

        dict.update(self, config)
        self.namespaces(config)

    def __setitem__(self, k, v):
        dict.__setitem__(self, k, v)
        # Run the namespace handler for the single new entry.
        self.namespaces({k: v})
+
+
class Parser(configparser.ConfigParser):

    """Sub-class of ConfigParser that keeps the case of options and that
    raises an exception if the file cannot be read.
    """

    def optionxform(self, optionstr):
        """Preserve option case (the base class lower-cases names)."""
        return optionstr

    def read(self, filenames):
        """Read and parse the given filename(s).

        Unlike the base class, an unreadable file raises (IOError/OSError)
        instead of being silently skipped.
        """
        if isinstance(filenames, text_or_bytes):
            filenames = [filenames]
        for filename in filenames:
            # Deliberately let open() raise: a config file that cannot be
            # read should be a hard error, not silently ignored.
            with open(filename) as fp:
                self._read(fp, filename)

    def as_dict(self, raw=False, vars=None):
        """Convert an INI file to a dictionary"""
        # Load INI file into a dict, unrepr-ing each value into Python.
        result = {}
        for section in self.sections():
            if section not in result:
                result[section] = {}
            for option in self.options(section):
                value = self.get(section, option, raw=raw, vars=vars)
                try:
                    value = unrepr(value)
                except Exception:
                    x = sys.exc_info()[1]
                    msg = ('Config error in section: %r, option: %r, '
                           'value: %r. Config values must be valid Python.' %
                           (section, option, value))
                    raise ValueError(msg, x.__class__.__name__, x.args)
                result[section][option] = value
        return result

    def dict_from_file(self, file):
        """Return a dict parsed from an open file object or a filename."""
        if hasattr(file, 'read'):
            # read_file() replaces readfp(), which was deprecated in
            # Python 3.2 and removed in 3.12.
            self.read_file(file)
        else:
            self.read(file)
        return self.as_dict()

    @classmethod
    def load(cls, input):
        """Resolve 'input' to dict from a dict, file, or filename."""
        is_file = (
            # Filename
            isinstance(input, text_or_bytes)
            # Open file object
            or hasattr(input, 'read')
        )
        # Instantiate via cls so subclasses load with their own behavior.
        return cls().dict_from_file(input) if is_file else input.copy()
+
+
+# public domain "unrepr" implementation, found on the web and then improved.
+
+
+class _Builder:
+
+    """Build Python objects from AST nodes (backs ``unrepr``).
+
+    Dispatches on the AST node's class name via ``build_<ClassName>``
+    methods; node types without a handler raise TypeError rather than
+    being evaluated.
+    """
+
+    def build(self, o):
+        # Dispatch on the node's class name, e.g. build_Name, build_Num.
+        m = getattr(self, 'build_' + o.__class__.__name__, None)
+        if m is None:
+            raise TypeError('unrepr does not recognize %s' %
+                            repr(o.__class__.__name__))
+        return m(o)
+
+    def astnode(self, s):
+        """Return a Python3 ast Node compiled from a string."""
+        try:
+            import ast
+        except ImportError:
+            # Fallback to eval when ast package is not available,
+            # e.g. IronPython 1.0.
+            return eval(s)
+
+        # Parse as an assignment so the expression is available as the
+        # value of the first (only) statement.
+        p = ast.parse('__tempvalue__ = ' + s)
+        return p.body[0].value
+
+    def build_Subscript(self, o):
+        return self.build(o.value)[self.build(o.slice)]
+
+    def build_Index(self, o):
+        return self.build(o.value)
+
+    def _build_call35(self, o):
+        """
+        Workaround for python 3.5 _ast.Call signature, docs found here
+        https://greentreesnakes.readthedocs.org/en/latest/nodes.html
+        """
+        import ast
+        callee = self.build(o.func)
+        args = []
+        if o.args is not None:
+            for a in o.args:
+                if isinstance(a, ast.Starred):
+                    # *args unpacking: build the iterable being starred.
+                    args.append(self.build(a.value))
+                else:
+                    args.append(self.build(a))
+        kwargs = {}
+        for kw in o.keywords:
+            if kw.arg is None:  # double asterix `**`
+                rst = self.build(kw.value)
+                if not isinstance(rst, dict):
+                    raise TypeError('Invalid argument for call.'
+                                    'Must be a mapping object.')
+                # give preference to the keys set directly from arg=value
+                for k, v in rst.items():
+                    if k not in kwargs:
+                        kwargs[k] = v
+            else:  # defined on the call as: arg=value
+                kwargs[kw.arg] = self.build(kw.value)
+        return callee(*args, **kwargs)
+
+    def build_Call(self, o):
+        # Python 3.5+ changed the _ast.Call layout; delegate there.
+        if sys.version_info >= (3, 5):
+            return self._build_call35(o)
+
+        callee = self.build(o.func)
+
+        if o.args is None:
+            args = ()
+        else:
+            args = tuple([self.build(a) for a in o.args])
+
+        if o.starargs is None:
+            starargs = ()
+        else:
+            starargs = tuple(self.build(o.starargs))
+
+        if o.kwargs is None:
+            kwargs = {}
+        else:
+            kwargs = self.build(o.kwargs)
+        if o.keywords is not None:  # direct a=b keywords
+            for kw in o.keywords:
+                # preference because is a direct keyword against **kwargs
+                kwargs[kw.arg] = self.build(kw.value)
+        return callee(*(args + starargs), **kwargs)
+
+    def build_List(self, o):
+        return list(map(self.build, o.elts))
+
+    def build_Str(self, o):
+        return o.s
+
+    def build_Num(self, o):
+        return o.n
+
+    def build_Dict(self, o):
+        return dict([(self.build(k), self.build(v))
+                     for k, v in zip(o.keys, o.values)])
+
+    def build_Tuple(self, o):
+        return tuple(self.build_List(o))
+
+    def build_Name(self, o):
+        # Resolution order: keyword constants, importable modules,
+        # builtins; anything else is an error.
+        name = o.id
+        if name == 'None':
+            return None
+        if name == 'True':
+            return True
+        if name == 'False':
+            return False
+
+        # See if the Name is a package or module. If it is, import it.
+        try:
+            return modules(name)
+        except ImportError:
+            pass
+
+        # See if the Name is in builtins.
+        try:
+            return getattr(builtins, name)
+        except AttributeError:
+            pass
+
+        raise TypeError('unrepr could not resolve the name %s' % repr(name))
+
+    def build_NameConstant(self, o):
+        return o.value
+
+    build_Constant = build_NameConstant  # Python 3.8 change
+
+    def build_UnaryOp(self, o):
+        op, operand = map(self.build, [o.op, o.operand])
+        return op(operand)
+
+    def build_BinOp(self, o):
+        left, op, right = map(self.build, [o.left, o.op, o.right])
+        return op(left, right)
+
+    def build_Add(self, o):
+        return operator.add
+
+    def build_Mult(self, o):
+        return operator.mul
+
+    def build_USub(self, o):
+        return operator.neg
+
+    def build_Attribute(self, o):
+        parent = self.build(o.value)
+        return getattr(parent, o.attr)
+
+    def build_NoneType(self, o):
+        return None
+
+
+def unrepr(s):
+    """Return a Python object compiled from a string."""
+    if not s:
+        # Empty/falsy input is returned unchanged.
+        return s
+    b = _Builder()
+    obj = b.astnode(s)
+    return b.build(obj)
+
+
+def modules(modulePath):
+    """Load a module and retrieve a reference to that module."""
+    # __import__ returns the top-level package; look up the full dotted
+    # path in sys.modules to obtain the leaf module itself.
+    __import__(modulePath)
+    return sys.modules[modulePath]
+
+
+def attributes(full_attribute_name):
+    """Load a module and retrieve an attribute of that module."""
+
+    # Parse out the path, module, and attribute
+    # NOTE(review): assumes the name contains at least one dot; a
+    # dotless name yields mod_path == '' and fails in modules().
+    last_dot = full_attribute_name.rfind('.')
+    attr_name = full_attribute_name[last_dot + 1:]
+    mod_path = full_attribute_name[:last_dot]
+
+    mod = modules(mod_path)
+    # Let an AttributeError propagate outward.
+    try:
+        attr = getattr(mod, attr_name)
+    except AttributeError:
+        raise AttributeError("'%s' object has no attribute '%s'"
+                             % (mod_path, attr_name))
+
+    # Return a reference to the attribute.
+    return attr
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/sessions.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/sessions.py
new file mode 100644
index 0000000000000000000000000000000000000000..5b3328f2d451651e932703c842203bc557a99001
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/sessions.py
@@ -0,0 +1,910 @@
+"""Session implementation for CherryPy.
+
+You need to edit your config file to use sessions. Here's an example::
+
+ [/]
+ tools.sessions.on = True
+ tools.sessions.storage_class = cherrypy.lib.sessions.FileSession
+ tools.sessions.storage_path = "/home/site/sessions"
+ tools.sessions.timeout = 60
+
+This sets the session to be stored in files in the directory
+/home/site/sessions, and the session timeout to 60 minutes. If you omit
+``storage_class``, the sessions will be saved in RAM.
+``tools.sessions.on`` is the only required line for working sessions,
+the rest are optional.
+
+By default, the session ID is passed in a cookie, so the client's browser must
+have cookies enabled for your site.
+
+To set data for the current session, use
+``cherrypy.session['fieldname'] = 'fieldvalue'``;
+to get data use ``cherrypy.session.get('fieldname')``.
+
+================
+Locking sessions
+================
+
+By default, the ``'locking'`` mode of sessions is ``'implicit'``, which means
+the session is locked early and unlocked late. Be mindful of this default mode
+for any requests that take a long time to process (streaming responses,
+expensive calculations, database lookups, API calls, etc), as other concurrent
+requests that also utilize sessions will hang until the session is unlocked.
+
+If you want to control when the session data is locked and unlocked,
+set ``tools.sessions.locking = 'explicit'``. Then call
+``cherrypy.session.acquire_lock()`` and ``cherrypy.session.release_lock()``.
+Regardless of which mode you use, the session is guaranteed to be unlocked when
+the request is complete.
+
+=================
+Expiring Sessions
+=================
+
+You can force a session to expire with :func:`cherrypy.lib.sessions.expire`.
+Simply call that function at the point you want the session to expire, and it
+will cause the session cookie to expire client-side.
+
+===========================
+Session Fixation Protection
+===========================
+
+If CherryPy receives, via a request cookie, a session id that it does not
+recognize, it will reject that id and create a new one to return in the
+response cookie. This `helps prevent session fixation attacks
+<http://en.wikipedia.org/wiki/Session_fixation#Regenerate_SID_on_each_request>`_.
+However, CherryPy "recognizes" a session id by looking up the saved session
+data for that id. Therefore, if you never save any session data,
+**you will get a new session id for every request**.
+
+A side effect of CherryPy overwriting unrecognised session ids is that if you
+have multiple, separate CherryPy applications running on a single domain (e.g.
+on different ports), each app will overwrite the other's session id because by
+default they use the same cookie name (``"session_id"``) but do not recognise
+each other's sessions. It is therefore a good idea to use a different name for
+each, for example::
+
+ [/]
+ ...
+ tools.sessions.name = "my_app_session_id"
+
+================
+Sharing Sessions
+================
+
+If you run multiple instances of CherryPy (for example via mod_python behind
+Apache prefork), you most likely cannot use the RAM session backend, since each
+instance of CherryPy will have its own memory space. Use a different backend
+instead, and verify that all instances are pointing at the same file or db
+location. Alternately, you might try a load balancer which makes sessions
+"sticky". Google is your friend, there.
+
+================
+Expiration Dates
+================
+
+The response cookie will possess an expiration date to inform the client at
+which point to stop sending the cookie back in requests. If the server time
+and client time differ, expect sessions to be unreliable. **Make sure the
+system time of your server is accurate**.
+
+CherryPy defaults to a 60-minute session timeout, which also applies to the
+cookie which is sent to the client. Unfortunately, some versions of Safari
+("4 public beta" on Windows XP at least) appear to have a bug in their parsing
+of the GMT expiration date--they appear to interpret the date as one hour in
+the past. Sixty minutes minus one hour is pretty close to zero, so you may
+experience this bug as a new session id for every request, unless the requests
+are less than one second apart. To fix, try increasing the session.timeout.
+
+On the other extreme, some users report Firefox sending cookies after their
+expiration date, although this was on a system with an inaccurate system time.
+Maybe FF doesn't trust system time.
+"""
+import sys
+import datetime
+import os
+import time
+import threading
+import binascii
+import pickle
+
+import zc.lockfile
+
+import cherrypy
+from cherrypy.lib import httputil
+from cherrypy.lib import locking
+from cherrypy.lib import is_iterator
+
+
+missing = object()
+
+
+class Session(object):
+
+    """A CherryPy dict-like Session object (one per request)."""
+
+    _id = None
+
+    id_observers = None
+    "A list of callbacks to which to pass new id's."
+
+    @property
+    def id(self):
+        """Return the current session id."""
+        return self._id
+
+    @id.setter
+    def id(self, value):
+        # Setting the id notifies every registered observer (e.g. the
+        # cookie updater installed by init()).
+        self._id = value
+        for o in self.id_observers:
+            o(value)
+
+    timeout = 60
+    'Number of minutes after which to delete session data.'
+
+    locked = False
+    """
+    If True, this session instance has exclusive read/write access
+    to session data."""
+
+    loaded = False
+    """
+    If True, data has been retrieved from storage. This should happen
+    automatically on the first attempt to access session data."""
+
+    clean_thread = None
+    'Class-level Monitor which calls self.clean_up.'
+
+    clean_freq = 5
+    'The poll rate for expired session cleanup in minutes.'
+
+    originalid = None
+    'The session id passed by the client. May be missing or unsafe.'
+
+    missing = False
+    'True if the session requested by the client did not exist.'
+
+    regenerated = False
+    """
+    True if the application called session.regenerate(). This is not set by
+    internal calls to regenerate the session id."""
+
+    debug = False
+    'If True, log debug information.'
+
+    # --------------------- Session management methods --------------------- #
+
+    def __init__(self, id=None, **kwargs):
+        """Create a session, binding to ``id`` or generating a fresh one.
+
+        Unknown ids are rejected and replaced (session-fixation guard).
+        ``kwargs`` become instance attributes (timeout, clean_freq, ...).
+        """
+        self.id_observers = []
+        self._data = {}
+
+        # Apply configuration overrides as attributes.
+        for k, v in kwargs.items():
+            setattr(self, k, v)
+
+        self.originalid = id
+        self.missing = False
+        if id is None:
+            if self.debug:
+                cherrypy.log('No id given; making a new one', 'TOOLS.SESSIONS')
+            self._regenerate()
+        else:
+            self.id = id
+            if self._exists():
+                if self.debug:
+                    cherrypy.log('Set id to %s.' % id, 'TOOLS.SESSIONS')
+            else:
+                if self.debug:
+                    cherrypy.log('Expired or malicious session %r; '
+                                 'making a new one' % id, 'TOOLS.SESSIONS')
+                # Expired or malicious session. Make a new one.
+                # See https://github.com/cherrypy/cherrypy/issues/709.
+                self.id = None
+                self.missing = True
+                self._regenerate()
+
+    def now(self):
+        """Generate the session specific concept of 'now'.
+
+        Other session providers can override this to use alternative,
+        possibly timezone aware, versions of 'now'.
+        """
+        return datetime.datetime.now()
+
+    def regenerate(self):
+        """Replace the current session (with a new id)."""
+        self.regenerated = True
+        self._regenerate()
+
+    def _regenerate(self):
+        """Delete current stored data (if any) and pick a fresh unused id.
+
+        Preserves the lock state: if the old session was locked, the new
+        one is locked before returning.
+        """
+        if self.id is not None:
+            if self.debug:
+                cherrypy.log(
+                    'Deleting the existing session %r before '
+                    'regeneration.' % self.id,
+                    'TOOLS.SESSIONS')
+            self.delete()
+
+        old_session_was_locked = self.locked
+        if old_session_was_locked:
+            self.release_lock()
+            if self.debug:
+                cherrypy.log('Old lock released.', 'TOOLS.SESSIONS')
+
+        # Retry until a generated id does not collide with stored data.
+        self.id = None
+        while self.id is None:
+            self.id = self.generate_id()
+            # Assert that the generated id is not already stored.
+            if self._exists():
+                self.id = None
+        if self.debug:
+            cherrypy.log('Set id to generated %s.' % self.id,
+                         'TOOLS.SESSIONS')
+
+        if old_session_was_locked:
+            self.acquire_lock()
+            if self.debug:
+                cherrypy.log('Regenerated lock acquired.', 'TOOLS.SESSIONS')
+
+    def clean_up(self):
+        """Clean up expired sessions."""
+        # No-op in the base class; storage backends override this.
+        pass
+
+    def generate_id(self):
+        """Return a new session id."""
+        # 20 random bytes -> 40 hex chars, from the OS CSPRNG.
+        return binascii.hexlify(os.urandom(20)).decode('ascii')
+
+    def save(self):
+        """Save session data."""
+        try:
+            # If session data has never been loaded then it's never been
+            #  accessed: no need to save it
+            if self.loaded:
+                t = datetime.timedelta(seconds=self.timeout * 60)
+                expiration_time = self.now() + t
+                if self.debug:
+                    cherrypy.log('Saving session %r with expiry %s' %
+                                 (self.id, expiration_time),
+                                 'TOOLS.SESSIONS')
+                self._save(expiration_time)
+            else:
+                if self.debug:
+                    cherrypy.log(
+                        'Skipping save of session %r (no session loaded).' %
+                        self.id, 'TOOLS.SESSIONS')
+        finally:
+            if self.locked:
+                # Always release the lock if the user didn't release it
+                self.release_lock()
+                if self.debug:
+                    cherrypy.log('Lock released after save.', 'TOOLS.SESSIONS')
+
+    def load(self):
+        """Copy stored session data into this session instance."""
+        data = self._load()
+        # data is either None or a tuple (session_data, expiration_time)
+        if data is None or data[1] < self.now():
+            if self.debug:
+                cherrypy.log('Expired session %r, flushing data.' % self.id,
+                             'TOOLS.SESSIONS')
+            self._data = {}
+        else:
+            if self.debug:
+                cherrypy.log('Data loaded for session %r.' % self.id,
+                             'TOOLS.SESSIONS')
+            self._data = data[0]
+        self.loaded = True
+
+        # Stick the clean_thread in the class, not the instance.
+        # The instances are created and destroyed per-request.
+        cls = self.__class__
+        if self.clean_freq and not cls.clean_thread:
+            # clean_up is an instancemethod and not a classmethod,
+            # so that tool config can be accessed inside the method.
+            t = cherrypy.process.plugins.Monitor(
+                cherrypy.engine, self.clean_up, self.clean_freq * 60,
+                name='Session cleanup')
+            t.subscribe()
+            cls.clean_thread = t
+            t.start()
+            if self.debug:
+                cherrypy.log('Started cleanup thread.', 'TOOLS.SESSIONS')
+
+    def delete(self):
+        """Delete stored session data."""
+        self._delete()
+        if self.debug:
+            cherrypy.log('Deleted session %s.' % self.id,
+                         'TOOLS.SESSIONS')
+
+    # -------------------- Application accessor methods -------------------- #
+    # All accessors lazily load stored data on first touch.
+
+    def __getitem__(self, key):
+        if not self.loaded:
+            self.load()
+        return self._data[key]
+
+    def __setitem__(self, key, value):
+        if not self.loaded:
+            self.load()
+        self._data[key] = value
+
+    def __delitem__(self, key):
+        if not self.loaded:
+            self.load()
+        del self._data[key]
+
+    def pop(self, key, default=missing):
+        """Remove the specified key and return the corresponding value.
+        If key is not found, default is returned if given,
+        otherwise KeyError is raised.
+        """
+        if not self.loaded:
+            self.load()
+        if default is missing:
+            return self._data.pop(key)
+        else:
+            return self._data.pop(key, default)
+
+    def __contains__(self, key):
+        if not self.loaded:
+            self.load()
+        return key in self._data
+
+    def get(self, key, default=None):
+        """D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None."""
+        if not self.loaded:
+            self.load()
+        return self._data.get(key, default)
+
+    def update(self, d):
+        """D.update(E) -> None. Update D from E: for k in E: D[k] = E[k]."""
+        if not self.loaded:
+            self.load()
+        self._data.update(d)
+
+    def setdefault(self, key, default=None):
+        """D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D."""
+        if not self.loaded:
+            self.load()
+        return self._data.setdefault(key, default)
+
+    def clear(self):
+        """D.clear() -> None. Remove all items from D."""
+        if not self.loaded:
+            self.load()
+        self._data.clear()
+
+    def keys(self):
+        """D.keys() -> list of D's keys."""
+        if not self.loaded:
+            self.load()
+        return self._data.keys()
+
+    def items(self):
+        """D.items() -> list of D's (key, value) pairs, as 2-tuples."""
+        if not self.loaded:
+            self.load()
+        return self._data.items()
+
+    def values(self):
+        """D.values() -> list of D's values."""
+        if not self.loaded:
+            self.load()
+        return self._data.values()
+
+
+class RamSession(Session):
+
+    """In-process session storage: data and locks live in class dicts."""
+
+    # Class-level objects. Don't rebind these!
+    cache = {}
+    locks = {}
+
+    def clean_up(self):
+        """Clean up expired sessions."""
+
+        now = self.now()
+        # Iterate over a copy so concurrent mutation can't break us;
+        # KeyError means another thread already removed the entry.
+        for _id, (data, expiration_time) in self.cache.copy().items():
+            if expiration_time <= now:
+                try:
+                    del self.cache[_id]
+                except KeyError:
+                    pass
+                try:
+                    if self.locks[_id].acquire(blocking=False):
+                        lock = self.locks.pop(_id)
+                        lock.release()
+                except KeyError:
+                    pass
+
+        # added to remove obsolete lock objects
+        for _id in list(self.locks):
+            locked = (
+                _id not in self.cache
+                and self.locks[_id].acquire(blocking=False)
+            )
+            if locked:
+                lock = self.locks.pop(_id)
+                lock.release()
+
+    def _exists(self):
+        return self.id in self.cache
+
+    def _load(self):
+        # Returns (data, expiration_time) or None if unknown id.
+        return self.cache.get(self.id)
+
+    def _save(self, expiration_time):
+        self.cache[self.id] = (self._data, expiration_time)
+
+    def _delete(self):
+        self.cache.pop(self.id, None)
+
+    def acquire_lock(self):
+        """Acquire an exclusive lock on the currently-loaded session data."""
+        self.locked = True
+        # One RLock per session id, created on first use.
+        self.locks.setdefault(self.id, threading.RLock()).acquire()
+
+    def release_lock(self):
+        """Release the lock on the currently-loaded session data."""
+        self.locks[self.id].release()
+        self.locked = False
+
+    def __len__(self):
+        """Return the number of active sessions."""
+        return len(self.cache)
+
+
+class FileSession(Session):
+
+    """Implementation of the File backend for sessions
+
+    storage_path
+        The folder where session data will be saved. Each session
+        will be saved as pickle.dump(data, expiration_time) in its own file;
+        the filename will be self.SESSION_PREFIX + self.id.
+
+    lock_timeout
+        A timedelta or numeric seconds indicating how long
+        to block acquiring a lock. If None (default), acquiring a lock
+        will block indefinitely.
+    """
+
+    SESSION_PREFIX = 'session-'
+    LOCK_SUFFIX = '.lock'
+    pickle_protocol = pickle.HIGHEST_PROTOCOL
+
+    def __init__(self, id=None, **kwargs):
+        # The 'storage_path' arg is required for file-based sessions.
+        kwargs['storage_path'] = os.path.abspath(kwargs['storage_path'])
+        kwargs.setdefault('lock_timeout', None)
+
+        Session.__init__(self, id=id, **kwargs)
+
+        # validate self.lock_timeout: numeric seconds are normalized to
+        # a timedelta; anything else but None is rejected.
+        if isinstance(self.lock_timeout, (int, float)):
+            self.lock_timeout = datetime.timedelta(seconds=self.lock_timeout)
+        if not isinstance(self.lock_timeout, (datetime.timedelta, type(None))):
+            raise ValueError(
+                'Lock timeout must be numeric seconds or a timedelta instance.'
+            )
+
+    @classmethod
+    def setup(cls, **kwargs):
+        """Set up the storage system for file-based sessions.
+
+        This should only be called once per process; this will be done
+        automatically when using sessions.init (as the built-in Tool does).
+        """
+        # The 'storage_path' arg is required for file-based sessions.
+        kwargs['storage_path'] = os.path.abspath(kwargs['storage_path'])
+
+        for k, v in kwargs.items():
+            setattr(cls, k, v)
+
+    def _get_file_path(self):
+        f = os.path.join(self.storage_path, self.SESSION_PREFIX + self.id)
+        # Reject ids that would escape storage_path (path traversal).
+        if not os.path.abspath(f).startswith(self.storage_path):
+            raise cherrypy.HTTPError(400, 'Invalid session id in cookie.')
+        return f
+
+    def _exists(self):
+        path = self._get_file_path()
+        return os.path.exists(path)
+
+    def _load(self, path=None):
+        assert self.locked, ('The session load without being locked. '
+                             "Check your tools' priority levels.")
+        if path is None:
+            path = self._get_file_path()
+        try:
+            f = open(path, 'rb')
+            try:
+                return pickle.load(f)
+            finally:
+                f.close()
+        except (IOError, EOFError):
+            # Missing/truncated file is treated as "no session".
+            e = sys.exc_info()[1]
+            if self.debug:
+                cherrypy.log('Error loading the session pickle: %s' %
+                             e, 'TOOLS.SESSIONS')
+            return None
+
+    def _save(self, expiration_time):
+        assert self.locked, ('The session was saved without being locked. '
+                             "Check your tools' priority levels.")
+        f = open(self._get_file_path(), 'wb')
+        try:
+            pickle.dump((self._data, expiration_time), f, self.pickle_protocol)
+        finally:
+            f.close()
+
+    def _delete(self):
+        assert self.locked, ('The session deletion without being locked. '
+                             "Check your tools' priority levels.")
+        try:
+            os.unlink(self._get_file_path())
+        except OSError:
+            pass
+
+    def acquire_lock(self, path=None):
+        """Acquire an exclusive lock on the currently-loaded session data."""
+        if path is None:
+            path = self._get_file_path()
+        path += self.LOCK_SUFFIX
+        # Poll for the lock file every 0.1s until acquired or
+        # lock_timeout expires (None means wait forever).
+        checker = locking.LockChecker(self.id, self.lock_timeout)
+        while not checker.expired():
+            try:
+                self.lock = zc.lockfile.LockFile(path)
+            except zc.lockfile.LockError:
+                time.sleep(0.1)
+            else:
+                break
+        self.locked = True
+        if self.debug:
+            cherrypy.log('Lock acquired.', 'TOOLS.SESSIONS')
+
+    def release_lock(self, path=None):
+        """Release the lock on the currently-loaded session data."""
+        # 'path' is accepted for symmetry with acquire_lock but unused.
+        self.lock.close()
+        self.locked = False
+
+    def clean_up(self):
+        """Clean up expired sessions."""
+        now = self.now()
+        # Iterate over all session files in self.storage_path
+        for fname in os.listdir(self.storage_path):
+            have_session = (
+                fname.startswith(self.SESSION_PREFIX)
+                and not fname.endswith(self.LOCK_SUFFIX)
+            )
+            if have_session:
+                # We have a session file: lock and load it and check
+                # if it's expired. If it fails, nevermind.
+                path = os.path.join(self.storage_path, fname)
+                self.acquire_lock(path)
+                if self.debug:
+                    # This is a bit of a hack, since we're calling clean_up
+                    # on the first instance rather than the entire class,
+                    # so depending on whether you have "debug" set on the
+                    # path of the first session called, this may not run.
+                    cherrypy.log('Cleanup lock acquired.', 'TOOLS.SESSIONS')
+
+                try:
+                    contents = self._load(path)
+                    # _load returns None on IOError
+                    if contents is not None:
+                        data, expiration_time = contents
+                        if expiration_time < now:
+                            # Session expired: deleting it
+                            os.unlink(path)
+                finally:
+                    self.release_lock(path)
+
+    def __len__(self):
+        """Return the number of active sessions."""
+        return len([fname for fname in os.listdir(self.storage_path)
+                    if (fname.startswith(self.SESSION_PREFIX) and
+                        not fname.endswith(self.LOCK_SUFFIX))])
+
+
+class MemcachedSession(Session):
+
+    """Memcached-backed session storage (requires the memcache package)."""
+
+    # The most popular memcached client for Python isn't thread-safe.
+    # Wrap all .get and .set operations in a single lock.
+    mc_lock = threading.RLock()
+
+    # This is a separate set of locks per session id.
+    locks = {}
+
+    servers = ['localhost:11211']
+
+    @classmethod
+    def setup(cls, **kwargs):
+        """Set up the storage system for memcached-based sessions.
+
+        This should only be called once per process; this will be done
+        automatically when using sessions.init (as the built-in Tool does).
+        """
+        for k, v in kwargs.items():
+            setattr(cls, k, v)
+
+        # Imported lazily so the dependency is only required when this
+        # backend is actually used.
+        import memcache
+        cls.cache = memcache.Client(cls.servers)
+
+    def _exists(self):
+        self.mc_lock.acquire()
+        try:
+            return bool(self.cache.get(self.id))
+        finally:
+            self.mc_lock.release()
+
+    def _load(self):
+        self.mc_lock.acquire()
+        try:
+            return self.cache.get(self.id)
+        finally:
+            self.mc_lock.release()
+
+    def _save(self, expiration_time):
+        # Send the expiration time as "Unix time" (seconds since 1/1/1970)
+        td = int(time.mktime(expiration_time.timetuple()))
+        self.mc_lock.acquire()
+        try:
+            if not self.cache.set(self.id, (self._data, expiration_time), td):
+                raise AssertionError(
+                    'Session data for id %r not set.' % self.id)
+        finally:
+            self.mc_lock.release()
+
+    def _delete(self):
+        self.cache.delete(self.id)
+
+    def acquire_lock(self):
+        """Acquire an exclusive lock on the currently-loaded session data."""
+        self.locked = True
+        self.locks.setdefault(self.id, threading.RLock()).acquire()
+        if self.debug:
+            cherrypy.log('Lock acquired.', 'TOOLS.SESSIONS')
+
+    def release_lock(self):
+        """Release the lock on the currently-loaded session data."""
+        self.locks[self.id].release()
+        self.locked = False
+
+    def __len__(self):
+        """Return the number of active sessions."""
+        # Memcached offers no cheap way to count keys.
+        raise NotImplementedError
+
+
+# Hook functions (for CherryPy tools)
+
+def save():
+    """Save any changed session data."""
+
+    if not hasattr(cherrypy.serving, 'session'):
+        return
+    request = cherrypy.serving.request
+    response = cherrypy.serving.response
+
+    # Guard against running twice
+    if hasattr(request, '_sessionsaved'):
+        return
+    request._sessionsaved = True
+
+    if response.stream:
+        # If the body is being streamed, we have to save the data
+        #   *after* the response has been written out
+        request.hooks.attach('on_end_request', cherrypy.session.save)
+    else:
+        # If the body is not being streamed, we save the data now
+        # (so we can release the lock).
+        if is_iterator(response.body):
+            response.collapse_body()
+        cherrypy.session.save()
+
+
+# Mark as failsafe so the hook always runs even if earlier hooks raise.
+save.failsafe = True
+
+
+def close():
+    """Close the session object for this request."""
+    sess = getattr(cherrypy.serving, 'session', None)
+    if getattr(sess, 'locked', False):
+        # If the session is still locked we release the lock
+        sess.release_lock()
+        if sess.debug:
+            cherrypy.log('Lock released on close.', 'TOOLS.SESSIONS')
+
+
+# Failsafe + low priority (90) so the lock is released late in the
+# request teardown, after other hooks have run.
+close.failsafe = True
+close.priority = 90
+
+
+def init(storage_type=None, path=None, path_header=None, name='session_id',
+         timeout=60, domain=None, secure=False, clean_freq=5,
+         persistent=True, httponly=False, debug=False,
+         # Py27 compat
+         # *, storage_class=RamSession,
+         **kwargs):
+    """Initialize session object (using cookies).
+
+    storage_class
+        The Session subclass to use. Defaults to RamSession.
+
+    storage_type
+        (deprecated)
+        One of 'ram', 'file', memcached'. This will be
+        used to look up the corresponding class in cherrypy.lib.sessions
+        globals. For example, 'file' will use the FileSession class.
+
+    path
+        The 'path' value to stick in the response cookie metadata.
+
+    path_header
+        If 'path' is None (the default), then the response
+        cookie 'path' will be pulled from request.headers[path_header].
+
+    name
+        The name of the cookie.
+
+    timeout
+        The expiration timeout (in minutes) for the stored session data.
+        If 'persistent' is True (the default), this is also the timeout
+        for the cookie.
+
+    domain
+        The cookie domain.
+
+    secure
+        If False (the default) the cookie 'secure' value will not
+        be set. If True, the cookie 'secure' value will be set (to 1).
+
+    clean_freq (minutes)
+        The poll rate for expired session cleanup.
+
+    persistent
+        If True (the default), the 'timeout' argument will be used
+        to expire the cookie. If False, the cookie will not have an expiry,
+        and the cookie will be a "session cookie" which expires when the
+        browser is closed.
+
+    httponly
+        If False (the default) the cookie 'httponly' value will not be set.
+        If True, the cookie 'httponly' value will be set (to 1).
+
+    Any additional kwargs will be bound to the new Session instance,
+    and may be specific to the storage type. See the subclass of Session
+    you're using for more information.
+    """
+
+    # Py27 compat
+    storage_class = kwargs.pop('storage_class', RamSession)
+
+    request = cherrypy.serving.request
+
+    # Guard against running twice
+    if hasattr(request, '_session_init_flag'):
+        return
+    request._session_init_flag = True
+
+    # Check if request came with a session ID
+    id = None
+    if name in request.cookie:
+        id = request.cookie[name].value
+        if debug:
+            cherrypy.log('ID obtained from request.cookie: %r' % id,
+                         'TOOLS.SESSIONS')
+
+    first_time = not hasattr(cherrypy, 'session')
+
+    if storage_type:
+        # Deprecated path: resolve e.g. 'file' -> FileSession from this
+        # module's globals.
+        if first_time:
+            msg = 'storage_type is deprecated. Supply storage_class instead'
+            cherrypy.log(msg)
+        storage_class = storage_type.title() + 'Session'
+        storage_class = globals()[storage_class]
+
+    # call setup first time only
+    if first_time:
+        if hasattr(storage_class, 'setup'):
+            storage_class.setup(**kwargs)
+
+    # Create and attach a new Session instance to cherrypy.serving.
+    # It will possess a reference to (and lock, and lazily load)
+    # the requested session data.
+    kwargs['timeout'] = timeout
+    kwargs['clean_freq'] = clean_freq
+    cherrypy.serving.session = sess = storage_class(id, **kwargs)
+    sess.debug = debug
+
+    def update_cookie(id):
+        """Update the cookie every time the session id changes."""
+        cherrypy.serving.response.cookie[name] = id
+    sess.id_observers.append(update_cookie)
+
+    # Create cherrypy.session which will proxy to cherrypy.serving.session
+    if not hasattr(cherrypy, 'session'):
+        cherrypy.session = cherrypy._ThreadLocalProxy('session')
+
+    if persistent:
+        cookie_timeout = timeout
+    else:
+        # See http://support.microsoft.com/kb/223799/EN-US/
+        # and http://support.mozilla.com/en-US/kb/Cookies
+        cookie_timeout = None
+    set_response_cookie(path=path, path_header=path_header, name=name,
+                        timeout=cookie_timeout, domain=domain, secure=secure,
+                        httponly=httponly)
+
+
+def set_response_cookie(path=None, path_header=None, name='session_id',
+                        timeout=60, domain=None, secure=False, httponly=False):
+    """Set a response cookie for the client.
+
+    path
+        the 'path' value to stick in the response cookie metadata.
+
+    path_header
+        if 'path' is None (the default), then the response
+        cookie 'path' will be pulled from request.headers[path_header].
+
+    name
+        the name of the cookie.
+
+    timeout
+        the expiration timeout for the cookie. If 0 or other boolean
+        False, no 'expires' param will be set, and the cookie will be a
+        "session cookie" which expires when the browser is closed.
+
+    domain
+        the cookie domain.
+
+    secure
+        if False (the default) the cookie 'secure' value will not
+        be set. If True, the cookie 'secure' value will be set (to 1).
+
+    httponly
+        If False (the default) the cookie 'httponly' value will not be set.
+        If True, the cookie 'httponly' value will be set (to 1).
+
+    """
+    # Set response cookie
+    cookie = cherrypy.serving.response.cookie
+    cookie[name] = cherrypy.serving.session.id
+    # Path precedence: explicit arg, then the named request header,
+    # then '/'.
+    cookie[name]['path'] = (
+        path or
+        cherrypy.serving.request.headers.get(path_header) or
+        '/'
+    )
+
+    if timeout:
+        cookie[name]['max-age'] = timeout * 60
+        _add_MSIE_max_age_workaround(cookie[name], timeout)
+    if domain is not None:
+        cookie[name]['domain'] = domain
+    if secure:
+        cookie[name]['secure'] = 1
+    if httponly:
+        if not cookie[name].isReservedKey('httponly'):
+            raise ValueError('The httponly cookie token is not supported.')
+        cookie[name]['httponly'] = 1
+
+
+def _add_MSIE_max_age_workaround(cookie, timeout):
+    """
+    We'd like to use the "max-age" param as indicated in
+    http://www.faqs.org/rfcs/rfc2109.html but IE doesn't
+    save it to disk and the session is lost if people close
+    the browser. So we have to use the old "expires" ... sigh ...
+    """
+    # timeout is in minutes; 'expires' wants an absolute HTTP date.
+    expires = time.time() + timeout * 60
+    cookie['expires'] = httputil.HTTPDate(expires)
+
+
+def expire():
+    """Expire the current session cookie."""
+    name = cherrypy.serving.request.config.get(
+        'tools.sessions.name', 'session_id')
+    # Set 'expires' one year in the past and drop 'max-age' so the
+    # browser discards the cookie immediately.
+    one_year = 60 * 60 * 24 * 365
+    e = time.time() - one_year
+    cherrypy.serving.response.cookie[name]['expires'] = httputil.HTTPDate(e)
+    cherrypy.serving.response.cookie[name].pop('max-age', None)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/static.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/static.py
new file mode 100644
index 0000000000000000000000000000000000000000..66a5a94747da045649f675e9241c30bb4039f5da
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/static.py
@@ -0,0 +1,416 @@
+"""Module with helpers for serving static files."""
+
+import os
+import platform
+import re
+import stat
+import mimetypes
+import urllib.parse
+import unicodedata
+
+from email.generator import _make_boundary as make_boundary
+from io import UnsupportedOperation
+
+import cherrypy
+from cherrypy._cpcompat import ntob
+from cherrypy.lib import cptools, httputil, file_generator_limited
+
+
+def _setup_mimetypes():
+    """Pre-initialize global mimetype map."""
+    # Make sure the stdlib table is loaded, then register extensions that
+    # are missing (or mapped differently) on some platforms.
+    if not mimetypes.inited:
+        mimetypes.init()
+    mimetypes.types_map['.dwg'] = 'image/x-dwg'
+    mimetypes.types_map['.ico'] = 'image/x-icon'
+    mimetypes.types_map['.bz2'] = 'application/x-bzip2'
+    mimetypes.types_map['.gz'] = 'application/x-gzip'
+
+
+# Run once at import time so every handler sees the patched map.
+_setup_mimetypes()
+
+
+def _make_content_disposition(disposition, file_name):
+    """Create HTTP header for downloading a file with a UTF-8 filename.
+
+    This function implements the recommendations of :rfc:`6266#appendix-D`.
+    See this and related answers: https://stackoverflow.com/a/8996249/2173868.
+    """
+    # As normalization algorithm for `unicodedata` is used composed form (NFC
+    # and NFKC) with compatibility equivalence criteria (NFK), so "NFKC" is the
+    # one. It first applies the compatibility decomposition, followed by the
+    # canonical composition. Should be displayed in the same manner, should be
+    # treated in the same way by applications such as alphabetizing names or
+    # searching, and may be substituted for each other.
+    # See: https://en.wikipedia.org/wiki/Unicode_equivalence.
+    ascii_name = (
+        unicodedata.normalize('NFKC', file_name).
+        encode('ascii', errors='ignore').decode()
+    )
+    # Plain "filename" always carries the ASCII fallback; the UTF-8
+    # original is added as "filename*" only when the two differ.
+    header = '{}; filename="{}"'.format(disposition, ascii_name)
+    if ascii_name != file_name:
+        quoted_name = urllib.parse.quote(file_name)
+        header += '; filename*=UTF-8\'\'{}'.format(quoted_name)
+    return header
+
+
+def serve_file(path, content_type=None, disposition=None, name=None,
+               debug=False):
+    """Set status, headers, and body in order to serve the given path.
+
+    The Content-Type header will be set to the content_type arg, if provided.
+    If not provided, the Content-Type will be guessed by the file extension
+    of the 'path' argument.
+
+    If disposition is not None, the Content-Disposition header will be set
+    to "<disposition>; filename=<name>; filename*=utf-8''<name>"
+    as described in :rfc:`6266#appendix-D`.
+    If name is None, it will be set to the basename of path.
+    If disposition is None, no Content-Disposition header will be written.
+
+    Raises ValueError for a relative path and cherrypy.NotFound when the
+    path cannot be stat'ed or is a directory.
+    """
+    response = cherrypy.serving.response
+
+    # If path is relative, users should fix it by making path absolute.
+    # That is, CherryPy should not guess where the application root is.
+    # It certainly should *not* use cwd (since CP may be invoked from a
+    # variety of paths). If using tools.staticdir, you can make your relative
+    # paths become absolute by supplying a value for "tools.staticdir.root".
+    if not os.path.isabs(path):
+        msg = "'%s' is not an absolute path." % path
+        if debug:
+            cherrypy.log(msg, 'TOOLS.STATICFILE')
+        raise ValueError(msg)
+
+    try:
+        st = os.stat(path)
+    except (OSError, TypeError, ValueError):
+        # OSError when file fails to stat
+        # TypeError on Python 2 when there's a null byte
+        # ValueError on Python 3 when there's a null byte
+        if debug:
+            cherrypy.log('os.stat(%r) failed' % path, 'TOOLS.STATIC')
+        raise cherrypy.NotFound()
+
+    # Check if path is a directory.
+    if stat.S_ISDIR(st.st_mode):
+        # Let the caller deal with it as they like.
+        if debug:
+            cherrypy.log('%r is a directory' % path, 'TOOLS.STATIC')
+        raise cherrypy.NotFound()
+
+    # Set the Last-Modified response header, so that
+    # modified-since validation code can work.
+    response.headers['Last-Modified'] = httputil.HTTPDate(st.st_mtime)
+    # validate_since() may short-circuit the request (e.g. 304 Not
+    # Modified) when the client's cached copy is still fresh.
+    cptools.validate_since()
+
+    if content_type is None:
+        # Set content-type based on filename extension
+        ext = ''
+        i = path.rfind('.')
+        if i != -1:
+            ext = path[i:].lower()
+        content_type = mimetypes.types_map.get(ext, None)
+    if content_type is not None:
+        response.headers['Content-Type'] = content_type
+    if debug:
+        cherrypy.log('Content-Type: %r' % content_type, 'TOOLS.STATIC')
+
+    cd = None
+    if disposition is not None:
+        if name is None:
+            name = os.path.basename(path)
+        cd = _make_content_disposition(disposition, name)
+        response.headers['Content-Disposition'] = cd
+    if debug:
+        cherrypy.log('Content-Disposition: %r' % cd, 'TOOLS.STATIC')
+
+    # Set Content-Length and use an iterable (file object)
+    # this way CP won't load the whole file in memory
+    content_length = st.st_size
+    fileobj = open(path, 'rb')
+    return _serve_fileobj(fileobj, content_type, content_length, debug=debug)
+
+
+def serve_fileobj(fileobj, content_type=None, disposition=None, name=None,
+                  debug=False):
+    """Set status, headers, and body in order to serve the given file object.
+
+    The Content-Type header will be set to the content_type arg, if provided.
+
+    If disposition is not None, the Content-Disposition header will be set
+    to "<disposition>; filename=<name>; filename*=utf-8''<name>"
+    as described in :rfc:`6266#appendix-D`.
+    If name is None, 'filename' will not be set.
+    If disposition is None, no Content-Disposition header will be written.
+
+    CAUTION: If the request contains a 'Range' header, one or more seek()s will
+    be performed on the file object. This may cause undesired behavior if
+    the file object is not seekable. It could also produce undesired results
+    if the caller set the read position of the file object prior to calling
+    serve_fileobj(), expecting that the data would be served starting from that
+    position.
+    """
+    response = cherrypy.serving.response
+
+    try:
+        st = os.fstat(fileobj.fileno())
+    except AttributeError:
+        # Not a real file (e.g. an in-memory stream): no fileno(), so
+        # serve without Last-Modified or a known Content-Length.
+        if debug:
+            cherrypy.log('os has no fstat attribute', 'TOOLS.STATIC')
+        content_length = None
+    except UnsupportedOperation:
+        # File-like object explicitly refuses fileno(); same fallback.
+        content_length = None
+    else:
+        # Set the Last-Modified response header, so that
+        # modified-since validation code can work.
+        response.headers['Last-Modified'] = httputil.HTTPDate(st.st_mtime)
+        cptools.validate_since()
+        content_length = st.st_size
+
+    if content_type is not None:
+        response.headers['Content-Type'] = content_type
+    if debug:
+        cherrypy.log('Content-Type: %r' % content_type, 'TOOLS.STATIC')
+
+    cd = None
+    if disposition is not None:
+        if name is None:
+            # No filename available: send the bare disposition token.
+            cd = disposition
+        else:
+            cd = _make_content_disposition(disposition, name)
+        response.headers['Content-Disposition'] = cd
+    if debug:
+        cherrypy.log('Content-Disposition: %r' % cd, 'TOOLS.STATIC')
+
+    return _serve_fileobj(fileobj, content_type, content_length, debug=debug)
+
+
+def _serve_fileobj(fileobj, content_type, content_length, debug=False):
+    """Internal. Set response.body to the given file object, perhaps ranged."""
+    response = cherrypy.serving.response
+
+    # HTTP/1.0 didn't have Range/Accept-Ranges headers, or the 206 code
+    request = cherrypy.serving.request
+    if request.protocol >= (1, 1):
+        response.headers['Accept-Ranges'] = 'bytes'
+        # Per the branches below: r is a list of (start, stop) pairs, []
+        # when the Range header is unsatisfiable, or falsy when no byte
+        # ranges apply (serve the whole body).
+        r = httputil.get_ranges(request.headers.get('Range'), content_length)
+        if r == []:
+            response.headers['Content-Range'] = 'bytes */%s' % content_length
+            message = ('Invalid Range (first-byte-pos greater than '
+                       'Content-Length)')
+            if debug:
+                cherrypy.log(message, 'TOOLS.STATIC')
+            raise cherrypy.HTTPError(416, message)
+
+        if r:
+            if len(r) == 1:
+                # Return a single-part response.
+                start, stop = r[0]
+                # Clamp the end of the range to the actual file size.
+                if stop > content_length:
+                    stop = content_length
+                r_len = stop - start
+                if debug:
+                    cherrypy.log(
+                        'Single part; start: %r, stop: %r' % (start, stop),
+                        'TOOLS.STATIC')
+                response.status = '206 Partial Content'
+                # Content-Range's last-byte-pos is inclusive, hence stop - 1.
+                response.headers['Content-Range'] = (
+                    'bytes %s-%s/%s' % (start, stop - 1, content_length))
+                response.headers['Content-Length'] = r_len
+                fileobj.seek(start)
+                response.body = file_generator_limited(fileobj, r_len)
+            else:
+                # Return a multipart/byteranges response.
+                response.status = '206 Partial Content'
+                boundary = make_boundary()
+                ct = 'multipart/byteranges; boundary=%s' % boundary
+                response.headers['Content-Type'] = ct
+                if 'Content-Length' in response.headers:
+                    # Delete Content-Length header so finalize() recalcs it.
+                    del response.headers['Content-Length']
+
+                def file_ranges():
+                    # Apache compatibility:
+                    yield b'\r\n'
+
+                    for start, stop in r:
+                        if debug:
+                            cherrypy.log(
+                                'Multipart; start: %r, stop: %r' % (
+                                    start, stop),
+                                'TOOLS.STATIC')
+                        yield ntob('--' + boundary, 'ascii')
+                        yield ntob('\r\nContent-type: %s' % content_type,
+                                   'ascii')
+                        yield ntob(
+                            '\r\nContent-range: bytes %s-%s/%s\r\n\r\n' % (
+                                start, stop - 1, content_length),
+                            'ascii')
+                        fileobj.seek(start)
+                        gen = file_generator_limited(fileobj, stop - start)
+                        for chunk in gen:
+                            yield chunk
+                        yield b'\r\n'
+                    # Final boundary
+                    yield ntob('--' + boundary + '--', 'ascii')
+
+                    # Apache compatibility:
+                    yield b'\r\n'
+                response.body = file_ranges()
+            return response.body
+        else:
+            if debug:
+                cherrypy.log('No byteranges requested', 'TOOLS.STATIC')
+
+    # Set Content-Length and use an iterable (file object)
+    # this way CP won't load the whole file in memory
+    response.headers['Content-Length'] = content_length
+    response.body = fileobj
+    return response.body
+
+
+def serve_download(path, name=None):
+    """Serve 'path' as an application/x-download attachment.
+
+    'name' overrides the downloaded filename; when None, serve_file()
+    falls back to the basename of 'path'.
+    """
+    # This is such a common idiom I felt it deserved its own wrapper.
+    return serve_file(path, 'application/x-download', 'attachment', name)
+
+
+def _attempt(filename, content_types, debug=False):
+    """Try to serve 'filename'; return True if served, False if not found."""
+    if debug:
+        cherrypy.log('Attempting %r (content_types %r)' %
+                     (filename, content_types), 'TOOLS.STATICDIR')
+    try:
+        # you can set the content types for a
+        # complete directory per extension
+        content_type = None
+        if content_types:
+            r, ext = os.path.splitext(filename)
+            # content_types keys are extensions without the leading dot.
+            content_type = content_types.get(ext[1:], None)
+        serve_file(filename, content_type=content_type, debug=debug)
+        return True
+    except cherrypy.NotFound:
+        # If we didn't find the static file, continue handling the
+        # request. We might find a dynamic handler instead.
+        # NOTE(review): logged under TOOLS.STATICFILE although the attempt
+        # above logs under TOOLS.STATICDIR — confirm which is intended.
+        if debug:
+            cherrypy.log('NotFound', 'TOOLS.STATICFILE')
+        return False
+
+
+def staticdir(section, dir, root='', match='', content_types=None, index='',
+              debug=False):
+    """Serve a static resource from the given (root +) dir.
+
+    Returns True when a file was served, False otherwise (so the request
+    can fall through to a dynamic handler).
+
+    match
+        If given, request.path_info will be searched for the given
+        regular expression before attempting to serve static content.
+
+    content_types
+        If given, it should be a Python dictionary of
+        {file-extension: content-type} pairs, where 'file-extension' is
+        a string (e.g. "gif") and 'content-type' is the value to write
+        out in the Content-Type response header (e.g. "image/gif").
+
+    index
+        If provided, it should be the (relative) name of a file to
+        serve for directory requests. For example, if the dir argument is
+        '/home/me', the Request-URI is 'myapp', and the index arg is
+        'index.html', the file '/home/me/myapp/index.html' will be sought.
+    """
+    request = cherrypy.serving.request
+    if request.method not in ('GET', 'HEAD'):
+        if debug:
+            cherrypy.log('request.method not GET or HEAD', 'TOOLS.STATICDIR')
+        return False
+
+    if match and not re.search(match, request.path_info):
+        if debug:
+            cherrypy.log('request.path_info %r does not match pattern %r' %
+                         (request.path_info, match), 'TOOLS.STATICDIR')
+        return False
+
+    # Allow the use of '~' to refer to a user's home directory.
+    dir = os.path.expanduser(dir)
+
+    # If dir is relative, make absolute using "root".
+    if not os.path.isabs(dir):
+        if not root:
+            msg = 'Static dir requires an absolute dir (or root).'
+            if debug:
+                cherrypy.log(msg, 'TOOLS.STATICDIR')
+            raise ValueError(msg)
+        dir = os.path.join(root, dir)
+
+    # Determine where we are in the object tree relative to 'section'
+    # (where the static tool was defined).
+    if section == 'global':
+        section = '/'
+    section = section.rstrip(r'\/')
+    # 'branch' is the URL path below the mount section, decoded.
+    branch = request.path_info[len(section) + 1:]
+    branch = urllib.parse.unquote(branch.lstrip(r'\/'))
+
+    # Requesting a file in sub-dir of the staticdir results
+    # in mixing of delimiter styles, e.g. C:\static\js/script.js.
+    # Windows accepts this form except not when the path is
+    # supplied in extended-path notation, e.g. \\?\C:\static\js/script.js.
+    # http://bit.ly/1vdioCX
+    if platform.system() == 'Windows':
+        branch = branch.replace('/', '\\')
+
+    # If branch is "", filename will end in a slash
+    filename = os.path.join(dir, branch)
+    if debug:
+        cherrypy.log('Checking file %r to fulfill %r' %
+                     (filename, request.path_info), 'TOOLS.STATICDIR')
+
+    # There's a chance that the branch pulled from the URL might
+    # have ".." or similar uplevel attacks in it. Check that the final
+    # filename is a child of dir.
+    # NOTE(review): a plain prefix check matches sibling dirs sharing the
+    # prefix (e.g. '/static' vs '/static-private') — confirm 'dir' always
+    # ends with a separator or that this is acceptable here.
+    if not os.path.normpath(filename).startswith(os.path.normpath(dir)):
+        raise cherrypy.HTTPError(403)  # Forbidden
+
+    # NOTE(review): 'debug' is not forwarded to _attempt() here, so the
+    # attempt itself is not traced — confirm whether that is intentional.
+    handled = _attempt(filename, content_types)
+    if not handled:
+        # Check for an index file if a folder was requested.
+        if index:
+            handled = _attempt(os.path.join(filename, index), content_types)
+            if handled:
+                request.is_index = filename[-1] in (r'\/')
+    return handled
+
+
+def staticfile(filename, root=None, match='', content_types=None, debug=False):
+    """Serve a static resource from the given (root +) filename.
+
+    Returns True when the file was served, False otherwise.
+
+    match
+        If given, request.path_info will be searched for the given
+        regular expression before attempting to serve static content.
+
+    content_types
+        If given, it should be a Python dictionary of
+        {file-extension: content-type} pairs, where 'file-extension' is
+        a string (e.g. "gif") and 'content-type' is the value to write
+        out in the Content-Type response header (e.g. "image/gif").
+
+    """
+    request = cherrypy.serving.request
+    if request.method not in ('GET', 'HEAD'):
+        if debug:
+            cherrypy.log('request.method not GET or HEAD', 'TOOLS.STATICFILE')
+        return False
+
+    if match and not re.search(match, request.path_info):
+        if debug:
+            cherrypy.log('request.path_info %r does not match pattern %r' %
+                         (request.path_info, match), 'TOOLS.STATICFILE')
+        return False
+
+    # If filename is relative, make absolute using "root".
+    if not os.path.isabs(filename):
+        if not root:
+            msg = "Static tool requires an absolute filename (got '%s')." % (
+                filename,)
+            if debug:
+                cherrypy.log(msg, 'TOOLS.STATICFILE')
+            raise ValueError(msg)
+        filename = os.path.join(root, filename)
+
+    return _attempt(filename, content_types, debug=debug)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/xmlrpcutil.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/xmlrpcutil.py
new file mode 100644
index 0000000000000000000000000000000000000000..29d9c4a2b23ecac6d07285f0d872f3aff3b0ef1c
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/lib/xmlrpcutil.py
@@ -0,0 +1,60 @@
+"""XML-RPC tool helpers."""
+import sys
+from xmlrpc.client import (
+ loads as xmlrpc_loads, dumps as xmlrpc_dumps,
+ Fault as XMLRPCFault
+)
+
+import cherrypy
+from cherrypy._cpcompat import ntob
+
+
+def process_body():
+    """Return (params, method) from request body."""
+    try:
+        return xmlrpc_loads(cherrypy.request.body.read())
+    except Exception:
+        # Malformed XML-RPC payload: return sentinel values instead of
+        # raising — presumably so the caller can answer with an XML-RPC
+        # fault rather than an HTTP error; confirm against the dispatcher.
+        return ('ERROR PARAMS', ), 'ERRORMETHOD'
+
+
+def patched_path(path):
+    """Return 'path', doctored for RPC."""
+    # Normalize to a trailing slash so the '/RPC2/' prefix test below
+    # also matches a bare '/RPC2'.
+    if not path.endswith('/'):
+        path += '/'
+    if path.startswith('/RPC2/'):
+        # strip the first /rpc2
+        path = path[5:]
+    return path
+
+
+def _set_response(body):
+    """Set up HTTP status, headers and body within CherryPy."""
+    # The XML-RPC spec (http://www.xmlrpc.com/spec) says:
+    # "Unless there's a lower-level error, always return 200 OK."
+    # Since Python's xmlrpc_client interprets a non-200 response
+    # as a "Protocol Error", we'll just return 200 every time.
+    response = cherrypy.response
+    response.status = '200 OK'
+    response.body = ntob(body, 'utf-8')
+    response.headers['Content-Type'] = 'text/xml'
+    # NOTE(review): len(body) counts *characters* of the unencoded string;
+    # for non-ASCII payloads this understates the UTF-8 byte length of
+    # response.body — confirm whether finalize() recomputes the header.
+    response.headers['Content-Length'] = len(body)
+
+
+def respond(body, encoding='utf-8', allow_none=0):
+    """Construct HTTP response body."""
+    # xmlrpc_dumps() takes either a Fault instance or a params tuple;
+    # wrap plain return values in a 1-tuple.
+    if not isinstance(body, XMLRPCFault):
+        body = (body,)
+
+    _set_response(
+        xmlrpc_dumps(
+            body, methodresponse=1,
+            encoding=encoding,
+            allow_none=allow_none
+        )
+    )
+
+
+def on_error(*args, **kwargs):
+    """Construct HTTP response body for an error response."""
+    # Report the currently-active exception as a generic Fault (code 1).
+    body = str(sys.exc_info()[1])
+    _set_response(xmlrpc_dumps(XMLRPCFault(1, body)))
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/process/__init__.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/process/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..f242d226a170cccd4f4e5d1add793332014b460d
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/process/__init__.py
@@ -0,0 +1,17 @@
+"""Site container for an HTTP server.
+
+A Web Site Process Bus object is used to connect applications, servers,
+and frameworks with site-wide services such as daemonization, process
+reload, signal handling, drop privileges, PID file management, logging
+for all of these, and many more.
+
+The 'plugins' module defines a few abstract and concrete services for
+use with the bus. Some use tool-specific channels; see the documentation
+for each class.
+"""
+
+from .wspbus import bus
+from . import plugins, servers
+
+
+__all__ = ('bus', 'plugins', 'servers')
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/process/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/process/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..83a0c681e21ff69914446d54065db4f08d48b94b
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/process/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/process/__pycache__/plugins.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/process/__pycache__/plugins.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..094883cf089f8bd61b7478221f285bbe05e18b77
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/process/__pycache__/plugins.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/process/__pycache__/servers.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/process/__pycache__/servers.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..aced89c9aa1f6d0dfb8b12afa40471cd3eb322c7
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/process/__pycache__/servers.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/process/__pycache__/win32.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/process/__pycache__/win32.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7cecd415f7142d9e390df6e9a59c8aedd99e99e0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/process/__pycache__/win32.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/process/__pycache__/wspbus.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/process/__pycache__/wspbus.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1e6ec523e14afdd9d9b29326c96b0e71b3cde8e9
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/process/__pycache__/wspbus.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/process/plugins.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/process/plugins.py
new file mode 100644
index 0000000000000000000000000000000000000000..2a9952de1b7ddee17fd0ef7e5af314e0aa3248db
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/process/plugins.py
@@ -0,0 +1,754 @@
+"""Site services for use with a Web Site Process Bus."""
+
+import os
+import re
+import signal as _signal
+import sys
+import time
+import threading
+import _thread
+
+from cherrypy._cpcompat import text_or_bytes
+from cherrypy._cpcompat import ntob
+
+# _module__file__base is used by Autoreload to make
+# absolute any filenames retrieved from sys.modules which are not
+# already absolute paths. This is to work around Python's quirk
+# of importing the startup script and using a relative filename
+# for it in sys.modules.
+#
+# Autoreload examines sys.modules afresh every time it runs. If an application
+# changes the current directory by executing os.chdir(), then the next time
+# Autoreload runs, it will not be able to find any filenames which are
+# not absolute paths, because the current directory is not the same as when the
+# module was first imported. Autoreload will then wrongly conclude the file
+# has "changed", and initiate the shutdown/re-exec sequence.
+# See ticket #917.
+# For this workaround to have a decent probability of success, this module
+# needs to be imported as early as possible, before the app has much chance
+# to change the working directory.
+_module__file__base = os.getcwd()
+
+
+class SimplePlugin(object):
+
+    """Plugin base class which auto-subscribes methods for known channels."""
+
+    bus = None
+    """A :class:`Bus <cherrypy.process.wspbus.Bus>`, usually cherrypy.engine.
+    """
+
+    def __init__(self, bus):
+        # The bus this plugin listens on.
+        self.bus = bus
+
+    def subscribe(self):
+        """Register this object as a (multi-channel) listener on the bus."""
+        # A method named after a bus channel (start, stop, exit, ...)
+        # becomes that channel's listener.
+        for channel in self.bus.listeners:
+            # Subscribe self.start, self.exit, etc. if present.
+            method = getattr(self, channel, None)
+            if method is not None:
+                self.bus.subscribe(channel, method)
+
+    def unsubscribe(self):
+        """Unregister this object as a listener on the bus."""
+        for channel in self.bus.listeners:
+            # Unsubscribe self.start, self.exit, etc. if present.
+            method = getattr(self, channel, None)
+            if method is not None:
+                self.bus.unsubscribe(channel, method)
+
+
+class SignalHandler(object):
+
+    """Register bus channels (and listeners) for system signals.
+
+    You can modify what signals your application listens for, and what it does
+    when it receives signals, by modifying :attr:`SignalHandler.handlers`,
+    a dict of {signal name: callback} pairs. The default set is::
+
+        handlers = {'SIGTERM': self.bus.exit,
+                    'SIGHUP': self.handle_SIGHUP,
+                    'SIGUSR1': self.bus.graceful,
+                   }
+
+    The :func:`SignalHandler.handle_SIGHUP` method calls
+    :func:`bus.restart()<cherrypy.process.wspbus.Bus.restart>`
+    if the process is daemonized, but
+    :func:`bus.exit()<cherrypy.process.wspbus.Bus.exit>`
+    if the process is attached to a TTY. This is because Unix window
+    managers tend to send SIGHUP to terminal windows when the user closes them.
+
+    Feel free to add signals which are not available on every platform.
+    The :class:`SignalHandler` will ignore errors raised from attempting
+    to register handlers for unknown signals.
+    """
+
+    handlers = {}
+    """A map from signal names (e.g. 'SIGTERM') to handlers (e.g. bus.exit)."""
+
+    signals = {}
+    """A map from signal numbers to names."""
+
+    # Build the number -> name map from the signal module, skipping the
+    # SIG_* constants (SIG_DFL, SIG_IGN, ...), which are not signals.
+    for k, v in vars(_signal).items():
+        if k.startswith('SIG') and not k.startswith('SIG_'):
+            signals[v] = k
+    del k, v
+
+    def __init__(self, bus):
+        self.bus = bus
+        # Set default handlers
+        self.handlers = {'SIGTERM': self.bus.exit,
+                         'SIGHUP': self.handle_SIGHUP,
+                         'SIGUSR1': self.bus.graceful,
+                         }
+
+        if sys.platform[:4] == 'java':
+            del self.handlers['SIGUSR1']
+            self.handlers['SIGUSR2'] = self.bus.graceful
+            self.bus.log('SIGUSR1 cannot be set on the JVM platform. '
+                         'Using SIGUSR2 instead.')
+            self.handlers['SIGINT'] = self._jython_SIGINT_handler
+
+        # Maps signum -> the handler we displaced, for unsubscribe().
+        self._previous_handlers = {}
+        # used to determine if the process is a daemon in `self._is_daemonized`
+        self._original_pid = os.getpid()
+
+    def _jython_SIGINT_handler(self, signum=None, frame=None):
+        # See http://bugs.jython.org/issue1313
+        self.bus.log('Keyboard Interrupt: shutting down bus')
+        self.bus.exit()
+
+    def _is_daemonized(self):
+        """Return boolean indicating if the current process is
+        running as a daemon.
+
+        The criteria to determine the `daemon` condition is to verify
+        if the current pid is not the same as the one that got used on
+        the initial construction of the plugin *and* the stdin is not
+        connected to a terminal.
+
+        The sole validation of the tty is not enough when the plugin
+        is executing inside other process like in a CI tool
+        (Buildbot, Jenkins).
+        """
+        return (
+            self._original_pid != os.getpid() and
+            not os.isatty(sys.stdin.fileno())
+        )
+
+    def subscribe(self):
+        """Subscribe self.handlers to signals."""
+        for sig, func in self.handlers.items():
+            try:
+                self.set_handler(sig, func)
+            except ValueError:
+                # Signal not available on this platform; skip it.
+                pass
+
+    def unsubscribe(self):
+        """Unsubscribe self.handlers from signals."""
+        for signum, handler in self._previous_handlers.items():
+            signame = self.signals[signum]
+
+            if handler is None:
+                self.bus.log('Restoring %s handler to SIG_DFL.' % signame)
+                handler = _signal.SIG_DFL
+            else:
+                self.bus.log('Restoring %s handler %r.' % (signame, handler))
+
+            try:
+                our_handler = _signal.signal(signum, handler)
+                if our_handler is None:
+                    self.bus.log('Restored old %s handler %r, but our '
+                                 'handler was not registered.' %
+                                 (signame, handler), level=30)
+            except ValueError:
+                self.bus.log('Unable to restore %s handler %r.' %
+                             (signame, handler), level=40, traceback=True)
+
+    def set_handler(self, signal, listener=None):
+        """Subscribe a handler for the given signal (number or name).
+
+        If the optional 'listener' argument is provided, it will be
+        subscribed as a listener for the given signal's channel.
+
+        If the given signal name or number is not available on the current
+        platform, ValueError is raised.
+        """
+        # Accept either a name ('SIGTERM') or a number; resolve both ways.
+        if isinstance(signal, text_or_bytes):
+            signum = getattr(_signal, signal, None)
+            if signum is None:
+                raise ValueError('No such signal: %r' % signal)
+            signame = signal
+        else:
+            try:
+                signame = self.signals[signal]
+            except KeyError:
+                raise ValueError('No such signal: %r' % signal)
+            signum = signal
+
+        # Remember the displaced handler so unsubscribe() can restore it.
+        prev = _signal.signal(signum, self._handle_signal)
+        self._previous_handlers[signum] = prev
+
+        if listener is not None:
+            self.bus.log('Listening for %s.' % signame)
+            self.bus.subscribe(signame, listener)
+
+    def _handle_signal(self, signum=None, frame=None):
+        """Python signal handler (self.set_handler subscribes it for you)."""
+        signame = self.signals[signum]
+        self.bus.log('Caught signal %s.' % signame)
+        # Publish on the bus channel named after the signal (e.g. 'SIGTERM').
+        self.bus.publish(signame)
+
+    def handle_SIGHUP(self):
+        """Restart if daemonized, else exit."""
+        if self._is_daemonized():
+            self.bus.log('SIGHUP caught while daemonized. Restarting.')
+            self.bus.restart()
+        else:
+            # not daemonized (may be foreground or background)
+            self.bus.log('SIGHUP caught but not daemonized. Exiting.')
+            self.bus.exit()
+
+
+try:
+ import pwd
+ import grp
+except ImportError:
+ pwd, grp = None, None
+
+
+class DropPrivileges(SimplePlugin):
+
+    """Drop privileges. uid/gid arguments not available on Windows.
+
+    Special thanks to `Gavin Baker
+    <http://antonym.org/2005/12/dropping-privileges-in-python.html>`_
+    """
+
+    def __init__(self, bus, umask=None, uid=None, gid=None):
+        SimplePlugin.__init__(self, bus)
+        # 'finalized' flips to True after the first start(); subsequent
+        # starts only log the already-applied settings.
+        self.finalized = False
+        self.uid = uid
+        self.gid = gid
+        self.umask = umask
+
+    @property
+    def uid(self):
+        """The uid under which to run. Availability: Unix."""
+        return self._uid
+
+    @uid.setter
+    def uid(self, val):
+        if val is not None:
+            if pwd is None:
+                self.bus.log('pwd module not available; ignoring uid.',
+                             level=30)
+                val = None
+            elif isinstance(val, text_or_bytes):
+                # Resolve a user name to its numeric uid.
+                val = pwd.getpwnam(val)[2]
+        self._uid = val
+
+    @property
+    def gid(self):
+        """The gid under which to run. Availability: Unix."""
+        return self._gid
+
+    @gid.setter
+    def gid(self, val):
+        if val is not None:
+            if grp is None:
+                self.bus.log('grp module not available; ignoring gid.',
+                             level=30)
+                val = None
+            elif isinstance(val, text_or_bytes):
+                # Resolve a group name to its numeric gid.
+                val = grp.getgrnam(val)[2]
+        self._gid = val
+
+    @property
+    def umask(self):
+        """The default permission mode for newly created files and directories.
+
+        Usually expressed in octal format, for example, ``0644``.
+        Availability: Unix, Windows.
+        """
+        return self._umask
+
+    @umask.setter
+    def umask(self, val):
+        if val is not None:
+            try:
+                os.umask
+            except AttributeError:
+                self.bus.log('umask function not available; ignoring umask.',
+                             level=30)
+                val = None
+        self._umask = val
+
+    def start(self):
+        # uid/gid
+        def current_ids():
+            """Return the current (uid, gid) if available."""
+            name, group = None, None
+            if pwd:
+                name = pwd.getpwuid(os.getuid())[0]
+            if grp:
+                group = grp.getgrgid(os.getgid())[0]
+            return name, group
+
+        if self.finalized:
+            if not (self.uid is None and self.gid is None):
+                self.bus.log('Already running as uid: %r gid: %r' %
+                             current_ids())
+        else:
+            if self.uid is None and self.gid is None:
+                if pwd or grp:
+                    self.bus.log('uid/gid not set', level=30)
+            else:
+                self.bus.log('Started as uid: %r gid: %r' % current_ids())
+                # Drop the group first: after setuid we may no longer have
+                # permission to change groups.
+                if self.gid is not None:
+                    os.setgid(self.gid)
+                    os.setgroups([])
+                if self.uid is not None:
+                    os.setuid(self.uid)
+                self.bus.log('Running as uid: %r gid: %r' % current_ids())
+
+        # umask
+        if self.finalized:
+            if self.umask is not None:
+                self.bus.log('umask already set to: %03o' % self.umask)
+        else:
+            if self.umask is None:
+                self.bus.log('umask not set', level=30)
+            else:
+                old_umask = os.umask(self.umask)
+                self.bus.log('umask old: %03o, new: %03o' %
+                             (old_umask, self.umask))
+
+        self.finalized = True
+    # This is slightly higher than the priority for server.start
+    # in order to facilitate the most common use: starting on a low
+    # port (which requires root) and then dropping to another user.
+    start.priority = 77
+
+
+class Daemonizer(SimplePlugin):
+
+    """Daemonize the running script.
+
+    Use this with a Web Site Process Bus via::
+
+        Daemonizer(bus).subscribe()
+
+    When this component finishes, the process is completely decoupled from
+    the parent environment. Please note that when this component is used,
+    the return code from the parent process will still be 0 if a startup
+    error occurs in the forked children. Errors in the initial daemonizing
+    process still return proper exit codes. Therefore, if you use this
+    plugin to daemonize, don't use the return code as an accurate indicator
+    of whether the process fully started. In fact, that return code only
+    indicates if the process successfully finished the first fork.
+    """
+
+    def __init__(self, bus, stdin='/dev/null', stdout='/dev/null',
+                 stderr='/dev/null'):
+        SimplePlugin.__init__(self, bus)
+        # Paths the daemon's standard streams are redirected to.
+        self.stdin = stdin
+        self.stdout = stdout
+        self.stderr = stderr
+        self.finalized = False
+
+    def start(self):
+        if self.finalized:
+            self.bus.log('Already deamonized.')
+
+        # forking has issues with threads:
+        # http://www.opengroup.org/onlinepubs/000095399/functions/fork.html
+        # "The general problem with making fork() work in a multi-threaded
+        # world is what to do with all of the threads..."
+        # So we check for active threads:
+        if threading.active_count() != 1:
+            self.bus.log('There are %r active threads. '
+                         'Daemonizing now may cause strange failures.' %
+                         threading.enumerate(), level=30)
+
+        self.daemonize(self.stdin, self.stdout, self.stderr, self.bus.log)
+
+        self.finalized = True
+    start.priority = 65
+
+    @staticmethod
+    def daemonize(
+            stdin='/dev/null', stdout='/dev/null', stderr='/dev/null',
+            logger=lambda msg: None):
+        """Double-fork to detach from the parent, then redirect std streams.
+
+        The daemon's final PID is reported through 'logger'.
+        """
+        # See http://www.erlenstar.demon.co.uk/unix/faq_2.html#SEC16
+        # (or http://www.faqs.org/faqs/unix-faq/programmer/faq/ section 1.7)
+        # and http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/66012
+
+        # Finish up with the current stdout/stderr
+        sys.stdout.flush()
+        sys.stderr.flush()
+
+        error_tmpl = (
+            '{sys.argv[0]}: fork #{n} failed: ({exc.errno}) {exc.strerror}\n'
+        )
+
+        for fork in range(2):
+            msg = ['Forking once.', 'Forking twice.'][fork]
+            try:
+                pid = os.fork()
+                if pid > 0:
+                    # This is the parent; exit.
+                    logger(msg)
+                    os._exit(0)
+            except OSError as exc:
+                # Python raises OSError rather than returning negative numbers.
+                sys.exit(error_tmpl.format(sys=sys, exc=exc, n=fork + 1))
+            if fork == 0:
+                # Become session leader between the forks so the second
+                # child can never reacquire a controlling terminal.
+                os.setsid()
+
+        os.umask(0)
+
+        si = open(stdin, 'r')
+        so = open(stdout, 'a+')
+        se = open(stderr, 'a+')
+
+        # os.dup2(fd, fd2) will close fd2 if necessary,
+        # so we don't explicitly close stdin/out/err.
+        # See http://docs.python.org/lib/os-fd-ops.html
+        os.dup2(si.fileno(), sys.stdin.fileno())
+        os.dup2(so.fileno(), sys.stdout.fileno())
+        os.dup2(se.fileno(), sys.stderr.fileno())
+
+        logger('Daemonized to PID: %s' % os.getpid())
+
+
class PIDFile(SimplePlugin):

    """Maintain a PID file via a WSPBus.

    On 'start' the current process id is written to ``pidfile``;
    on 'exit' the file is removed (best effort).
    """

    def __init__(self, bus, pidfile):
        SimplePlugin.__init__(self, bus)
        # Path of the file that will hold this process's PID.
        self.pidfile = pidfile
        # True once the PID has been written; makes start() idempotent.
        self.finalized = False

    def start(self):
        """Write the current PID to self.pidfile (only once)."""
        pid = os.getpid()
        if self.finalized:
            self.bus.log('PID %r already written to %r.' % (pid, self.pidfile))
        else:
            # Use a context manager so the handle is closed promptly
            # instead of leaking until garbage collection.
            with open(self.pidfile, 'wb') as f:
                f.write(ntob('%s\n' % pid, 'utf8'))
            self.bus.log('PID %r written to %r.' % (pid, self.pidfile))
            self.finalized = True
    start.priority = 70

    def exit(self):
        """Remove the PID file; swallow errors (deliberate best effort)."""
        try:
            os.remove(self.pidfile)
            self.bus.log('PID file removed: %r.' % self.pidfile)
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception:
            # The file may already be gone or be unwritable at shutdown;
            # don't mask the process exit over it.
            pass
+
+
class PerpetualTimer(threading.Timer):

    """A responsive subclass of threading.Timer whose run() method repeats.

    Use this timer only when you really need a very interruptible timer;
    this checks its 'finished' condition up to 20 times a second, which can
    result in pretty high CPU usage.
    """

    def __init__(self, *args, **kwargs):
        """Override parent constructor to allow 'bus' to be provided."""
        # 'bus' is ours, not threading.Timer's; pop it before delegating.
        self.bus = kwargs.pop('bus', None)
        super(PerpetualTimer, self).__init__(*args, **kwargs)

    def run(self):
        """Call self.function every self.interval until finished is set."""
        while True:
            self.finished.wait(self.interval)
            # Use is_set(): isSet() is a camelCase alias deprecated since
            # Python 3.10 and slated for removal.
            if self.finished.is_set():
                return
            try:
                self.function(*self.args, **self.kwargs)
            except Exception:
                if self.bus:
                    self.bus.log(
                        'Error in perpetual timer thread function %r.' %
                        self.function, level=40, traceback=True)
                # Quit on first error to avoid massive logs.
                raise
+
+
class BackgroundTask(threading.Thread):

    """A subclass of threading.Thread whose run() method repeats.

    Use this class for most repeating tasks. It uses time.sleep() to wait
    for each interval, which isn't very responsive; that is, even if you call
    self.cancel(), you'll have to wait until the sleep() call finishes before
    the thread stops. To compensate, it defaults to being daemonic, which means
    it won't delay stopping the whole process.
    """

    def __init__(self, interval, function, args=None, kwargs=None, bus=None):
        """Create the task.

        :param interval: seconds to sleep between calls.
        :param function: the callable to invoke each interval.
        :param args: positional args for *function* (default: none).
        :param kwargs: keyword args for *function* (default: none).
        :param bus: optional bus, used only to log errors.
        """
        super(BackgroundTask, self).__init__()
        self.interval = interval
        self.function = function
        # None sentinels instead of mutable default arguments, which would
        # be shared between every BackgroundTask instance.
        self.args = args if args is not None else []
        self.kwargs = kwargs if kwargs is not None else {}
        self.running = False
        self.bus = bus

        # default to daemonic
        self.daemon = True

    def cancel(self):
        """Ask run() to stop after its current sleep finishes."""
        self.running = False

    def run(self):
        """Sleep, call the function, repeat — until cancel() or an error."""
        self.running = True
        while self.running:
            time.sleep(self.interval)
            if not self.running:
                return
            try:
                self.function(*self.args, **self.kwargs)
            except Exception:
                if self.bus:
                    self.bus.log('Error in background task thread function %r.'
                                 % self.function, level=40, traceback=True)
                # Quit on first error to avoid massive logs.
                raise
+
+
class Monitor(SimplePlugin):

    """WSPBus listener to periodically run a callback in its own thread."""

    callback = None
    """The function to call at intervals."""

    frequency = 60
    """The time in seconds between callback runs."""

    thread = None
    """A :class:`BackgroundTask<cherrypy.process.plugins.BackgroundTask>`
    thread.
    """

    def __init__(self, bus, callback, frequency=60, name=None):
        SimplePlugin.__init__(self, bus)
        self.callback = callback
        self.frequency = frequency
        self.thread = None
        # Optional thread name; falls back to the class name when unset.
        self.name = name

    def start(self):
        """Start our callback in its own background thread."""
        if self.frequency > 0:
            threadname = self.name or self.__class__.__name__
            if self.thread is None:
                self.thread = BackgroundTask(self.frequency, self.callback,
                                             bus=self.bus)
                self.thread.name = threadname
                self.thread.start()
                self.bus.log('Started monitor thread %r.' % threadname)
            else:
                self.bus.log('Monitor thread %r already started.' % threadname)
    start.priority = 70

    def stop(self):
        """Stop our callback's background task thread."""
        if self.thread is None:
            # BUGFIX: '%' binds tighter than 'or', so the fallback to the
            # class name must be parenthesized to ever take effect
            # (previously this logged "No thread running for None.").
            self.bus.log('No thread running for %s.' %
                         (self.name or self.__class__.__name__))
        else:
            if self.thread is not threading.current_thread():
                name = self.thread.name
                self.thread.cancel()
                if not self.thread.daemon:
                    # Only join non-daemonic tasks; daemonic ones may be
                    # mid-sleep and will die with the process anyway.
                    self.bus.log('Joining %r' % name)
                    self.thread.join()
                self.bus.log('Stopped thread %r.' % name)
            self.thread = None

    def graceful(self):
        """Stop the callback's background task thread and restart it."""
        self.stop()
        self.start()
+
+
class Autoreloader(Monitor):

    """Monitor which re-executes the process when files change.

    This :ref:`plugin<plugins>` restarts the process (via :func:`os.execv`)
    if any of the files it monitors change (or is deleted). By default, the
    autoreloader monitors all imported modules; you can add to the
    set by adding to ``autoreload.files``::

        cherrypy.engine.autoreload.files.add(myFile)

    If there are imported files you do *not* wish to monitor, you can
    adjust the ``match`` attribute, a regular expression. For example,
    to stop monitoring cherrypy itself::

        cherrypy.engine.autoreload.match = r'^(?!cherrypy).+'

    Like all :class:`Monitor<cherrypy.process.plugins.Monitor>` plugins,
    the autoreload plugin takes a ``frequency`` argument. The default is
    1 second; that is, the autoreloader will examine files once each second.
    """

    files = None
    """The set of files to poll for modifications."""

    frequency = 1
    """The interval in seconds at which to poll for modified files."""

    match = '.*'
    """A regular expression by which to match filenames."""

    def __init__(self, bus, frequency=1, match='.*'):
        # Maps filename -> last-seen mtime; a stored value of None marks
        # a module with no real .py file (see run()).
        self.mtimes = {}
        # Extra files to watch, beyond those discovered via sys.modules.
        self.files = set()
        self.match = match
        # Monitor will invoke self.run every `frequency` seconds.
        Monitor.__init__(self, bus, self.run, frequency)

    def start(self):
        """Start our own background task thread for self.run."""
        if self.thread is None:
            # Forget previous scan results so a fresh start re-baselines
            # every file's mtime instead of restarting immediately.
            self.mtimes = {}
        Monitor.start(self)
    start.priority = 70

    def sysfiles(self):
        """Return a Set of sys.modules filenames to monitor."""
        # Only consider modules whose *name* matches self.match.
        search_mod_names = filter(
            re.compile(self.match).match,
            list(sys.modules.keys()),
        )
        mods = map(sys.modules.get, search_mod_names)
        # Drop modules with no resolvable file (filter(None, ...)).
        return set(filter(None, map(self._file_for_module, mods)))

    @classmethod
    def _file_for_module(cls, module):
        """Return the relevant file for the module."""
        return (
            cls._archive_for_zip_module(module)
            or cls._file_for_file_module(module)
        )

    @staticmethod
    def _archive_for_zip_module(module):
        """Return the archive filename for the module if relevant."""
        try:
            # zipimport loaders expose the containing archive; watch that.
            return module.__loader__.archive
        except AttributeError:
            # Not zip-imported; returns None implicitly.
            pass

    @classmethod
    def _file_for_file_module(cls, module):
        """Return the file for the module."""
        try:
            # __file__ may be None/missing for builtins and namespace pkgs.
            return module.__file__ and cls._make_absolute(module.__file__)
        except AttributeError:
            pass

    @staticmethod
    def _make_absolute(filename):
        """Ensure filename is absolute to avoid effect of os.chdir."""
        return filename if os.path.isabs(filename) else (
            os.path.normpath(os.path.join(_module__file__base, filename))
        )

    def run(self):
        """Reload the process if registered files have been modified."""
        for filename in self.sysfiles() | self.files:
            if filename:
                if filename.endswith('.pyc'):
                    # Watch the .py source rather than its bytecode.
                    filename = filename[:-1]

                oldtime = self.mtimes.get(filename, 0)
                if oldtime is None:
                    # Module with no .py file. Skip it.
                    continue

                try:
                    mtime = os.stat(filename).st_mtime
                except OSError:
                    # Either a module with no .py file, or it's been deleted.
                    mtime = None

                if filename not in self.mtimes:
                    # First sighting: record a baseline mtime.
                    # If a module has no .py file, this will be None.
                    self.mtimes[filename] = mtime
                else:
                    if mtime is None or mtime > oldtime:
                        # The file has been deleted or modified.
                        self.bus.log('Restarting because %s changed.' %
                                     filename)
                        # Cancel our own monitor thread before asking the
                        # bus to re-exec the whole process.
                        self.thread.cancel()
                        self.bus.log('Stopped thread %r.' %
                                     self.thread.name)
                        self.bus.restart()
                        return
+
+
class ThreadManager(SimplePlugin):

    """Manager for HTTP request threads.

    Publish to the 'acquire_thread' and 'release_thread' channels (for
    each thread) when you control thread creation and destruction; the
    current thread is then registered/unregistered and 'start_thread' /
    'stop_thread' listeners are notified as needed.

    When threads are created and destroyed by code you do not control
    (e.g., Apache), publish to 'acquire_thread' only, at the beginning of
    every HTTP request, and never publish to 'release_thread' -- you
    cannot know whether the thread will be re-used.  The bus will call
    'stop_thread' listeners for you when it stops.
    """

    threads = None
    """A map of {thread ident: index number} pairs."""

    def __init__(self, bus):
        self.threads = {}
        SimplePlugin.__init__(self, bus)
        # Ensure all four channels exist even before anyone subscribes.
        for channel in ('acquire_thread', 'start_thread',
                        'release_thread', 'stop_thread'):
            self.bus.listeners.setdefault(channel, set())

    def acquire_thread(self):
        """Run 'start_thread' listeners for the current thread.

        If the current thread has already been seen, any 'start_thread'
        listeners will not be run again.
        """
        ident = _thread.get_ident()
        if ident in self.threads:
            return
        # Assign our own small sequential index rather than exposing the
        # raw ident, because some platforms reuse thread IDs.
        index = len(self.threads) + 1
        self.threads[ident] = index
        self.bus.publish('start_thread', index)

    def release_thread(self):
        """Release the current thread and run 'stop_thread' listeners."""
        index = self.threads.pop(_thread.get_ident(), None)
        if index is None:
            return
        self.bus.publish('stop_thread', index)

    def stop(self):
        """Release all threads and run all 'stop_thread' listeners."""
        for index in self.threads.values():
            self.bus.publish('stop_thread', index)
        self.threads.clear()

    graceful = stop
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/process/servers.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/process/servers.py
new file mode 100644
index 0000000000000000000000000000000000000000..717a8de0faa80675bcf4ab4c0e1e090dcb434fc7
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/process/servers.py
@@ -0,0 +1,416 @@
+r"""
+Starting in CherryPy 3.1, cherrypy.server is implemented as an
+:ref:`Engine Plugin<plugins>`. It's an instance of
+:class:`cherrypy._cpserver.Server`, which is a subclass of
+:class:`cherrypy.process.servers.ServerAdapter`. The ``ServerAdapter`` class
+is designed to control other servers, as well.
+
+Multiple servers/ports
+======================
+
+If you need to start more than one HTTP server (to serve on multiple ports, or
+protocols, etc.), you can manually register each one and then start them all
+with engine.start::
+
+ s1 = ServerAdapter(
+ cherrypy.engine,
+ MyWSGIServer(host='0.0.0.0', port=80)
+ )
+ s2 = ServerAdapter(
+ cherrypy.engine,
+ another.HTTPServer(host='127.0.0.1', SSL=True)
+ )
+ s1.subscribe()
+ s2.subscribe()
+ cherrypy.engine.start()
+
+.. index:: SCGI
+
+FastCGI/SCGI
+============
+
+There are also Flup\ **F**\ CGIServer and Flup\ **S**\ CGIServer classes in
+:mod:`cherrypy.process.servers`. To start an fcgi server, for example,
+wrap an instance of it in a ServerAdapter::
+
+ addr = ('0.0.0.0', 4000)
+ f = servers.FlupFCGIServer(application=cherrypy.tree, bindAddress=addr)
+ s = servers.ServerAdapter(cherrypy.engine, httpserver=f, bind_addr=addr)
+ s.subscribe()
+
+The :doc:`cherryd</deployguide/cherryd>` startup script will do the above for
+you via its `-f` flag.
+Note that you need to download and install `flup <http://trac.saddi.com/flup>`_
+yourself, whether you use ``cherryd`` or not.
+
+.. _fastcgi:
+.. index:: FastCGI
+
+FastCGI
+-------
+
+A very simple setup lets your CherryPy application run with FastCGI.
+You just need the flup library,
+plus a running Apache server (with ``mod_fastcgi``) or lighttpd server.
+
+CherryPy code
+^^^^^^^^^^^^^
+
+hello.py::
+
+ #!/usr/bin/python
+ import cherrypy
+
+ class HelloWorld:
+ '''Sample request handler class.'''
+ @cherrypy.expose
+ def index(self):
+ return "Hello world!"
+
+ cherrypy.tree.mount(HelloWorld())
+ # CherryPy autoreload must be disabled for the flup server to work
+ cherrypy.config.update({'engine.autoreload.on':False})
+
+Then run :doc:`/deployguide/cherryd` with the '-f' arg::
+
+ cherryd -c <myconfig> -d -f -i hello.py
+
+Apache
+^^^^^^
+
+At the top level in httpd.conf::
+
+ FastCgiIpcDir /tmp
+ FastCgiServer /path/to/cherry.fcgi -idle-timeout 120 -processes 4
+
+And inside the relevant VirtualHost section::
+
+ # FastCGI config
+ AddHandler fastcgi-script .fcgi
+ ScriptAliasMatch (.*$) /path/to/cherry.fcgi$1
+
+Lighttpd
+^^^^^^^^
+
+For `Lighttpd <http://www.lighttpd.net/>`_ you can follow these
+instructions. Within ``lighttpd.conf`` make sure ``mod_fastcgi`` is
+active within ``server.modules``. Then, within your ``$HTTP["host"]``
+directive, configure your fastcgi script like the following::
+
+ $HTTP["url"] =~ "" {
+ fastcgi.server = (
+ "/" => (
+ "script.fcgi" => (
+ "bin-path" => "/path/to/your/script.fcgi",
+ "socket" => "/tmp/script.sock",
+ "check-local" => "disable",
+ "disable-time" => 1,
+ "min-procs" => 1,
+ "max-procs" => 1, # adjust as needed
+ ),
+ ),
+ )
+ } # end of $HTTP["url"] =~ "^/"
+
+Please see `Lighttpd FastCGI Docs
+<http://redmine.lighttpd.net/wiki/lighttpd/Docs:ModFastCGI>`_ for
+an explanation of the possible configuration options.
+"""
+
+import os
+import sys
+import time
+import warnings
+import contextlib
+
+import portend
+
+
class Timeouts:
    """Default timeouts (in seconds) for port state checks.

    Used by ServerAdapter with the ``portend`` helpers: ``occupied`` for
    waiting until the server port is bound, ``free`` for waiting until
    it is released.
    """

    occupied = 5
    free = 1
+
+
class ServerAdapter(object):

    """Adapter for an HTTP server.

    If you need to start more than one HTTP server (to serve on multiple
    ports, or protocols, etc.), you can manually register each one and then
    start them all with bus.start::

        s1 = ServerAdapter(bus, MyWSGIServer(host='0.0.0.0', port=80))
        s2 = ServerAdapter(bus, another.HTTPServer(host='127.0.0.1', SSL=True))
        s1.subscribe()
        s2.subscribe()
        bus.start()
    """

    def __init__(self, bus, httpserver=None, bind_addr=None):
        self.bus = bus
        # The wrapped server; expected to expose start()/stop() and a
        # 'ready' flag (see wait()).
        self.httpserver = httpserver
        # Either a (host, port) tuple or a socket-file path (or None).
        self.bind_addr = bind_addr
        # Exception captured in the server thread, re-raised by wait().
        self.interrupt = None
        self.running = False

    def subscribe(self):
        """Attach this adapter to the bus 'start'/'stop' channels."""
        self.bus.subscribe('start', self.start)
        self.bus.subscribe('stop', self.stop)

    def unsubscribe(self):
        """Detach this adapter from the bus 'start'/'stop' channels."""
        self.bus.unsubscribe('start', self.start)
        self.bus.unsubscribe('stop', self.stop)

    def start(self):
        """Start the HTTP server."""
        if self.running:
            self.bus.log('Already serving on %s' % self.description)
            return

        self.interrupt = None
        if not self.httpserver:
            raise ValueError('No HTTP server has been created.')

        if not os.environ.get('LISTEN_PID', None):
            # Not socket-activated (no systemd LISTEN_PID): we manage the
            # port ourselves.
            # Start the httpserver in a new thread.
            if isinstance(self.bind_addr, tuple):
                # Wait for the port to be released by any previous owner.
                portend.free(*self.bind_addr, timeout=Timeouts.free)

            import threading
            t = threading.Thread(target=self._start_http_thread)
            t.name = 'HTTPServer ' + t.name
            t.start()

        self.wait()
        self.running = True
        self.bus.log('Serving on %s' % self.description)
    start.priority = 75

    @property
    def description(self):
        """
        A description about where this server is bound.
        """
        if self.bind_addr is None:
            on_what = 'unknown interface (dynamic?)'
        elif isinstance(self.bind_addr, tuple):
            on_what = self._get_base()
        else:
            on_what = 'socket file: %s' % self.bind_addr
        return on_what

    def _get_base(self):
        """Return a 'scheme://host[:port]' string for the bound address."""
        if not self.httpserver:
            return ''
        host, port = self.bound_addr
        if getattr(self.httpserver, 'ssl_adapter', None):
            scheme = 'https'
            if port != 443:
                # Only show the port when it is not the scheme default.
                host += ':%s' % port
        else:
            scheme = 'http'
            if port != 80:
                host += ':%s' % port

        return '%s://%s' % (scheme, host)

    def _start_http_thread(self):
        """HTTP servers MUST be running in new threads, so that the
        main thread persists to receive KeyboardInterrupt's. If an
        exception is raised in the httpserver's thread then it's
        trapped here, and the bus (and therefore our httpserver)
        are shut down.
        """
        try:
            self.httpserver.start()
        except KeyboardInterrupt:
            self.bus.log('<Ctrl-C> hit: shutting down HTTP server')
            self.interrupt = sys.exc_info()[1]
            self.bus.exit()
        except SystemExit:
            self.bus.log('SystemExit raised: shutting down HTTP server')
            self.interrupt = sys.exc_info()[1]
            self.bus.exit()
            # Re-raised so the requested process exit still happens.
            raise
        except Exception:
            self.interrupt = sys.exc_info()[1]
            self.bus.log('Error in HTTP server: shutting down',
                         traceback=True, level=40)
            self.bus.exit()
            raise

    def wait(self):
        """Wait until the HTTP server is ready to receive requests."""
        # Poll the server's own 'ready' flag, re-raising any exception
        # the server thread recorded (see _start_http_thread).
        while not getattr(self.httpserver, 'ready', False):
            if self.interrupt:
                raise self.interrupt
            time.sleep(.1)

        # bypass check when LISTEN_PID is set
        if os.environ.get('LISTEN_PID', None):
            return

        # bypass check when running via socket-activation
        # (for socket-activation the port will be managed by systemd)
        if not isinstance(self.bind_addr, tuple):
            return

        # wait for port to be occupied
        with _safe_wait(*self.bound_addr):
            portend.occupied(*self.bound_addr, timeout=Timeouts.occupied)

    @property
    def bound_addr(self):
        """
        The bind address, or if it's an ephemeral port and the
        socket has been bound, return the actual port bound.
        """
        host, port = self.bind_addr
        if port == 0 and self.httpserver.socket:
            # Bound to ephemeral port. Get the actual port allocated.
            port = self.httpserver.socket.getsockname()[1]
        return host, port

    def stop(self):
        """Stop the HTTP server."""
        if self.running:
            # stop() MUST block until the server is *truly* stopped.
            self.httpserver.stop()
            # Wait for the socket to be truly freed.
            if isinstance(self.bind_addr, tuple):
                portend.free(*self.bound_addr, timeout=Timeouts.free)
            self.running = False
            self.bus.log('HTTP Server %s shut down' % self.httpserver)
        else:
            self.bus.log('HTTP Server %s already shut down' % self.httpserver)
    stop.priority = 25

    def restart(self):
        """Restart the HTTP server."""
        self.stop()
        self.start()
+
+
class FlupCGIServer(object):

    """Adapter for a flup.server.cgi.WSGIServer."""

    def __init__(self, *args, **kwargs):
        # Constructor arguments are stored and only handed to flup when
        # start() runs (see the lazy import note there).
        self.ready = False
        self.args = args
        self.kwargs = kwargs

    def start(self):
        """Start the CGI server."""
        # Deferred import: WSGIServer.__init__ spins up a thread pool,
        # which must not exist before a potential daemonize().
        from flup.server.cgi import WSGIServer

        server = WSGIServer(*self.args, **self.kwargs)
        self.cgiserver = server
        self.ready = True
        server.run()

    def stop(self):
        """Stop the HTTP server."""
        self.ready = False
+
+
class FlupFCGIServer(object):

    """Adapter for a flup.server.fcgi.WSGIServer."""

    def __init__(self, *args, **kwargs):
        if kwargs.get('bindAddress') is None:
            # A dynamic server inherits its listening socket via fromfd;
            # refuse early on platforms that cannot do that.
            import socket
            if not hasattr(socket, 'fromfd'):
                raise ValueError(
                    'Dynamic FCGI server not available on this platform. '
                    'You must use a static or external one by providing a '
                    'legal bindAddress.')
        self.args = args
        self.kwargs = kwargs
        self.ready = False

    def start(self):
        """Start the FCGI server."""
        # Deferred import: WSGIServer.__init__ spins up a thread pool,
        # which must not exist before a potential daemonize().
        from flup.server.fcgi import WSGIServer
        server = WSGIServer(*self.args, **self.kwargs)
        # TODO: report this bug upstream to flup.
        # Without these stubs, flup on Windows tries to restore signal
        # handlers it never installed and dies in
        # threadedserver._restoreSignalHandlers with:
        #   AttributeError: 'WSGIServer' object has no attribute '_oldSIGs'
        server._installSignalHandlers = lambda: None
        server._oldSIGs = []
        self.fcgiserver = server
        self.ready = True
        server.run()

    def stop(self):
        """Stop the HTTP server."""
        # Forcibly stop the fcgi server main event loop.
        self.fcgiserver._keepGoing = False
        # Force all worker threads to die off by capping the spare pool
        # at the current idle count.
        pool = self.fcgiserver._threadPool
        pool.maxSpare = pool._idleCount
        self.ready = False
+
+
class FlupSCGIServer(object):

    """Adapter for a flup.server.scgi.WSGIServer."""

    def __init__(self, *args, **kwargs):
        # Stored verbatim; handed to flup only when start() runs.
        self.ready = False
        self.args = args
        self.kwargs = kwargs

    def start(self):
        """Start the SCGI server."""
        # Deferred import: WSGIServer.__init__ spins up a thread pool,
        # which must not exist before a potential daemonize().
        from flup.server.scgi import WSGIServer
        server = WSGIServer(*self.args, **self.kwargs)
        # TODO: report this bug upstream to flup.
        # Without these stubs, flup on Windows tries to restore signal
        # handlers it never installed and dies in
        # threadedserver._restoreSignalHandlers with:
        #   AttributeError: 'WSGIServer' object has no attribute '_oldSIGs'
        server._installSignalHandlers = lambda: None
        server._oldSIGs = []
        self.scgiserver = server
        self.ready = True
        server.run()

    def stop(self):
        """Stop the HTTP server."""
        self.ready = False
        # Forcibly stop the scgi server main event loop, then force all
        # worker threads to die off.
        self.scgiserver._keepGoing = False
        self.scgiserver._threadPool.maxSpare = 0
+
+
@contextlib.contextmanager
def _safe_wait(host, port):
    """
    On systems where a loopback interface is not available and the
    server is bound to all interfaces, it's difficult to determine
    whether the server is in fact occupying the port. In this case,
    just issue a warning and move on. See issue #1100.
    """
    try:
        yield
    except portend.Timeout:
        if host == portend.client_host(host):
            # Loopback checks are reliable; a timeout there is real.
            raise
        warnings.warn(
            'Unable to verify that the server is bound on %r' % port)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/process/win32.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/process/win32.py
new file mode 100644
index 0000000000000000000000000000000000000000..b7a79b1b05ada8fdc36992fd25bdb5bf7fe91269
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/process/win32.py
@@ -0,0 +1,183 @@
+"""Windows service. Requires pywin32."""
+
+import os
+import win32api
+import win32con
+import win32event
+import win32service
+import win32serviceutil
+
+from cherrypy.process import wspbus, plugins
+
+
class ConsoleCtrlHandler(plugins.SimplePlugin):

    """A WSPBus plugin for handling Win32 console events (like Ctrl-C)."""

    def __init__(self, bus):
        # Whether self.handle is currently registered with the console.
        self.is_set = False
        plugins.SimplePlugin.__init__(self, bus)

    def start(self):
        """Register self.handle as a Win32 console control handler."""
        if self.is_set:
            self.bus.log('Handler for console events already set.', level=20)
            return

        # Second argument 1 == add the handler (0 would remove it).
        result = win32api.SetConsoleCtrlHandler(self.handle, 1)
        if result == 0:
            self.bus.log('Could not SetConsoleCtrlHandler (error %r)' %
                         win32api.GetLastError(), level=40)
        else:
            self.bus.log('Set handler for console events.', level=20)
            self.is_set = True

    def stop(self):
        """Unregister the console control handler, if registered."""
        if not self.is_set:
            self.bus.log('Handler for console events already off.', level=20)
            return

        try:
            result = win32api.SetConsoleCtrlHandler(self.handle, 0)
        except ValueError:
            # "ValueError: The object has not been registered"
            result = 1

        if result == 0:
            self.bus.log('Could not remove SetConsoleCtrlHandler (error %r)' %
                         win32api.GetLastError(), level=40)
        else:
            self.bus.log('Removed handler for console events.', level=20)
            self.is_set = False

    def handle(self, event):
        """Handle console control events (like Ctrl-C)."""
        if event in (win32con.CTRL_C_EVENT, win32con.CTRL_LOGOFF_EVENT,
                     win32con.CTRL_BREAK_EVENT, win32con.CTRL_SHUTDOWN_EVENT,
                     win32con.CTRL_CLOSE_EVENT):
            self.bus.log('Console event %s: shutting down bus' % event)

            # Remove self immediately so repeated Ctrl-C doesn't re-call it.
            try:
                self.stop()
            except ValueError:
                pass

            self.bus.exit()
            # 'First to return True stops the calls'
            return 1
        # Not one of ours; let the next registered handler see it.
        return 0
+
+
class Win32Bus(wspbus.Bus):

    """A Web Site Process Bus implementation for Win32.

    Instead of time.sleep, this bus blocks using native win32event objects.
    """

    def __init__(self):
        # One win32 event object per bus state, created lazily.
        self.events = {}
        wspbus.Bus.__init__(self)

    def _get_state_event(self, state):
        """Return a win32event for the given state (creating it if needed)."""
        try:
            return self.events[state]
        except KeyError:
            # Args (None, 0, 0, name): default security, auto-reset,
            # initially non-signaled; named per-state and per-pid so it
            # is identifiable in debugging tools.
            event = win32event.CreateEvent(None, 0, 0,
                                           'WSPBus %s Event (pid=%r)' %
                                           (state.name, os.getpid()))
            self.events[state] = event
            return event

    @property
    def state(self):
        # Backing attribute maintained by the setter below.
        return self._state

    @state.setter
    def state(self, value):
        self._state = value
        event = self._get_state_event(value)
        # Wake the waiters currently blocked on this state's event.
        win32event.PulseEvent(event)

    def wait(self, state, interval=0.1, channel=None):
        """Wait for the given state(s), KeyboardInterrupt or SystemExit.

        Since this class uses native win32event objects, the interval
        argument is ignored.
        """
        if isinstance(state, (tuple, list)):
            # Don't wait for an event that beat us to the punch ;)
            if self.state not in state:
                events = tuple([self._get_state_event(s) for s in state])
                # bWaitAll=0: return as soon as ANY of the events fires.
                win32event.WaitForMultipleObjects(
                    events, 0, win32event.INFINITE)
        else:
            # Don't wait for an event that beat us to the punch ;)
            if self.state != state:
                event = self._get_state_event(state)
                win32event.WaitForSingleObject(event, win32event.INFINITE)
+
+
+class _ControlCodes(dict):
+
+ """Control codes used to "signal" a service via ControlService.
+
+ User-defined control codes are in the range 128-255. We generally use
+ the standard Python value for the Linux signal and add 128. Example:
+
+ >>> signal.SIGUSR1
+ 10
+ control_codes['graceful'] = 128 + 10
+ """
+
+ def key_for(self, obj):
+ """For the given value, return its corresponding key."""
+ for key, val in self.items():
+ if val is obj:
+ return key
+ raise ValueError('The given object could not be found: %r' % obj)
+
+
# Command-name -> Win32 user-defined control code.
# 138 == 128 + signal.SIGUSR1 (10), per the _ControlCodes convention.
control_codes = _ControlCodes({'graceful': 138})


def signal_child(service, command):
    """Send a control *command* to a running Windows *service*.

    'stop' and 'restart' use the dedicated pywin32 helpers; any other
    command must be a key of ``control_codes`` and is delivered via
    win32serviceutil.ControlService.
    """
    if command == 'stop':
        win32serviceutil.StopService(service)
    elif command == 'restart':
        win32serviceutil.RestartService(service)
    else:
        win32serviceutil.ControlService(service, control_codes[command])
+
+
class PyWebService(win32serviceutil.ServiceFramework):

    """Python Web Service."""

    # Internal service name and the name shown in the Services console.
    _svc_name_ = 'Python Web Service'
    _svc_display_name_ = 'Python Web Service'
    _svc_deps_ = None  # sequence of service names on which this depends
    _exe_name_ = 'pywebsvc'
    _exe_args_ = None  # Default to no arguments

    # Only exists on Windows 2000 or later, ignored on windows NT
    _svc_description_ = 'Python Web Service'

    def SvcDoRun(self):
        """Service main loop: start the bus and block until it exits."""
        from cherrypy import process
        process.bus.start()
        process.bus.block()

    def SvcStop(self):
        """Handle the service-manager stop request by exiting the bus."""
        from cherrypy import process
        self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
        process.bus.exit()

    def SvcOther(self, control):
        """Translate a custom control code into a bus channel publish."""
        from cherrypy import process
        # e.g. control 138 -> publish on the 'graceful' channel.
        process.bus.publish(control_codes.key_for(control))
+
+
if __name__ == '__main__':
    # pywin32 entry point: handles install/remove/start/stop/etc.
    # command-line verbs for this service class.
    win32serviceutil.HandleCommandLine(PyWebService)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/process/wspbus.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/process/wspbus.py
new file mode 100644
index 0000000000000000000000000000000000000000..1d2789b1f89ed531c6f2dd6bf0278bd88178da82
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/process/wspbus.py
@@ -0,0 +1,587 @@
+r"""An implementation of the Web Site Process Bus.
+
+This module is completely standalone, depending only on the stdlib.
+
+Web Site Process Bus
+--------------------
+
+A Bus object is used to contain and manage site-wide behavior:
+daemonization, HTTP server start/stop, process reload, signal handling,
+drop privileges, PID file management, logging for all of these,
+and many more.
+
+In addition, a Bus object provides a place for each web framework
+to register code that runs in response to site-wide events (like
+process start and stop), or which controls or otherwise interacts with
+the site-wide components mentioned above. For example, a framework which
+uses file-based templates would add known template filenames to an
+autoreload component.
+
+Ideally, a Bus object will be flexible enough to be useful in a variety
+of invocation scenarios:
+
+ 1. The deployer starts a site from the command line via a
+ framework-neutral deployment script; applications from multiple frameworks
+ are mixed in a single site. Command-line arguments and configuration
+ files are used to define site-wide components such as the HTTP server,
+ WSGI component graph, autoreload behavior, signal handling, etc.
+ 2. The deployer starts a site via some other process, such as Apache;
+ applications from multiple frameworks are mixed in a single site.
+ Autoreload and signal handling (from Python at least) are disabled.
+ 3. The deployer starts a site via a framework-specific mechanism;
+ for example, when running tests, exploring tutorials, or deploying
+ single applications from a single framework. The framework controls
+ which site-wide components are enabled as it sees fit.
+
+The Bus object in this package uses topic-based publish-subscribe
+messaging to accomplish all this. A few topic channels are built in
+('start', 'stop', 'exit', 'graceful', 'log', and 'main'). Frameworks and
+site containers are free to define their own. If a message is sent to a
+channel that has not been defined or has no listeners, there is no effect.
+
+In general, there should only ever be a single Bus object per process.
+Frameworks and site containers share a single Bus object by publishing
+messages and subscribing listeners.
+
+The Bus object works as a finite state machine which models the current
+state of the process. Bus methods move it from one state to another;
+those methods then publish to subscribed listeners on the channel for
+the new state.::
+
+ O
+ |
+ V
+ STOPPING --> STOPPED --> EXITING -> X
+ A A |
+ | \___ |
+ | \ |
+ | V V
+ STARTED <-- STARTING
+
+"""
+
+import atexit
+
+try:
+ import ctypes
+except ImportError:
+ """Google AppEngine is shipped without ctypes
+
+ :seealso: http://stackoverflow.com/a/6523777/70170
+ """
+ ctypes = None
+
+import operator
+import os
+import sys
+import threading
+import time
+import traceback as _traceback
+import warnings
+import subprocess
+import functools
+
+from more_itertools import always_iterable
+
+
+# Here I save the value of os.getcwd(), which, if I am imported early enough,
+# will be the directory from which the startup script was run. This is needed
+# by _do_execv(), to change back to the original directory before execv()ing a
+# new process. This is a defense against the application having changed the
+# current working directory (which could make sys.executable "not found" if
+# sys.executable is a relative-path, and/or cause other problems).
+_startup_cwd = os.getcwd()
+
+
+class ChannelFailures(Exception):
+ """Exception raised during errors on Bus.publish()."""
+
+ delimiter = '\n'
+
+ def __init__(self, *args, **kwargs):
+ """Initialize ChannelFailures errors wrapper."""
+ super(ChannelFailures, self).__init__(*args, **kwargs)
+ self._exceptions = list()
+
+ def handle_exception(self):
+ """Append the current exception to self."""
+ self._exceptions.append(sys.exc_info()[1])
+
+ def get_instances(self):
+ """Return a list of seen exception instances."""
+ return self._exceptions[:]
+
+ def __str__(self):
+ """Render the list of errors, which happened in channel."""
+ exception_strings = map(repr, self.get_instances())
+ return self.delimiter.join(exception_strings)
+
+ __repr__ = __str__
+
+ def __bool__(self):
+ """Determine whether any error happened in channel."""
+ return bool(self._exceptions)
+ __nonzero__ = __bool__
+
+# Use a flag to indicate the state of the bus.
+
+
+class _StateEnum(object):
+
+ class State(object):
+ name = None
+
+ def __repr__(self):
+ return 'states.%s' % self.name
+
+ def __setattr__(self, key, value):
+ if isinstance(value, self.State):
+ value.name = key
+ object.__setattr__(self, key, value)
+
+
+states = _StateEnum()
+states.STOPPED = states.State()
+states.STARTING = states.State()
+states.STARTED = states.State()
+states.STOPPING = states.State()
+states.EXITING = states.State()
+
+
+try:
+ import fcntl
+except ImportError:
+ max_files = 0
+else:
+ try:
+ max_files = os.sysconf('SC_OPEN_MAX')
+ except AttributeError:
+ max_files = 1024
+
+
+class Bus(object):
+ """Process state-machine and messenger for HTTP site deployment.
+
+ All listeners for a given channel are guaranteed to be called even
+ if others at the same channel fail. Each failure is logged, but
+ execution proceeds on to the next listener. The only way to stop all
+ processing from inside a listener is to raise SystemExit and stop the
+ whole server.
+ """
+
+ states = states
+ state = states.STOPPED
+ execv = False
+ max_cloexec_files = max_files
+
+ def __init__(self):
+ """Initialize pub/sub bus."""
+ self.execv = False
+ self.state = states.STOPPED
+ channels = 'start', 'stop', 'exit', 'graceful', 'log', 'main'
+ self.listeners = dict(
+ (channel, set())
+ for channel in channels
+ )
+ self._priorities = {}
+
+ def subscribe(self, channel, callback=None, priority=None):
+ """Add the given callback at the given channel (if not present).
+
+ If callback is None, return a partial suitable for decorating
+ the callback.
+ """
+ if callback is None:
+ return functools.partial(
+ self.subscribe,
+ channel,
+ priority=priority,
+ )
+
+ ch_listeners = self.listeners.setdefault(channel, set())
+ ch_listeners.add(callback)
+
+ if priority is None:
+ priority = getattr(callback, 'priority', 50)
+ self._priorities[(channel, callback)] = priority
+
+ def unsubscribe(self, channel, callback):
+ """Discard the given callback (if present)."""
+ listeners = self.listeners.get(channel)
+ if listeners and callback in listeners:
+ listeners.discard(callback)
+ del self._priorities[(channel, callback)]
+
+ def publish(self, channel, *args, **kwargs):
+ """Return output of all subscribers for the given channel."""
+ if channel not in self.listeners:
+ return []
+
+ exc = ChannelFailures()
+ output = []
+
+ raw_items = (
+ (self._priorities[(channel, listener)], listener)
+ for listener in self.listeners[channel]
+ )
+ items = sorted(raw_items, key=operator.itemgetter(0))
+ for priority, listener in items:
+ try:
+ output.append(listener(*args, **kwargs))
+ except KeyboardInterrupt:
+ raise
+ except SystemExit:
+ e = sys.exc_info()[1]
+ # If we have previous errors ensure the exit code is non-zero
+ if exc and e.code == 0:
+ e.code = 1
+ raise
+ except Exception:
+ exc.handle_exception()
+ if channel == 'log':
+ # Assume any further messages to 'log' will fail.
+ pass
+ else:
+ self.log('Error in %r listener %r' % (channel, listener),
+ level=40, traceback=True)
+ if exc:
+ raise exc
+ return output
+
+ def _clean_exit(self):
+ """Assert that the Bus is not running in atexit handler callback."""
+ if self.state != states.EXITING:
+ warnings.warn(
+ 'The main thread is exiting, but the Bus is in the %r state; '
+ 'shutting it down automatically now. You must either call '
+ 'bus.block() after start(), or call bus.exit() before the '
+ 'main thread exits.' % self.state, RuntimeWarning)
+ self.exit()
+
+ def start(self):
+ """Start all services."""
+ atexit.register(self._clean_exit)
+
+ self.state = states.STARTING
+ self.log('Bus STARTING')
+ try:
+ self.publish('start')
+ self.state = states.STARTED
+ self.log('Bus STARTED')
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except Exception:
+ self.log('Shutting down due to error in start listener:',
+ level=40, traceback=True)
+ e_info = sys.exc_info()[1]
+ try:
+ self.exit()
+ except Exception:
+ # Any stop/exit errors will be logged inside publish().
+ pass
+ # Re-raise the original error
+ raise e_info
+
+ def exit(self):
+ """Stop all services and prepare to exit the process."""
+ exitstate = self.state
+ EX_SOFTWARE = 70
+ try:
+ self.stop()
+
+ self.state = states.EXITING
+ self.log('Bus EXITING')
+ self.publish('exit')
+ # This isn't strictly necessary, but it's better than seeing
+ # "Waiting for child threads to terminate..." and then nothing.
+ self.log('Bus EXITED')
+ except Exception:
+ # This method is often called asynchronously (whether thread,
+ # signal handler, console handler, or atexit handler), so we
+ # can't just let exceptions propagate out unhandled.
+ # Assume it's been logged and just die.
+ os._exit(EX_SOFTWARE)
+
+ if exitstate == states.STARTING:
+ # exit() was called before start() finished, possibly due to
+ # Ctrl-C because a start listener got stuck. In this case,
+ # we could get stuck in a loop where Ctrl-C never exits the
+ # process, so we just call os.exit here.
+ os._exit(EX_SOFTWARE)
+
+ def restart(self):
+ """Restart the process (may close connections).
+
+ This method does not restart the process from the calling thread;
+ instead, it stops the bus and asks the main thread to call execv.
+ """
+ self.execv = True
+ self.exit()
+
+ def graceful(self):
+ """Advise all services to reload."""
+ self.log('Bus graceful')
+ self.publish('graceful')
+
+ def block(self, interval=0.1):
+ """Wait for the EXITING state, KeyboardInterrupt or SystemExit.
+
+ This function is intended to be called only by the main thread.
+ After waiting for the EXITING state, it also waits for all threads
+ to terminate, and then calls os.execv if self.execv is True. This
+ design allows another thread to call bus.restart, yet have the main
+ thread perform the actual execv call (required on some platforms).
+ """
+ try:
+ self.wait(states.EXITING, interval=interval, channel='main')
+ except (KeyboardInterrupt, IOError):
+ # The time.sleep call might raise
+ # "IOError: [Errno 4] Interrupted function call" on KBInt.
+ self.log('Keyboard Interrupt: shutting down bus')
+ self.exit()
+ except SystemExit:
+ self.log('SystemExit raised: shutting down bus')
+ self.exit()
+ raise
+
+ # Waiting for ALL child threads to finish is necessary on OS X.
+ # See https://github.com/cherrypy/cherrypy/issues/581.
+ # It's also good to let them all shut down before allowing
+ # the main thread to call atexit handlers.
+ # See https://github.com/cherrypy/cherrypy/issues/751.
+ self.log('Waiting for child threads to terminate...')
+ for t in threading.enumerate():
+ # Validate the we're not trying to join the MainThread
+ # that will cause a deadlock and the case exist when
+ # implemented as a windows service and in any other case
+ # that another thread executes cherrypy.engine.exit()
+ if (
+ t != threading.current_thread() and
+ not isinstance(t, threading._MainThread) and
+ # Note that any dummy (external) threads are
+ # always daemonic.
+ not t.daemon
+ ):
+ self.log('Waiting for thread %s.' % t.name)
+ t.join()
+
+ if self.execv:
+ self._do_execv()
+
+ def wait(self, state, interval=0.1, channel=None):
+ """Poll for the given state(s) at intervals; publish to channel."""
+ states = set(always_iterable(state))
+
+ while self.state not in states:
+ time.sleep(interval)
+ self.publish(channel)
+
+ def _do_execv(self):
+ """Re-execute the current process.
+
+ This must be called from the main thread, because certain platforms
+ (OS X) don't allow execv to be called in a child thread very well.
+ """
+ try:
+ args = self._get_true_argv()
+ except NotImplementedError:
+ """It's probably win32 or GAE"""
+ args = [sys.executable] + self._get_interpreter_argv() + sys.argv
+
+ self.log('Re-spawning %s' % ' '.join(args))
+
+ self._extend_pythonpath(os.environ)
+
+ if sys.platform[:4] == 'java':
+ from _systemrestart import SystemRestart
+ raise SystemRestart
+ else:
+ if sys.platform == 'win32':
+ args = ['"%s"' % arg for arg in args]
+
+ os.chdir(_startup_cwd)
+ if self.max_cloexec_files:
+ self._set_cloexec()
+ os.execv(sys.executable, args)
+
+ @staticmethod
+ def _get_interpreter_argv():
+ """Retrieve current Python interpreter's arguments.
+
+ Returns empty tuple in case of frozen mode, uses built-in arguments
+ reproduction function otherwise.
+
+ Frozen mode is possible for the app has been packaged into a binary
+ executable using py2exe. In this case the interpreter's arguments are
+ already built-in into that executable.
+
+ :seealso: https://github.com/cherrypy/cherrypy/issues/1526
+ Ref: https://pythonhosted.org/PyInstaller/runtime-information.html
+ """
+ return ([]
+ if getattr(sys, 'frozen', False)
+ else subprocess._args_from_interpreter_flags())
+
+ @staticmethod
+ def _get_true_argv():
+ """Retrieve all real arguments of the python interpreter.
+
+ ...even those not listed in ``sys.argv``
+
+ :seealso: http://stackoverflow.com/a/28338254/595220
+ :seealso: http://stackoverflow.com/a/6683222/595220
+ :seealso: http://stackoverflow.com/a/28414807/595220
+ """
+ try:
+ char_p = ctypes.c_wchar_p
+
+ argv = ctypes.POINTER(char_p)()
+ argc = ctypes.c_int()
+
+ ctypes.pythonapi.Py_GetArgcArgv(
+ ctypes.byref(argc),
+ ctypes.byref(argv),
+ )
+
+ _argv = argv[:argc.value]
+
+ # The code below is trying to correctly handle special cases.
+ # `-c`'s argument interpreted by Python itself becomes `-c` as
+ # well. Same applies to `-m`. This snippet is trying to survive
+ # at least the case with `-m`
+ # Ref: https://github.com/cherrypy/cherrypy/issues/1545
+ # Ref: python/cpython@418baf9
+ argv_len, is_command, is_module = len(_argv), False, False
+
+ try:
+ m_ind = _argv.index('-m')
+ if m_ind < argv_len - 1 and _argv[m_ind + 1] in ('-c', '-m'):
+ """
+ In some older Python versions `-m`'s argument may be
+ substituted with `-c`, not `-m`
+ """
+ is_module = True
+ except (IndexError, ValueError):
+ m_ind = None
+
+ try:
+ c_ind = _argv.index('-c')
+ if c_ind < argv_len - 1 and _argv[c_ind + 1] == '-c':
+ is_command = True
+ except (IndexError, ValueError):
+ c_ind = None
+
+ if is_module:
+ """It's containing `-m -m` sequence of arguments"""
+ if is_command and c_ind < m_ind:
+ """There's `-c -c` before `-m`"""
+ raise RuntimeError(
+ "Cannot reconstruct command from '-c'. Ref: "
+ 'https://github.com/cherrypy/cherrypy/issues/1545')
+ # Survive module argument here
+ original_module = sys.argv[0]
+ if not os.access(original_module, os.R_OK):
+ """There's no such module exist"""
+ raise AttributeError(
+ "{} doesn't seem to be a module "
+ 'accessible by current user'.format(original_module))
+ del _argv[m_ind:m_ind + 2] # remove `-m -m`
+ # ... and substitute it with the original module path:
+ _argv.insert(m_ind, original_module)
+ elif is_command:
+ """It's containing just `-c -c` sequence of arguments"""
+ raise RuntimeError(
+ "Cannot reconstruct command from '-c'. "
+ 'Ref: https://github.com/cherrypy/cherrypy/issues/1545')
+ except AttributeError:
+ """It looks Py_GetArgcArgv is completely absent in some environments
+
+ It is known, that there's no Py_GetArgcArgv in MS Windows and
+ ``ctypes`` module is completely absent in Google AppEngine
+
+ :seealso: https://github.com/cherrypy/cherrypy/issues/1506
+ :seealso: https://github.com/cherrypy/cherrypy/issues/1512
+ :ref: http://bit.ly/2gK6bXK
+ """
+ raise NotImplementedError
+ else:
+ return _argv
+
+ @staticmethod
+ def _extend_pythonpath(env):
+ """Prepend current working dir to PATH environment variable if needed.
+
+ If sys.path[0] is an empty string, the interpreter was likely
+ invoked with -m and the effective path is about to change on
+ re-exec. Add the current directory to $PYTHONPATH to ensure
+ that the new process sees the same path.
+
+ This issue cannot be addressed in the general case because
+ Python cannot reliably reconstruct the
+ original command line (http://bugs.python.org/issue14208).
+
+ (This idea filched from tornado.autoreload)
+ """
+ path_prefix = '.' + os.pathsep
+ existing_path = env.get('PYTHONPATH', '')
+ needs_patch = (
+ sys.path[0] == '' and
+ not existing_path.startswith(path_prefix)
+ )
+
+ if needs_patch:
+ env['PYTHONPATH'] = path_prefix + existing_path
+
+ def _set_cloexec(self):
+ """Set the CLOEXEC flag on all open files (except stdin/out/err).
+
+ If self.max_cloexec_files is an integer (the default), then on
+ platforms which support it, it represents the max open files setting
+ for the operating system. This function will be called just before
+ the process is restarted via os.execv() to prevent open files
+ from persisting into the new process.
+
+ Set self.max_cloexec_files to 0 to disable this behavior.
+ """
+ for fd in range(3, self.max_cloexec_files): # skip stdin/out/err
+ try:
+ flags = fcntl.fcntl(fd, fcntl.F_GETFD)
+ except IOError:
+ continue
+ fcntl.fcntl(fd, fcntl.F_SETFD, flags | fcntl.FD_CLOEXEC)
+
+ def stop(self):
+ """Stop all services."""
+ self.state = states.STOPPING
+ self.log('Bus STOPPING')
+ self.publish('stop')
+ self.state = states.STOPPED
+ self.log('Bus STOPPED')
+
+ def start_with_callback(self, func, args=None, kwargs=None):
+ """Start 'func' in a new thread T, then start self (and return T)."""
+ if args is None:
+ args = ()
+ if kwargs is None:
+ kwargs = {}
+ args = (func,) + args
+
+ def _callback(func, *a, **kw):
+ self.wait(states.STARTED)
+ func(*a, **kw)
+ t = threading.Thread(target=_callback, args=args, kwargs=kwargs)
+ t.name = 'Bus Callback ' + t.name
+ t.start()
+
+ self.start()
+
+ return t
+
+ def log(self, msg='', level=20, traceback=False):
+ """Log the given message. Append the last traceback if requested."""
+ if traceback:
+ msg += '\n' + ''.join(_traceback.format_exception(*sys.exc_info()))
+ self.publish('log', msg, level)
+
+
+bus = Bus()
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/scaffold/__init__.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/scaffold/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..bcddba2db85b0b509b26371ef4046b62e6c688f4
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/scaffold/__init__.py
@@ -0,0 +1,63 @@
+"""<MyProject>, a CherryPy application.
+
+Use this as a base for creating new CherryPy applications. When you want
+to make a new app, copy and paste this folder to some other location
+(maybe site-packages) and rename it to the name of your project,
+then tweak as desired.
+
+Even before any tweaking, this should serve a few demonstration pages.
+Change to this directory and run:
+
+ cherryd -c site.conf
+
+"""
+
+import cherrypy
+from cherrypy import tools, url
+
+import os
+local_dir = os.path.join(os.getcwd(), os.path.dirname(__file__))
+
+
+@cherrypy.config(**{'tools.log_tracebacks.on': True})
+class Root:
+ """Declaration of the CherryPy app URI structure."""
+
+ @cherrypy.expose
+ def index(self):
+ """Render HTML-template at the root path of the web-app."""
+ return """<html>
+<body>Try some <a href='%s?a=7'>other</a> path,
+or a <a href='%s?n=14'>default</a> path.<br />
+Or, just look at the pretty picture:<br />
+<img src='%s' />
+</body></html>""" % (url('other'), url('else'),
+ url('files/made_with_cherrypy_small.png'))
+
+ @cherrypy.expose
+ def default(self, *args, **kwargs):
+ """Render catch-all args and kwargs."""
+ return 'args: %s kwargs: %s' % (args, kwargs)
+
+ @cherrypy.expose
+ def other(self, a=2, b='bananas', c=None):
+ """Render number of fruits based on third argument."""
+ cherrypy.response.headers['Content-Type'] = 'text/plain'
+ if c is None:
+ return 'Have %d %s.' % (int(a), b)
+ else:
+ return 'Have %d %s, %s.' % (int(a), b, c)
+
+ files = tools.staticdir.handler(
+ section='/files',
+ dir=os.path.join(local_dir, 'static'),
+ # Ignore .php files, etc.
+ match=r'\.(css|gif|html?|ico|jpe?g|js|png|swf|xml)$',
+ )
+
+
+root = Root()
+
+# Uncomment the following to use your own favicon instead of CP's default.
+# favicon_path = os.path.join(local_dir, "favicon.ico")
+# root.favicon_ico = tools.staticfile.handler(filename=favicon_path)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/scaffold/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/scaffold/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1de5e5f4949c12b0d8061ff6644d787289e26a0a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/scaffold/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/scaffold/apache-fcgi.conf b/monEnvTP/lib/python3.8/site-packages/cherrypy/scaffold/apache-fcgi.conf
new file mode 100644
index 0000000000000000000000000000000000000000..6e4f144cf45ae1e1e982e28c31c605e373ec407a
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/scaffold/apache-fcgi.conf
@@ -0,0 +1,22 @@
+# Apache2 server conf file for using CherryPy with mod_fcgid.
+
+# This doesn't have to be "C:/", but it has to be a directory somewhere, and
+# MUST match the directory used in the FastCgiExternalServer directive, below.
+DocumentRoot "C:/"
+
+ServerName 127.0.0.1
+Listen 80
+LoadModule fastcgi_module modules/mod_fastcgi.dll
+LoadModule rewrite_module modules/mod_rewrite.so
+
+Options ExecCGI
+SetHandler fastcgi-script
+RewriteEngine On
+# Send requests for any URI to our fastcgi handler.
+RewriteRule ^(.*)$ /fastcgi.pyc [L]
+
+# The FastCgiExternalServer directive defines filename as an external FastCGI application.
+# If filename does not begin with a slash (/) then it is assumed to be relative to the ServerRoot.
+# The filename does not have to exist in the local filesystem. URIs that Apache resolves to this
+# filename will be handled by this external FastCGI application.
+FastCgiExternalServer "C:/fastcgi.pyc" -host 127.0.0.1:8088
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/scaffold/example.conf b/monEnvTP/lib/python3.8/site-packages/cherrypy/scaffold/example.conf
new file mode 100644
index 0000000000000000000000000000000000000000..63250fe3a36a01d553d7b57902f77d0df8f7da1e
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/scaffold/example.conf
@@ -0,0 +1,3 @@
+[/]
+log.error_file: "error.log"
+log.access_file: "access.log"
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/scaffold/site.conf b/monEnvTP/lib/python3.8/site-packages/cherrypy/scaffold/site.conf
new file mode 100644
index 0000000000000000000000000000000000000000..6ed3898373838a85d3dc1bf5725def8662646cbe
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/scaffold/site.conf
@@ -0,0 +1,14 @@
+[global]
+# Uncomment this when you're done developing
+#environment: "production"
+
+server.socket_host: "0.0.0.0"
+server.socket_port: 8088
+
+# Uncomment the following lines to run on HTTPS at the same time
+#server.2.socket_host: "0.0.0.0"
+#server.2.socket_port: 8433
+#server.2.ssl_certificate: '../test/test.pem'
+#server.2.ssl_private_key: '../test/test.pem'
+
+tree.myapp: cherrypy.Application(scaffold.root, "/", "example.conf")
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/scaffold/static/made_with_cherrypy_small.png b/monEnvTP/lib/python3.8/site-packages/cherrypy/scaffold/static/made_with_cherrypy_small.png
new file mode 100644
index 0000000000000000000000000000000000000000..724f9d72d9ca5aede0b788fc1216286aa967e212
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/scaffold/static/made_with_cherrypy_small.png differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__init__.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..068382be1401a1629968f234be9bdb09de6889ca
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__init__.py
@@ -0,0 +1,24 @@
+"""
+Regression test suite for CherryPy.
+"""
+
+import os
+import sys
+
+
+def newexit():
+ os._exit(1)
+
+
+def setup():
+ # We want to monkey patch sys.exit so that we can get some
+ # information about where exit is being called.
+ newexit._old = sys.exit
+ sys.exit = newexit
+
+
+def teardown():
+ try:
+ sys.exit = sys.exit._old
+ except AttributeError:
+ sys.exit = sys._exit
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c166cdcf1df3f261585cdb684155d722dc9c2a1e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/_test_decorators.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/_test_decorators.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3c66bab1b3979c83d75d0694c7e746bf87554c23
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/_test_decorators.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/_test_states_demo.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/_test_states_demo.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7dca10fdec97263b16ceaf30af3e049bd5c86ad4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/_test_states_demo.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/benchmark.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/benchmark.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e7bf4544200dba2cef735c2dd22113568207ab24
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/benchmark.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/checkerdemo.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/checkerdemo.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e6ca5a1ed3ed357daae399bd5937f8d4e81abfce
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/checkerdemo.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/helper.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/helper.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..86c2c922e5109f9da5b9b55555044fd860befc19
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/helper.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/logtest.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/logtest.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1d06cf66769ab12949f05987d2dcfb86075119b5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/logtest.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/modfastcgi.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/modfastcgi.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..27de5380dd6decedf6bfeefdba85a5899ea47aee
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/modfastcgi.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/modfcgid.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/modfcgid.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9774e4c0623fcc27f2e9ddfc1d3b6b581a14d9d0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/modfcgid.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/modpy.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/modpy.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d0d31230e835fd138c2293bab9e827fd1a7bfa85
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/modpy.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/modwsgi.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/modwsgi.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f5fa4b730f652fd897b2848870bc5213e1c4637f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/modwsgi.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/sessiondemo.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/sessiondemo.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..da0b63e89d3b5881e93525fc5597806ff58620c0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/sessiondemo.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_auth_basic.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_auth_basic.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..44128586c4df3639d1b9190690c8c878763aa3f1
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_auth_basic.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_auth_digest.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_auth_digest.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..865cfe54ead1c3da928f83b26f0e9fcd18f98c94
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_auth_digest.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_bus.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_bus.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5537f2325b2cf013f9574b32f9620c308dd014b1
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_bus.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_caching.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_caching.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..da6db64f4081eed6c7c75d846c5f2de18a7c7798
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_caching.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_config.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_config.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..87326de1e6f4d60d9e0fe6a490419311dd626d08
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_config.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_config_server.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_config_server.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f3c8315c3257ed2d4345d523f446fd45c8279276
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_config_server.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_conn.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_conn.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4fd67a56c9e20c6218f35c5361c5037a00dff104
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_conn.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_core.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_core.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8c0d18fa2997e28df0511ecdc75bc78ab0e7371f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_core.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_dynamicobjectmapping.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_dynamicobjectmapping.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f05994bf5fc3de2d475729a29487208b68d725e9
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_dynamicobjectmapping.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_encoding.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_encoding.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..21dcd03caba4ee222f89bd9e9f4cbc01ced4a615
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_encoding.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_etags.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_etags.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..34247963a217a78c2814dbc62626c6695187a93d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_etags.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_http.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_http.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..aa0fe17f5f77c6f4f1c6da82ddd0015500f99315
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_http.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_httputil.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_httputil.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c40caa337442da1b15a8a00b3139b7becb5570a8
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_httputil.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_iterator.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_iterator.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0831180a4d05ce86c4a6471797f9c3602e8a711a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_iterator.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_json.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_json.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c45c7d38e2dad109b75ccab5a9ae0ed0ba055ee7
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_json.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_logging.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_logging.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9508be3f0c44d7defe52db77bb8b1b7ca4a55430
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_logging.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_mime.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_mime.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..74cbdf24c76617e19638f504c479cd6e5c1465d4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_mime.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_misc_tools.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_misc_tools.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..50fb1a05ff04c9688791efd9ad624f8fe3f27047
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_misc_tools.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_native.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_native.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1f8afe3348ff6410f11b186fb73d0f43c4272a2d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_native.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_objectmapping.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_objectmapping.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..abc2ada05509bd89af660dd0e96991c0b70e430d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_objectmapping.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_params.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_params.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..efc94176fe60ebcc0b43fb932a5850201ba7e706
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_params.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_plugins.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_plugins.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..46be33c046ab5c0457582a162200682ae6669961
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_plugins.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_proxy.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_proxy.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f56e3ff7104fead4e215827df561adc6bc3f813d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_proxy.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_refleaks.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_refleaks.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1d6c4642c02a366f680f3fe996076c6a5673d877
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_refleaks.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_request_obj.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_request_obj.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..fb857ce93efc3e8c1e5bd46d6fa56cacba02cffd
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_request_obj.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_routes.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_routes.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2a2e6adcbb1b95b4c56142da5e7ad5cfb15958ad
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_routes.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_session.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_session.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..871c66396c10b3e8113c39f7680265432a39851f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_session.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_sessionauthenticate.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_sessionauthenticate.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f8e039380385f503b7e9a3de02c378e9cfe202ff
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_sessionauthenticate.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_states.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_states.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f1a7e9485a8b0395311b63ebaaf9f9ba5df62dcd
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_states.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_static.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_static.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9bb6cfd09e48b6aaf5b042243bc8387428a1191c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_static.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_tools.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_tools.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2eb35c8ff0a5bb634303a1fd22258de404dffca2
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_tools.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_tutorials.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_tutorials.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b4568d41a3a500bf5d74421bd67853c4b5323ec1
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_tutorials.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_virtualhost.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_virtualhost.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ae0ef4a5b6b0aa55e217944259d2b73b5a1cf296
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_virtualhost.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_wsgi_ns.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_wsgi_ns.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e3601cd2730916bcee9a389e7631ed29a28b705e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_wsgi_ns.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_wsgi_unix_socket.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_wsgi_unix_socket.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5237cd44d1b43bc7079a6c49a1fb3a9432677b27
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_wsgi_unix_socket.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_wsgi_vhost.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_wsgi_vhost.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..38a6b4eef286bd86f722e69002e9f243283e9900
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_wsgi_vhost.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_wsgiapps.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_wsgiapps.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e716643be31fa05cbc89eb9c9364c05023319553
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_wsgiapps.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_xmlrpc.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_xmlrpc.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3f02a92c7a4fc6ef8a3b7cefba1d19576d60b251
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/test_xmlrpc.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/webtest.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/webtest.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e9b0bc6328c1385d1885d7a4d97b7ac2479b0390
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/__pycache__/webtest.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/_test_decorators.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/_test_decorators.py
new file mode 100644
index 0000000000000000000000000000000000000000..74832e40ac03ba7c7215679fc149731f8c36f4a7
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/_test_decorators.py
@@ -0,0 +1,39 @@
+"""Test module for the @-decorator syntax, which is version-specific"""
+
+import cherrypy
+from cherrypy import expose, tools
+
+
+class ExposeExamples(object):
+
+ @expose
+ def no_call(self):
+ return 'Mr E. R. Bradshaw'
+
+ @expose()
+ def call_empty(self):
+ return 'Mrs. B.J. Smegma'
+
+ @expose('call_alias')
+ def nesbitt(self):
+ return 'Mr Nesbitt'
+
+ @expose(['alias1', 'alias2'])
+ def andrews(self):
+ return 'Mr Ken Andrews'
+
+ @expose(alias='alias3')
+ def watson(self):
+ return 'Mr. and Mrs. Watson'
+
+
+class ToolExamples(object):
+
+ @expose
+ # This is here to demonstrate that using the config decorator
+ # does not overwrite other config attributes added by the Tool
+ # decorator (in this case response_headers).
+ @cherrypy.config(**{'response.stream': True})
+ @tools.response_headers(headers=[('Content-Type', 'application/data')])
+ def blah(self):
+ yield b'blah'
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/_test_states_demo.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/_test_states_demo.py
new file mode 100644
index 0000000000000000000000000000000000000000..a49407baf30180ca7b54a62770c14ae2cfc12f34
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/_test_states_demo.py
@@ -0,0 +1,69 @@
+import os
+import sys
+import time
+
+import cherrypy
+
+starttime = time.time()
+
+
+class Root:
+
+ @cherrypy.expose
+ def index(self):
+ return 'Hello World'
+
+ @cherrypy.expose
+ def mtimes(self):
+ return repr(cherrypy.engine.publish('Autoreloader', 'mtimes'))
+
+ @cherrypy.expose
+ def pid(self):
+ return str(os.getpid())
+
+ @cherrypy.expose
+ def start(self):
+ return repr(starttime)
+
+ @cherrypy.expose
+ def exit(self):
+ # This handler might be called before the engine is STARTED if an
+ # HTTP worker thread handles it before the HTTP server returns
+ # control to engine.start. We avoid that race condition here
+ # by waiting for the Bus to be STARTED.
+ cherrypy.engine.wait(state=cherrypy.engine.states.STARTED)
+ cherrypy.engine.exit()
+
+
+@cherrypy.engine.subscribe('start', priority=100)
+def unsub_sig():
+ cherrypy.log('unsubsig: %s' % cherrypy.config.get('unsubsig', False))
+ if cherrypy.config.get('unsubsig', False):
+ cherrypy.log('Unsubscribing the default cherrypy signal handler')
+ cherrypy.engine.signal_handler.unsubscribe()
+ try:
+ from signal import signal, SIGTERM
+ except ImportError:
+ pass
+ else:
+ def old_term_handler(signum=None, frame=None):
+ cherrypy.log('I am an old SIGTERM handler.')
+ sys.exit(0)
+ cherrypy.log('Subscribing the new one.')
+ signal(SIGTERM, old_term_handler)
+
+
+@cherrypy.engine.subscribe('start', priority=6)
+def starterror():
+ if cherrypy.config.get('starterror', False):
+ 1 / 0
+
+
+@cherrypy.engine.subscribe('start', priority=6)
+def log_test_case_name():
+ if cherrypy.config.get('test_case_name', False):
+ cherrypy.log('STARTED FROM: %s' %
+ cherrypy.config.get('test_case_name'))
+
+
+cherrypy.tree.mount(Root(), '/', {'/': {}})
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/benchmark.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/benchmark.py
new file mode 100644
index 0000000000000000000000000000000000000000..44dfeff12a3919138772ba4f86b1b9bd4b562b99
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/benchmark.py
@@ -0,0 +1,425 @@
+"""CherryPy Benchmark Tool
+
+ Usage:
+ benchmark.py [options]
+
+ --null: use a null Request object (to bench the HTTP server only)
+ --notests: start the server but do not run the tests; this allows
+ you to check the tested pages with a browser
+ --help: show this help message
+ --cpmodpy: run tests via apache on 54583 (with the builtin _cpmodpy)
+ --modpython: run tests via apache on 54583 (with modpython_gateway)
+ --ab=path: Use the ab script/executable at 'path' (see below)
+ --apache=path: Use the apache script/exe at 'path' (see below)
+
+ To run the benchmarks, the Apache Benchmark tool "ab" must either be on
+ your system path, or specified via the --ab=path option.
+
+ To run the modpython tests, the "apache" executable or script must be
+ on your system path, or provided via the --apache=path option. On some
+ platforms, "apache" may be called "apachectl" or "apache2ctl"--create
+ a symlink to them if needed.
+"""
+
+import getopt
+import os
+import re
+import sys
+import time
+
+import cherrypy
+from cherrypy import _cperror, _cpmodpy
+from cherrypy.lib import httputil
+
+
+curdir = os.path.join(os.getcwd(), os.path.dirname(__file__))
+
+AB_PATH = ''
+APACHE_PATH = 'apache'
+SCRIPT_NAME = '/cpbench/users/rdelon/apps/blog'
+
+__all__ = ['ABSession', 'Root', 'print_report',
+ 'run_standard_benchmarks', 'safe_threads',
+ 'size_report', 'thread_report',
+ ]
+
+size_cache = {}
+
+
+class Root:
+
+ @cherrypy.expose
+ def index(self):
+ return """<html>
+<head>
+ <title>CherryPy Benchmark</title>
+</head>
+<body>
+ <ul>
+ <li><a href="hello">Hello, world! (14 byte dynamic)</a></li>
+ <li><a href="static/index.html">Static file (14 bytes static)</a></li>
+ <li><form action="sizer">Response of length:
+ <input type='text' name='size' value='10' /></form>
+ </li>
+ </ul>
+</body>
+</html>"""
+
+ @cherrypy.expose
+ def hello(self):
+ return 'Hello, world\r\n'
+
+ @cherrypy.expose
+ def sizer(self, size):
+ resp = size_cache.get(size, None)
+ if resp is None:
+ size_cache[size] = resp = 'X' * int(size)
+ return resp
+
+
+def init():
+
+ cherrypy.config.update({
+ 'log.error.file': '',
+ 'environment': 'production',
+ 'server.socket_host': '127.0.0.1',
+ 'server.socket_port': 54583,
+ 'server.max_request_header_size': 0,
+ 'server.max_request_body_size': 0,
+ })
+
+ # Cheat mode on ;)
+ del cherrypy.config['tools.log_tracebacks.on']
+ del cherrypy.config['tools.log_headers.on']
+ del cherrypy.config['tools.trailing_slash.on']
+
+ appconf = {
+ '/static': {
+ 'tools.staticdir.on': True,
+ 'tools.staticdir.dir': 'static',
+ 'tools.staticdir.root': curdir,
+ },
+ }
+ globals().update(
+ app=cherrypy.tree.mount(Root(), SCRIPT_NAME, appconf),
+ )
+
+
+class NullRequest:
+
+ """A null HTTP request class, returning 200 and an empty body."""
+
+ def __init__(self, local, remote, scheme='http'):
+ pass
+
+ def close(self):
+ pass
+
+ def run(self, method, path, query_string, protocol, headers, rfile):
+ cherrypy.response.status = '200 OK'
+ cherrypy.response.header_list = [('Content-Type', 'text/html'),
+ ('Server', 'Null CherryPy'),
+ ('Date', httputil.HTTPDate()),
+ ('Content-Length', '0'),
+ ]
+ cherrypy.response.body = ['']
+ return cherrypy.response
+
+
+class NullResponse:
+ pass
+
+
+class ABSession:
+
+ """A session of 'ab', the Apache HTTP server benchmarking tool.
+
+Example output from ab:
+
+This is ApacheBench, Version 2.0.40-dev <$Revision: 1.121.2.1 $> apache-2.0
+Copyright (c) 1996 Adam Twiss, Zeus Technology Ltd, http://www.zeustech.net/
+Copyright (c) 1998-2002 The Apache Software Foundation, http://www.apache.org/
+
+Benchmarking 127.0.0.1 (be patient)
+Completed 100 requests
+Completed 200 requests
+Completed 300 requests
+Completed 400 requests
+Completed 500 requests
+Completed 600 requests
+Completed 700 requests
+Completed 800 requests
+Completed 900 requests
+
+
+Server Software: CherryPy/3.1beta
+Server Hostname: 127.0.0.1
+Server Port: 54583
+
+Document Path: /static/index.html
+Document Length: 14 bytes
+
+Concurrency Level: 10
+Time taken for tests: 9.643867 seconds
+Complete requests: 1000
+Failed requests: 0
+Write errors: 0
+Total transferred: 189000 bytes
+HTML transferred: 14000 bytes
+Requests per second: 103.69 [#/sec] (mean)
+Time per request: 96.439 [ms] (mean)
+Time per request: 9.644 [ms] (mean, across all concurrent requests)
+Transfer rate: 19.08 [Kbytes/sec] received
+
+Connection Times (ms)
+ min mean[+/-sd] median max
+Connect: 0 0 2.9 0 10
+Processing: 20 94 7.3 90 130
+Waiting: 0 43 28.1 40 100
+Total: 20 95 7.3 100 130
+
+Percentage of the requests served within a certain time (ms)
+ 50% 100
+ 66% 100
+ 75% 100
+ 80% 100
+ 90% 100
+ 95% 100
+ 98% 100
+ 99% 110
+ 100% 130 (longest request)
+Finished 1000 requests
+"""
+
+ parse_patterns = [
+ ('complete_requests', 'Completed',
+ br'^Complete requests:\s*(\d+)'),
+ ('failed_requests', 'Failed',
+ br'^Failed requests:\s*(\d+)'),
+ ('requests_per_second', 'req/sec',
+ br'^Requests per second:\s*([0-9.]+)'),
+ ('time_per_request_concurrent', 'msec/req',
+ br'^Time per request:\s*([0-9.]+).*concurrent requests\)$'),
+ ('transfer_rate', 'KB/sec',
+ br'^Transfer rate:\s*([0-9.]+)')
+ ]
+
+ def __init__(self, path=SCRIPT_NAME + '/hello', requests=1000,
+ concurrency=10):
+ self.path = path
+ self.requests = requests
+ self.concurrency = concurrency
+
+ def args(self):
+ port = cherrypy.server.socket_port
+ assert self.concurrency > 0
+ assert self.requests > 0
+ # Don't use "localhost".
+ # Cf
+ # http://mail.python.org/pipermail/python-win32/2008-March/007050.html
+ return ('-k -n %s -c %s http://127.0.0.1:%s%s' %
+ (self.requests, self.concurrency, port, self.path))
+
+ def run(self):
+ # Parse output of ab, setting attributes on self
+ try:
+ self.output = _cpmodpy.read_process(AB_PATH or 'ab', self.args())
+ except Exception:
+ print(_cperror.format_exc())
+ raise
+
+ for attr, name, pattern in self.parse_patterns:
+ val = re.search(pattern, self.output, re.MULTILINE)
+ if val:
+ val = val.group(1)
+ setattr(self, attr, val)
+ else:
+ setattr(self, attr, None)
+
+
+safe_threads = (25, 50, 100, 200, 400)
+if sys.platform in ('win32',):
+ # For some reason, ab crashes with > 50 threads on my Win2k laptop.
+ safe_threads = (10, 20, 30, 40, 50)
+
+
+def thread_report(path=SCRIPT_NAME + '/hello', concurrency=safe_threads):
+ sess = ABSession(path)
+ attrs, names, patterns = list(zip(*sess.parse_patterns))
+ avg = dict.fromkeys(attrs, 0.0)
+
+ yield ('threads',) + names
+ for c in concurrency:
+ sess.concurrency = c
+ sess.run()
+ row = [c]
+ for attr in attrs:
+ val = getattr(sess, attr)
+ if val is None:
+ print(sess.output)
+ row = None
+ break
+ val = float(val)
+ avg[attr] += float(val)
+ row.append(val)
+ if row:
+ yield row
+
+ # Add a row of averages.
+ yield ['Average'] + [str(avg[attr] / len(concurrency)) for attr in attrs]
+
+
+def size_report(sizes=(10, 100, 1000, 10000, 100000, 100000000),
+ concurrency=50):
+ sess = ABSession(concurrency=concurrency)
+ attrs, names, patterns = list(zip(*sess.parse_patterns))
+ yield ('bytes',) + names
+ for sz in sizes:
+ sess.path = '%s/sizer?size=%s' % (SCRIPT_NAME, sz)
+ sess.run()
+ yield [sz] + [getattr(sess, attr) for attr in attrs]
+
+
+def print_report(rows):
+ for row in rows:
+ print('')
+ for val in row:
+ sys.stdout.write(str(val).rjust(10) + ' | ')
+ print('')
+
+
+def run_standard_benchmarks():
+ print('')
+ print('Client Thread Report (1000 requests, 14 byte response body, '
+ '%s server threads):' % cherrypy.server.thread_pool)
+ print_report(thread_report())
+
+ print('')
+ print('Client Thread Report (1000 requests, 14 bytes via staticdir, '
+ '%s server threads):' % cherrypy.server.thread_pool)
+ print_report(thread_report('%s/static/index.html' % SCRIPT_NAME))
+
+ print('')
+ print('Size Report (1000 requests, 50 client threads, '
+ '%s server threads):' % cherrypy.server.thread_pool)
+ print_report(size_report())
+
+
+# modpython and other WSGI #
+
+def startup_modpython(req=None):
+ """Start the CherryPy app server in 'serverless' mode (for modpython/WSGI).
+ """
+ if cherrypy.engine.state == cherrypy._cpengine.STOPPED:
+ if req:
+ if 'nullreq' in req.get_options():
+ cherrypy.engine.request_class = NullRequest
+ cherrypy.engine.response_class = NullResponse
+ ab_opt = req.get_options().get('ab', '')
+ if ab_opt:
+ global AB_PATH
+ AB_PATH = ab_opt
+ cherrypy.engine.start()
+ if cherrypy.engine.state == cherrypy._cpengine.STARTING:
+ cherrypy.engine.wait()
+ return 0 # apache.OK
+
+
+def run_modpython(use_wsgi=False):
+ print('Starting mod_python...')
+ pyopts = []
+
+ # Pass the null and ab=path options through Apache
+ if '--null' in opts:
+ pyopts.append(('nullreq', ''))
+
+ if '--ab' in opts:
+ pyopts.append(('ab', opts['--ab']))
+
+ s = _cpmodpy.ModPythonServer
+ if use_wsgi:
+ pyopts.append(('wsgi.application', 'cherrypy::tree'))
+ pyopts.append(
+ ('wsgi.startup', 'cherrypy.test.benchmark::startup_modpython'))
+ handler = 'modpython_gateway::handler'
+ s = s(port=54583, opts=pyopts,
+ apache_path=APACHE_PATH, handler=handler)
+ else:
+ pyopts.append(
+ ('cherrypy.setup', 'cherrypy.test.benchmark::startup_modpython'))
+ s = s(port=54583, opts=pyopts, apache_path=APACHE_PATH)
+
+ try:
+ s.start()
+ run()
+ finally:
+ s.stop()
+
+
+if __name__ == '__main__':
+ init()
+
+ longopts = ['cpmodpy', 'modpython', 'null', 'notests',
+ 'help', 'ab=', 'apache=']
+ try:
+ switches, args = getopt.getopt(sys.argv[1:], '', longopts)
+ opts = dict(switches)
+ except getopt.GetoptError:
+ print(__doc__)
+ sys.exit(2)
+
+ if '--help' in opts:
+ print(__doc__)
+ sys.exit(0)
+
+ if '--ab' in opts:
+ AB_PATH = opts['--ab']
+
+ if '--notests' in opts:
+ # Return without stopping the server, so that the pages
+ # can be tested from a standard web browser.
+ def run():
+ port = cherrypy.server.socket_port
+ print('You may now open http://127.0.0.1:%s%s/' %
+ (port, SCRIPT_NAME))
+
+ if '--null' in opts:
+ print('Using null Request object')
+ else:
+ def run():
+ end = time.time() - start
+ print('Started in %s seconds' % end)
+ if '--null' in opts:
+ print('\nUsing null Request object')
+ try:
+ try:
+ run_standard_benchmarks()
+ except Exception:
+ print(_cperror.format_exc())
+ raise
+ finally:
+ cherrypy.engine.exit()
+
+ print('Starting CherryPy app server...')
+
+ class NullWriter(object):
+
+ """Suppresses the printing of socket errors."""
+
+ def write(self, data):
+ pass
+ sys.stderr = NullWriter()
+
+ start = time.time()
+
+ if '--cpmodpy' in opts:
+ run_modpython()
+ elif '--modpython' in opts:
+ run_modpython(use_wsgi=True)
+ else:
+ if '--null' in opts:
+ cherrypy.server.request_class = NullRequest
+ cherrypy.server.response_class = NullResponse
+
+ cherrypy.engine.start_with_callback(run)
+ cherrypy.engine.block()
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/checkerdemo.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/checkerdemo.py
new file mode 100644
index 0000000000000000000000000000000000000000..3438bd0c505a8b93e50c349fd412420fe4dab951
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/checkerdemo.py
@@ -0,0 +1,49 @@
+"""Demonstration app for cherrypy.checker.
+
+This application is intentionally broken and badly designed.
+To demonstrate the output of the CherryPy Checker, simply execute
+this module.
+"""
+
+import os
+import cherrypy
+thisdir = os.path.dirname(os.path.abspath(__file__))
+
+
+class Root:
+ pass
+
+
+if __name__ == '__main__':
+ conf = {'/base': {'tools.staticdir.root': thisdir,
+ # Obsolete key.
+ 'throw_errors': True,
+ },
+ # This entry should be OK.
+ '/base/static': {'tools.staticdir.on': True,
+ 'tools.staticdir.dir': 'static'},
+ # Warn on missing folder.
+ '/base/js': {'tools.staticdir.on': True,
+ 'tools.staticdir.dir': 'js'},
+ # Warn on dir with an abs path even though we provide root.
+ '/base/static2': {'tools.staticdir.on': True,
+ 'tools.staticdir.dir': '/static'},
+ # Warn on dir with a relative path with no root.
+ '/static3': {'tools.staticdir.on': True,
+ 'tools.staticdir.dir': 'static'},
+ # Warn on unknown namespace
+ '/unknown': {'toobles.gzip.on': True},
+ # Warn special on cherrypy.<known ns>.*
+ '/cpknown': {'cherrypy.tools.encode.on': True},
+ # Warn on mismatched types
+ '/conftype': {'request.show_tracebacks': 14},
+ # Warn on unknown tool.
+ '/web': {'tools.unknown.on': True},
+ # Warn on server.* in app config.
+ '/app1': {'server.socket_host': '0.0.0.0'},
+ # Warn on 'localhost'
+ 'global': {'server.socket_host': 'localhost'},
+ # Warn on '[name]'
+ '[/extra_brackets]': {},
+ }
+ cherrypy.quickstart(Root(), config=conf)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/fastcgi.conf b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/fastcgi.conf
new file mode 100644
index 0000000000000000000000000000000000000000..e5c5163caf5f75a8b5eab05fd26aa846f7714e17
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/fastcgi.conf
@@ -0,0 +1,18 @@
+
+# Apache2 server conf file for testing CherryPy with mod_fastcgi.
+# fumanchu: I had to hard-code paths due to crazy Debian layouts :(
+ServerRoot /usr/lib/apache2
+User #1000
+ErrorLog /usr/lib/python2.5/site-packages/cproot/trunk/cherrypy/test/mod_fastcgi.error.log
+
+DocumentRoot "/usr/lib/python2.5/site-packages/cproot/trunk/cherrypy/test"
+ServerName 127.0.0.1
+Listen 8080
+LoadModule fastcgi_module modules/mod_fastcgi.so
+LoadModule rewrite_module modules/mod_rewrite.so
+
+Options +ExecCGI
+SetHandler fastcgi-script
+RewriteEngine On
+RewriteRule ^(.*)$ /fastcgi.pyc [L]
+FastCgiExternalServer "/usr/lib/python2.5/site-packages/cproot/trunk/cherrypy/test/fastcgi.pyc" -host 127.0.0.1:4000
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/fcgi.conf b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/fcgi.conf
new file mode 100644
index 0000000000000000000000000000000000000000..3062eb35b2c0eea697f87beecc5c4fdd3a68089c
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/fcgi.conf
@@ -0,0 +1,14 @@
+
+# Apache2 server conf file for testing CherryPy with mod_fcgid.
+
+DocumentRoot "/usr/lib/python2.6/site-packages/cproot/trunk/cherrypy/test"
+ServerName 127.0.0.1
+Listen 8080
+LoadModule fastcgi_module modules/mod_fastcgi.dll
+LoadModule rewrite_module modules/mod_rewrite.so
+
+Options ExecCGI
+SetHandler fastcgi-script
+RewriteEngine On
+RewriteRule ^(.*)$ /fastcgi.pyc [L]
+FastCgiExternalServer "/usr/lib/python2.6/site-packages/cproot/trunk/cherrypy/test/fastcgi.pyc" -host 127.0.0.1:4000
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/helper.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/helper.py
new file mode 100644
index 0000000000000000000000000000000000000000..c1ca453536450bfa124fad3cc4f9978826e854d7
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/helper.py
@@ -0,0 +1,519 @@
+"""A library of helper functions for the CherryPy test suite."""
+
+import datetime
+import io
+import logging
+import os
+import re
+import subprocess
+import sys
+import time
+import unittest
+import warnings
+import contextlib
+
+import portend
+import pytest
+
+from cheroot.test import webtest
+
+import cherrypy
+from cherrypy._cpcompat import text_or_bytes, HTTPSConnection, ntob
+from cherrypy.lib import httputil
+from cherrypy.lib import gctools
+
+log = logging.getLogger(__name__)
+thisdir = os.path.abspath(os.path.dirname(__file__))
+serverpem = os.path.join(os.getcwd(), thisdir, 'test.pem')
+
+
+class Supervisor(object):
+
+ """Base class for modeling and controlling servers during testing."""
+
+ def __init__(self, **kwargs):
+ for k, v in kwargs.items():
+ if k == 'port':
+ setattr(self, k, int(v))
+ setattr(self, k, v)
+
+
+def log_to_stderr(msg, level):
+ return sys.stderr.write(msg + os.linesep)
+
+
+class LocalSupervisor(Supervisor):
+
+ """Base class for modeling/controlling servers which run in the same
+ process.
+
+ When the server side runs in a different process, start/stop can dump all
+ state between each test module easily. When the server side runs in the
+ same process as the client, however, we have to do a bit more work to
+ ensure config and mounted apps are reset between tests.
+ """
+
+ using_apache = False
+ using_wsgi = False
+
+ def __init__(self, **kwargs):
+ for k, v in kwargs.items():
+ setattr(self, k, v)
+
+ cherrypy.server.httpserver = self.httpserver_class
+
+ # This is perhaps the wrong place for this call but this is the only
+        # place that I've found so far that I KNOW is early enough to set this.
+ cherrypy.config.update({'log.screen': False})
+ engine = cherrypy.engine
+ if hasattr(engine, 'signal_handler'):
+ engine.signal_handler.subscribe()
+ if hasattr(engine, 'console_control_handler'):
+ engine.console_control_handler.subscribe()
+
+ def start(self, modulename=None):
+ """Load and start the HTTP server."""
+ if modulename:
+ # Unhook httpserver so cherrypy.server.start() creates a new
+ # one (with config from setup_server, if declared).
+ cherrypy.server.httpserver = None
+
+ cherrypy.engine.start()
+
+ self.sync_apps()
+
+ def sync_apps(self):
+ """Tell the server about any apps which the setup functions mounted."""
+ pass
+
+ def stop(self):
+ td = getattr(self, 'teardown', None)
+ if td:
+ td()
+
+ cherrypy.engine.exit()
+
+ for name, server in getattr(cherrypy, 'servers', {}).copy().items():
+ server.unsubscribe()
+ del cherrypy.servers[name]
+
+
+class NativeServerSupervisor(LocalSupervisor):
+
+ """Server supervisor for the builtin HTTP server."""
+
+ httpserver_class = 'cherrypy._cpnative_server.CPHTTPServer'
+ using_apache = False
+ using_wsgi = False
+
+ def __str__(self):
+ return 'Builtin HTTP Server on %s:%s' % (self.host, self.port)
+
+
+class LocalWSGISupervisor(LocalSupervisor):
+
+ """Server supervisor for the builtin WSGI server."""
+
+ httpserver_class = 'cherrypy._cpwsgi_server.CPWSGIServer'
+ using_apache = False
+ using_wsgi = True
+
+ def __str__(self):
+ return 'Builtin WSGI Server on %s:%s' % (self.host, self.port)
+
+ def sync_apps(self):
+ """Hook a new WSGI app into the origin server."""
+ cherrypy.server.httpserver.wsgi_app = self.get_app()
+
+ def get_app(self, app=None):
+ """Obtain a new (decorated) WSGI app to hook into the origin server."""
+ if app is None:
+ app = cherrypy.tree
+
+ if self.validate:
+ try:
+ from wsgiref import validate
+ except ImportError:
+ warnings.warn(
+ 'Error importing wsgiref. The validator will not run.')
+ else:
+ # wraps the app in the validator
+ app = validate.validator(app)
+
+ return app
+
+
+def get_cpmodpy_supervisor(**options):
+ from cherrypy.test import modpy
+ sup = modpy.ModPythonSupervisor(**options)
+ sup.template = modpy.conf_cpmodpy
+ return sup
+
+
+def get_modpygw_supervisor(**options):
+ from cherrypy.test import modpy
+ sup = modpy.ModPythonSupervisor(**options)
+ sup.template = modpy.conf_modpython_gateway
+ sup.using_wsgi = True
+ return sup
+
+
+def get_modwsgi_supervisor(**options):
+ from cherrypy.test import modwsgi
+ return modwsgi.ModWSGISupervisor(**options)
+
+
+def get_modfcgid_supervisor(**options):
+ from cherrypy.test import modfcgid
+ return modfcgid.ModFCGISupervisor(**options)
+
+
+def get_modfastcgi_supervisor(**options):
+ from cherrypy.test import modfastcgi
+ return modfastcgi.ModFCGISupervisor(**options)
+
+
+def get_wsgi_u_supervisor(**options):
+ cherrypy.server.wsgi_version = ('u', 0)
+ return LocalWSGISupervisor(**options)
+
+
+class CPWebCase(webtest.WebCase):
+
+ script_name = ''
+ scheme = 'http'
+
+ available_servers = {'wsgi': LocalWSGISupervisor,
+ 'wsgi_u': get_wsgi_u_supervisor,
+ 'native': NativeServerSupervisor,
+ 'cpmodpy': get_cpmodpy_supervisor,
+ 'modpygw': get_modpygw_supervisor,
+ 'modwsgi': get_modwsgi_supervisor,
+ 'modfcgid': get_modfcgid_supervisor,
+ 'modfastcgi': get_modfastcgi_supervisor,
+ }
+ default_server = 'wsgi'
+
+ @classmethod
+ def _setup_server(cls, supervisor, conf):
+ v = sys.version.split()[0]
+ log.info('Python version used to run this test script: %s' % v)
+ log.info('CherryPy version: %s' % cherrypy.__version__)
+ if supervisor.scheme == 'https':
+ ssl = ' (ssl)'
+ else:
+ ssl = ''
+ log.info('HTTP server version: %s%s' % (supervisor.protocol, ssl))
+ log.info('PID: %s' % os.getpid())
+
+ cherrypy.server.using_apache = supervisor.using_apache
+ cherrypy.server.using_wsgi = supervisor.using_wsgi
+
+ if sys.platform[:4] == 'java':
+ cherrypy.config.update({'server.nodelay': False})
+
+ if isinstance(conf, text_or_bytes):
+ parser = cherrypy.lib.reprconf.Parser()
+ conf = parser.dict_from_file(conf).get('global', {})
+ else:
+ conf = conf or {}
+ baseconf = conf.copy()
+ baseconf.update({'server.socket_host': supervisor.host,
+ 'server.socket_port': supervisor.port,
+ 'server.protocol_version': supervisor.protocol,
+ 'environment': 'test_suite',
+ })
+ if supervisor.scheme == 'https':
+ # baseconf['server.ssl_module'] = 'builtin'
+ baseconf['server.ssl_certificate'] = serverpem
+ baseconf['server.ssl_private_key'] = serverpem
+
+ # helper must be imported lazily so the coverage tool
+ # can run against module-level statements within cherrypy.
+ # Also, we have to do "from cherrypy.test import helper",
+ # exactly like each test module does, because a relative import
+ # would stick a second instance of webtest in sys.modules,
+ # and we wouldn't be able to globally override the port anymore.
+ if supervisor.scheme == 'https':
+ webtest.WebCase.HTTP_CONN = HTTPSConnection
+ return baseconf
+
+ @classmethod
+ def setup_class(cls):
+ ''
+ # Creates a server
+ conf = {
+ 'scheme': 'http',
+ 'protocol': 'HTTP/1.1',
+ 'port': 54583,
+ 'host': '127.0.0.1',
+ 'validate': False,
+ 'server': 'wsgi',
+ }
+ supervisor_factory = cls.available_servers.get(
+ conf.get('server', 'wsgi'))
+ if supervisor_factory is None:
+ raise RuntimeError('Unknown server in config: %s' % conf['server'])
+ supervisor = supervisor_factory(**conf)
+
+ # Copied from "run_test_suite"
+ cherrypy.config.reset()
+ baseconf = cls._setup_server(supervisor, conf)
+ cherrypy.config.update(baseconf)
+ setup_client()
+
+ if hasattr(cls, 'setup_server'):
+ # Clear the cherrypy tree and clear the wsgi server so that
+ # it can be updated with the new root
+ cherrypy.tree = cherrypy._cptree.Tree()
+ cherrypy.server.httpserver = None
+ cls.setup_server()
+ # Add a resource for verifying there are no refleaks
+ # to *every* test class.
+ cherrypy.tree.mount(gctools.GCRoot(), '/gc')
+ cls.do_gc_test = True
+ supervisor.start(cls.__module__)
+
+ cls.supervisor = supervisor
+
+ @classmethod
+ def teardown_class(cls):
+ ''
+ if hasattr(cls, 'setup_server'):
+ cls.supervisor.stop()
+
+ do_gc_test = False
+
+ def test_gc(self):
+ if not self.do_gc_test:
+ return
+
+ self.getPage('/gc/stats')
+ try:
+ self.assertBody('Statistics:')
+ except Exception:
+ 'Failures occur intermittently. See #1420'
+
+ def prefix(self):
+ return self.script_name.rstrip('/')
+
+ def base(self):
+ if ((self.scheme == 'http' and self.PORT == 80) or
+ (self.scheme == 'https' and self.PORT == 443)):
+ port = ''
+ else:
+ port = ':%s' % self.PORT
+
+ return '%s://%s%s%s' % (self.scheme, self.HOST, port,
+ self.script_name.rstrip('/'))
+
+ def exit(self):
+ sys.exit()
+
+ def getPage(self, url, *args, **kwargs):
+ """Open the url.
+ """
+ if self.script_name:
+ url = httputil.urljoin(self.script_name, url)
+ return webtest.WebCase.getPage(self, url, *args, **kwargs)
+
+ def skip(self, msg='skipped '):
+ pytest.skip(msg)
+
+ def assertErrorPage(self, status, message=None, pattern=''):
+ """Compare the response body with a built in error page.
+
+ The function will optionally look for the regexp pattern,
+ within the exception embedded in the error page."""
+
+ # This will never contain a traceback
+ page = cherrypy._cperror.get_error_page(status, message=message)
+
+ # First, test the response body without checking the traceback.
+ # Stick a match-all group (.*) in to grab the traceback.
+ def esc(text):
+ return re.escape(ntob(text))
+ epage = re.escape(page)
+ epage = epage.replace(
+ esc('<pre id="traceback"></pre>'),
+ esc('<pre id="traceback">') + b'(.*)' + esc('</pre>'))
+ m = re.match(epage, self.body, re.DOTALL)
+ if not m:
+ self._handlewebError(
+ 'Error page does not match; expected:\n' + page)
+ return
+
+ # Now test the pattern against the traceback
+ if pattern is None:
+ # Special-case None to mean that there should be *no* traceback.
+ if m and m.group(1):
+ self._handlewebError('Error page contains traceback')
+ else:
+ if (m is None) or (
+ not re.search(ntob(re.escape(pattern), self.encoding),
+ m.group(1))):
+ msg = 'Error page does not contain %s in traceback'
+ self._handlewebError(msg % repr(pattern))
+
+ date_tolerance = 2
+
+ def assertEqualDates(self, dt1, dt2, seconds=None):
+ """Assert abs(dt1 - dt2) is within Y seconds."""
+ if seconds is None:
+ seconds = self.date_tolerance
+
+ if dt1 > dt2:
+ diff = dt1 - dt2
+ else:
+ diff = dt2 - dt1
+ if not diff < datetime.timedelta(seconds=seconds):
+ raise AssertionError('%r and %r are not within %r seconds.' %
+ (dt1, dt2, seconds))
+
+
+def _test_method_sorter(_, x, y):
+ """Monkeypatch the test sorter to always run test_gc last in each suite."""
+ if x == 'test_gc':
+ return 1
+ if y == 'test_gc':
+ return -1
+ if x > y:
+ return 1
+ if x < y:
+ return -1
+ return 0
+
+
+unittest.TestLoader.sortTestMethodsUsing = _test_method_sorter
+
+
+def setup_client():
+ """Set up the WebCase classes to match the server's socket settings."""
+ webtest.WebCase.PORT = cherrypy.server.socket_port
+ webtest.WebCase.HOST = cherrypy.server.socket_host
+ if cherrypy.server.ssl_certificate:
+ CPWebCase.scheme = 'https'
+
+# --------------------------- Spawning helpers --------------------------- #
+
+
+class CPProcess(object):
+
+ pid_file = os.path.join(thisdir, 'test.pid')
+ config_file = os.path.join(thisdir, 'test.conf')
+ config_template = """[global]
+server.socket_host: '%(host)s'
+server.socket_port: %(port)s
+checker.on: False
+log.screen: False
+log.error_file: r'%(error_log)s'
+log.access_file: r'%(access_log)s'
+%(ssl)s
+%(extra)s
+"""
+ error_log = os.path.join(thisdir, 'test.error.log')
+ access_log = os.path.join(thisdir, 'test.access.log')
+
+ def __init__(self, wait=False, daemonize=False, ssl=False,
+ socket_host=None, socket_port=None):
+ self.wait = wait
+ self.daemonize = daemonize
+ self.ssl = ssl
+ self.host = socket_host or cherrypy.server.socket_host
+ self.port = socket_port or cherrypy.server.socket_port
+
+ def write_conf(self, extra=''):
+ if self.ssl:
+ serverpem = os.path.join(thisdir, 'test.pem')
+ ssl = """
+server.ssl_certificate: r'%s'
+server.ssl_private_key: r'%s'
+""" % (serverpem, serverpem)
+ else:
+ ssl = ''
+
+ conf = self.config_template % {
+ 'host': self.host,
+ 'port': self.port,
+ 'error_log': self.error_log,
+ 'access_log': self.access_log,
+ 'ssl': ssl,
+ 'extra': extra,
+ }
+ with io.open(self.config_file, 'w', encoding='utf-8') as f:
+ f.write(str(conf))
+
+ def start(self, imports=None):
+ """Start cherryd in a subprocess."""
+ portend.free(self.host, self.port, timeout=1)
+
+ args = [
+ '-m',
+ 'cherrypy',
+ '-c', self.config_file,
+ '-p', self.pid_file,
+ ]
+ r"""
+ Command for running cherryd server with autoreload enabled
+
+ Using
+
+ ```
+ ['-c',
+ "__requires__ = 'CherryPy'; \
+ import pkg_resources, re, sys; \
+ sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]); \
+ sys.exit(\
+ pkg_resources.load_entry_point(\
+ 'CherryPy', 'console_scripts', 'cherryd')())"]
+ ```
+
+ doesn't work as it's impossible to reconstruct the `-c`'s contents.
+ Ref: https://github.com/cherrypy/cherrypy/issues/1545
+ """
+
+ if not isinstance(imports, (list, tuple)):
+ imports = [imports]
+ for i in imports:
+ if i:
+ args.append('-i')
+ args.append(i)
+
+ if self.daemonize:
+ args.append('-d')
+
+ env = os.environ.copy()
+ # Make sure we import the cherrypy package in which this module is
+ # defined.
+ grandparentdir = os.path.abspath(os.path.join(thisdir, '..', '..'))
+ if env.get('PYTHONPATH', ''):
+ env['PYTHONPATH'] = os.pathsep.join(
+ (grandparentdir, env['PYTHONPATH']))
+ else:
+ env['PYTHONPATH'] = grandparentdir
+ self._proc = subprocess.Popen([sys.executable] + args, env=env)
+ if self.wait:
+ self.exit_code = self._proc.wait()
+ else:
+ portend.occupied(self.host, self.port, timeout=5)
+
+ # Give the engine a wee bit more time to finish STARTING
+ if self.daemonize:
+ time.sleep(2)
+ else:
+ time.sleep(1)
+
+ def get_pid(self):
+ if self.daemonize:
+ return int(open(self.pid_file, 'rb').read())
+ return self._proc.pid
+
+ def join(self):
+ """Wait for the process to exit."""
+ if self.daemonize:
+ return self._join_daemon()
+ self._proc.wait()
+
+ def _join_daemon(self):
+ with contextlib.suppress(IOError):
+ os.waitpid(self.get_pid(), 0)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/logtest.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/logtest.py
new file mode 100644
index 0000000000000000000000000000000000000000..344be98779f5dc9ef9476d9a676d6d4e2cdc2020
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/logtest.py
@@ -0,0 +1,231 @@
+"""logtest, a unittest.TestCase helper for testing log output."""
+
+import sys
+import time
+from uuid import UUID
+
+import pytest
+
+from cherrypy._cpcompat import text_or_bytes
+
+
+try:
+ # On Windows, msvcrt.getch reads a single char without output.
+ import msvcrt
+
+ def getchar():
+ return msvcrt.getch()
+except ImportError:
+    # Unix getchar
+ import tty
+ import termios
+
+ def getchar():
+ fd = sys.stdin.fileno()
+ old_settings = termios.tcgetattr(fd)
+ try:
+ tty.setraw(sys.stdin.fileno())
+ ch = sys.stdin.read(1)
+ finally:
+ termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
+ return ch
+
+
+class LogCase(object):
+
+ """unittest.TestCase mixin for testing log messages.
+
+ logfile: a filename for the desired log. Yes, I know modes are evil,
+ but it makes the test functions so much cleaner to set this once.
+
+ lastmarker: the last marker in the log. This can be used to search for
+ messages since the last marker.
+
+ markerPrefix: a string with which to prefix log markers. This should be
+ unique enough from normal log output to use for marker identification.
+ """
+
+ interactive = False
+ logfile = None
+ lastmarker = None
+ markerPrefix = b'test suite marker: '
+
+ def _handleLogError(self, msg, data, marker, pattern):
+ print('')
+ print(' ERROR: %s' % msg)
+
+ if not self.interactive:
+ raise pytest.fail(msg)
+
+ p = (' Show: '
+ '[L]og [M]arker [P]attern; '
+ '[I]gnore, [R]aise, or sys.e[X]it >> ')
+ sys.stdout.write(p + ' ')
+ # ARGH
+ sys.stdout.flush()
+ while True:
+ i = getchar().upper()
+ if i not in 'MPLIRX':
+ continue
+ print(i.upper()) # Also prints new line
+ if i == 'L':
+ for x, line in enumerate(data):
+ if (x + 1) % self.console_height == 0:
+ # The \r and comma should make the next line overwrite
+ sys.stdout.write('<-- More -->\r ')
+ m = getchar().lower()
+ # Erase our "More" prompt
+ sys.stdout.write(' \r ')
+ if m == 'q':
+ break
+ print(line.rstrip())
+ elif i == 'M':
+ print(repr(marker or self.lastmarker))
+ elif i == 'P':
+ print(repr(pattern))
+ elif i == 'I':
+ # return without raising the normal exception
+ return
+ elif i == 'R':
+ raise pytest.fail(msg)
+ elif i == 'X':
+ self.exit()
+ sys.stdout.write(p + ' ')
+
+ def exit(self):
+ sys.exit()
+
+ def emptyLog(self):
+ """Overwrite self.logfile with 0 bytes."""
+ open(self.logfile, 'wb').write('')
+
+ def markLog(self, key=None):
+ """Insert a marker line into the log and set self.lastmarker."""
+ if key is None:
+ key = str(time.time())
+ self.lastmarker = key
+
+ open(self.logfile, 'ab+').write(
+ b'%s%s\n'
+ % (self.markerPrefix, key.encode('utf-8'))
+ )
+
+ def _read_marked_region(self, marker=None):
+ """Return lines from self.logfile in the marked region.
+
+ If marker is None, self.lastmarker is used. If the log hasn't
+ been marked (using self.markLog), the entire log will be returned.
+ """
+# Give the logger time to finish writing?
+# time.sleep(0.5)
+
+ logfile = self.logfile
+ marker = marker or self.lastmarker
+ if marker is None:
+ return open(logfile, 'rb').readlines()
+
+ if isinstance(marker, str):
+ marker = marker.encode('utf-8')
+ data = []
+ in_region = False
+ for line in open(logfile, 'rb'):
+ if in_region:
+ if line.startswith(self.markerPrefix) and marker not in line:
+ break
+ else:
+ data.append(line)
+ elif marker in line:
+ in_region = True
+ return data
+
+ def assertInLog(self, line, marker=None):
+ """Fail if the given (partial) line is not in the log.
+
+ The log will be searched from the given marker to the next marker.
+ If marker is None, self.lastmarker is used. If the log hasn't
+ been marked (using self.markLog), the entire log will be searched.
+ """
+ data = self._read_marked_region(marker)
+ for logline in data:
+ if line in logline:
+ return
+ msg = '%r not found in log' % line
+ self._handleLogError(msg, data, marker, line)
+
+ def assertNotInLog(self, line, marker=None):
+ """Fail if the given (partial) line is in the log.
+
+ The log will be searched from the given marker to the next marker.
+ If marker is None, self.lastmarker is used. If the log hasn't
+ been marked (using self.markLog), the entire log will be searched.
+ """
+ data = self._read_marked_region(marker)
+ for logline in data:
+ if line in logline:
+ msg = '%r found in log' % line
+ self._handleLogError(msg, data, marker, line)
+
+ def assertValidUUIDv4(self, marker=None):
+ """Fail if the given UUIDv4 is not valid.
+
+ The log will be searched from the given marker to the next marker.
+ If marker is None, self.lastmarker is used. If the log hasn't
+ been marked (using self.markLog), the entire log will be searched.
+ """
+ data = self._read_marked_region(marker)
+ data = [
+ chunk.decode('utf-8').rstrip('\n').rstrip('\r')
+ for chunk in data
+ ]
+ for log_chunk in data:
+ try:
+ uuid_log = data[-1]
+ uuid_obj = UUID(uuid_log, version=4)
+ except (TypeError, ValueError):
+                pass  # it might be in another chunk
+ else:
+ if str(uuid_obj) == uuid_log:
+ return
+ msg = '%r is not a valid UUIDv4' % uuid_log
+ self._handleLogError(msg, data, marker, log_chunk)
+
+ msg = 'UUIDv4 not found in log'
+ self._handleLogError(msg, data, marker, log_chunk)
+
+ def assertLog(self, sliceargs, lines, marker=None):
+ """Fail if log.readlines()[sliceargs] is not contained in 'lines'.
+
+ The log will be searched from the given marker to the next marker.
+ If marker is None, self.lastmarker is used. If the log hasn't
+ been marked (using self.markLog), the entire log will be searched.
+ """
+ data = self._read_marked_region(marker)
+ if isinstance(sliceargs, int):
+ # Single arg. Use __getitem__ and allow lines to be str or list.
+ if isinstance(lines, (tuple, list)):
+ lines = lines[0]
+ if isinstance(lines, str):
+ lines = lines.encode('utf-8')
+ if lines not in data[sliceargs]:
+ msg = '%r not found on log line %r' % (lines, sliceargs)
+ self._handleLogError(
+ msg,
+ [data[sliceargs], '--EXTRA CONTEXT--'] + data[
+ sliceargs + 1:sliceargs + 6],
+ marker,
+ lines)
+ else:
+ # Multiple args. Use __getslice__ and require lines to be list.
+ if isinstance(lines, tuple):
+ lines = list(lines)
+ elif isinstance(lines, text_or_bytes):
+ raise TypeError("The 'lines' arg must be a list when "
+ "'sliceargs' is a tuple.")
+
+ start, stop = sliceargs
+ for line, logline in zip(lines, data[start:stop]):
+ if isinstance(line, str):
+ line = line.encode('utf-8')
+ if line not in logline:
+ msg = '%r not found in log' % line
+ self._handleLogError(msg, data[start:stop], marker, line)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/modfastcgi.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/modfastcgi.py
new file mode 100644
index 0000000000000000000000000000000000000000..79ec3d1827e5c1294569459f1f94cac6a434b22b
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/modfastcgi.py
@@ -0,0 +1,136 @@
+"""Wrapper for mod_fastcgi, for use as a CherryPy HTTP server when testing.
+
+To autostart fastcgi, the "apache" executable or script must be
+on your system path, or you must override the global APACHE_PATH.
+On some platforms, "apache" may be called "apachectl", "apache2ctl",
+or "httpd"--create a symlink to them if needed.
+
+You'll also need the WSGIServer from flup.servers.
+See http://projects.amor.org/misc/wiki/ModPythonGateway
+
+
+KNOWN BUGS
+==========
+
+1. Apache processes Range headers automatically; CherryPy's truncated
+ output is then truncated again by Apache. See test_core.testRanges.
+ This was worked around in http://www.cherrypy.org/changeset/1319.
+2. Apache does not allow custom HTTP methods like CONNECT as per the spec.
+ See test_core.testHTTPMethods.
+3. Max request header and body settings do not work with Apache.
+4. Apache replaces status "reason phrases" automatically. For example,
+ CherryPy may set "304 Not modified" but Apache will write out
+ "304 Not Modified" (capital "M").
+5. Apache does not allow custom error codes as per the spec.
+6. Apache (or perhaps modpython, or modpython_gateway) unquotes %xx in the
+ Request-URI too early.
+7. mod_python will not read request bodies which use the "chunked"
+ transfer-coding (it passes REQUEST_CHUNKED_ERROR to ap_setup_client_block
+ instead of REQUEST_CHUNKED_DECHUNK, see Apache2's http_protocol.c and
+ mod_python's requestobject.c).
+8. Apache will output a "Content-Length: 0" response header even if there's
+ no response entity body. This isn't really a bug; it just differs from
+ the CherryPy default.
+"""
+
+import os
+import re
+
+import cherrypy
+from cherrypy.process import servers
+from cherrypy.test import helper
+
+curdir = os.path.join(os.getcwd(), os.path.dirname(__file__))
+
+
+def read_process(cmd, args=''):
+ pipein, pipeout = os.popen4('%s %s' % (cmd, args))
+ try:
+ firstline = pipeout.readline()
+ if (re.search(r'(not recognized|No such file|not found)', firstline,
+ re.IGNORECASE)):
+ raise IOError('%s must be on your system path.' % cmd)
+ output = firstline + pipeout.read()
+ finally:
+ pipeout.close()
+ return output
+
+
+APACHE_PATH = 'apache2ctl'
+CONF_PATH = 'fastcgi.conf'
+
+conf_fastcgi = """
+# Apache2 server conf file for testing CherryPy with mod_fastcgi.
+# fumanchu: I had to hard-code paths due to crazy Debian layouts :(
+ServerRoot /usr/lib/apache2
+User #1000
+ErrorLog %(root)s/mod_fastcgi.error.log
+
+DocumentRoot "%(root)s"
+ServerName 127.0.0.1
+Listen %(port)s
+LoadModule fastcgi_module modules/mod_fastcgi.so
+LoadModule rewrite_module modules/mod_rewrite.so
+
+Options +ExecCGI
+SetHandler fastcgi-script
+RewriteEngine On
+RewriteRule ^(.*)$ /fastcgi.pyc [L]
+FastCgiExternalServer "%(server)s" -host 127.0.0.1:4000
+"""
+
+
+def erase_script_name(environ, start_response):
+ environ['SCRIPT_NAME'] = ''
+ return cherrypy.tree(environ, start_response)
+
+
+class ModFCGISupervisor(helper.LocalWSGISupervisor):
+
+ httpserver_class = 'cherrypy.process.servers.FlupFCGIServer'
+ using_apache = True
+ using_wsgi = True
+ template = conf_fastcgi
+
+ def __str__(self):
+ return 'FCGI Server on %s:%s' % (self.host, self.port)
+
+ def start(self, modulename):
+ cherrypy.server.httpserver = servers.FlupFCGIServer(
+ application=erase_script_name, bindAddress=('127.0.0.1', 4000))
+ cherrypy.server.httpserver.bind_addr = ('127.0.0.1', 4000)
+ cherrypy.server.socket_port = 4000
+ # For FCGI, we both start apache...
+ self.start_apache()
+ # ...and our local server
+ cherrypy.engine.start()
+ self.sync_apps()
+
+ def start_apache(self):
+ fcgiconf = CONF_PATH
+ if not os.path.isabs(fcgiconf):
+ fcgiconf = os.path.join(curdir, fcgiconf)
+
+ # Write the Apache conf file.
+ f = open(fcgiconf, 'wb')
+ try:
+ server = repr(os.path.join(curdir, 'fastcgi.pyc'))[1:-1]
+ output = self.template % {'port': self.port, 'root': curdir,
+ 'server': server}
+ output = output.replace('\r\n', '\n')
+ f.write(output)
+ finally:
+ f.close()
+
+ result = read_process(APACHE_PATH, '-k start -f %s' % fcgiconf)
+ if result:
+ print(result)
+
+ def stop(self):
+ """Gracefully shutdown a server that is serving forever."""
+ read_process(APACHE_PATH, '-k stop')
+ helper.LocalWSGISupervisor.stop(self)
+
+ def sync_apps(self):
+ cherrypy.server.httpserver.fcgiserver.application = self.get_app(
+ erase_script_name)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/modfcgid.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/modfcgid.py
new file mode 100644
index 0000000000000000000000000000000000000000..d101bd67f216b319ab6ac51d36e22b1257c9aee3
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/modfcgid.py
@@ -0,0 +1,124 @@
+"""Wrapper for mod_fcgid, for use as a CherryPy HTTP server when testing.
+
+To autostart fcgid, the "apache" executable or script must be
+on your system path, or you must override the global APACHE_PATH.
+On some platforms, "apache" may be called "apachectl", "apache2ctl",
+or "httpd"--create a symlink to them if needed.
+
+You'll also need the WSGIServer from flup.servers.
+See http://projects.amor.org/misc/wiki/ModPythonGateway
+
+
+KNOWN BUGS
+==========
+
+1. Apache processes Range headers automatically; CherryPy's truncated
+ output is then truncated again by Apache. See test_core.testRanges.
+ This was worked around in http://www.cherrypy.org/changeset/1319.
+2. Apache does not allow custom HTTP methods like CONNECT as per the spec.
+ See test_core.testHTTPMethods.
+3. Max request header and body settings do not work with Apache.
+4. Apache replaces status "reason phrases" automatically. For example,
+ CherryPy may set "304 Not modified" but Apache will write out
+ "304 Not Modified" (capital "M").
+5. Apache does not allow custom error codes as per the spec.
+6. Apache (or perhaps modpython, or modpython_gateway) unquotes %xx in the
+ Request-URI too early.
+7. mod_python will not read request bodies which use the "chunked"
+ transfer-coding (it passes REQUEST_CHUNKED_ERROR to ap_setup_client_block
+ instead of REQUEST_CHUNKED_DECHUNK, see Apache2's http_protocol.c and
+ mod_python's requestobject.c).
+8. Apache will output a "Content-Length: 0" response header even if there's
+ no response entity body. This isn't really a bug; it just differs from
+ the CherryPy default.
+"""
+
+import os
+import re
+
+import cherrypy
+from cherrypy._cpcompat import ntob
+from cherrypy.process import servers
+from cherrypy.test import helper
+
+curdir = os.path.join(os.getcwd(), os.path.dirname(__file__))
+
+
+def read_process(cmd, args=''):
+ pipein, pipeout = os.popen4('%s %s' % (cmd, args))
+ try:
+ firstline = pipeout.readline()
+ if (re.search(r'(not recognized|No such file|not found)', firstline,
+ re.IGNORECASE)):
+ raise IOError('%s must be on your system path.' % cmd)
+ output = firstline + pipeout.read()
+ finally:
+ pipeout.close()
+ return output
+
+
+APACHE_PATH = 'httpd'
+CONF_PATH = 'fcgi.conf'
+
+conf_fcgid = """
+# Apache2 server conf file for testing CherryPy with mod_fcgid.
+
+DocumentRoot "%(root)s"
+ServerName 127.0.0.1
+Listen %(port)s
+LoadModule fastcgi_module modules/mod_fastcgi.dll
+LoadModule rewrite_module modules/mod_rewrite.so
+
+Options ExecCGI
+SetHandler fastcgi-script
+RewriteEngine On
+RewriteRule ^(.*)$ /fastcgi.pyc [L]
+FastCgiExternalServer "%(server)s" -host 127.0.0.1:4000
+"""
+
+
+class ModFCGISupervisor(helper.LocalSupervisor):
+
+ using_apache = True
+ using_wsgi = True
+ template = conf_fcgid
+
+ def __str__(self):
+ return 'FCGI Server on %s:%s' % (self.host, self.port)
+
+ def start(self, modulename):
+ cherrypy.server.httpserver = servers.FlupFCGIServer(
+ application=cherrypy.tree, bindAddress=('127.0.0.1', 4000))
+ cherrypy.server.httpserver.bind_addr = ('127.0.0.1', 4000)
+ # For FCGI, we both start apache...
+ self.start_apache()
+ # ...and our local server
+ helper.LocalServer.start(self, modulename)
+
+ def start_apache(self):
+ fcgiconf = CONF_PATH
+ if not os.path.isabs(fcgiconf):
+ fcgiconf = os.path.join(curdir, fcgiconf)
+
+ # Write the Apache conf file.
+ f = open(fcgiconf, 'wb')
+ try:
+ server = repr(os.path.join(curdir, 'fastcgi.pyc'))[1:-1]
+ output = self.template % {'port': self.port, 'root': curdir,
+ 'server': server}
+ output = ntob(output.replace('\r\n', '\n'))
+ f.write(output)
+ finally:
+ f.close()
+
+ result = read_process(APACHE_PATH, '-k start -f %s' % fcgiconf)
+ if result:
+ print(result)
+
+ def stop(self):
+ """Gracefully shutdown a server that is serving forever."""
+ read_process(APACHE_PATH, '-k stop')
+ helper.LocalServer.stop(self)
+
+ def sync_apps(self):
+ cherrypy.server.httpserver.fcgiserver.application = self.get_app()
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/modpy.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/modpy.py
new file mode 100644
index 0000000000000000000000000000000000000000..7c288d2c078410f75960adefd2a354ec0bd3c011
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/modpy.py
@@ -0,0 +1,164 @@
+"""Wrapper for mod_python, for use as a CherryPy HTTP server when testing.
+
+To autostart modpython, the "apache" executable or script must be
+on your system path, or you must override the global APACHE_PATH.
+On some platforms, "apache" may be called "apachectl" or "apache2ctl"--
+create a symlink to them if needed.
+
+If you wish to test the WSGI interface instead of our _cpmodpy interface,
+you also need the 'modpython_gateway' module at:
+http://projects.amor.org/misc/wiki/ModPythonGateway
+
+
+KNOWN BUGS
+==========
+
+1. Apache processes Range headers automatically; CherryPy's truncated
+ output is then truncated again by Apache. See test_core.testRanges.
+ This was worked around in http://www.cherrypy.org/changeset/1319.
+2. Apache does not allow custom HTTP methods like CONNECT as per the spec.
+ See test_core.testHTTPMethods.
+3. Max request header and body settings do not work with Apache.
+4. Apache replaces status "reason phrases" automatically. For example,
+ CherryPy may set "304 Not modified" but Apache will write out
+ "304 Not Modified" (capital "M").
+5. Apache does not allow custom error codes as per the spec.
+6. Apache (or perhaps modpython, or modpython_gateway) unquotes %xx in the
+ Request-URI too early.
+7. mod_python will not read request bodies which use the "chunked"
+ transfer-coding (it passes REQUEST_CHUNKED_ERROR to ap_setup_client_block
+ instead of REQUEST_CHUNKED_DECHUNK, see Apache2's http_protocol.c and
+ mod_python's requestobject.c).
+8. Apache will output a "Content-Length: 0" response header even if there's
+ no response entity body. This isn't really a bug; it just differs from
+ the CherryPy default.
+"""
+
+import os
+import re
+
+import cherrypy
+from cherrypy.test import helper
+
+curdir = os.path.join(os.getcwd(), os.path.dirname(__file__))
+
+
+def read_process(cmd, args=''):
+ pipein, pipeout = os.popen4('%s %s' % (cmd, args))
+ try:
+ firstline = pipeout.readline()
+ if (re.search(r'(not recognized|No such file|not found)', firstline,
+ re.IGNORECASE)):
+ raise IOError('%s must be on your system path.' % cmd)
+ output = firstline + pipeout.read()
+ finally:
+ pipeout.close()
+ return output
+
+
+APACHE_PATH = 'httpd'
+CONF_PATH = 'test_mp.conf'
+
+conf_modpython_gateway = """
+# Apache2 server conf file for testing CherryPy with modpython_gateway.
+
+ServerName 127.0.0.1
+DocumentRoot "/"
+Listen %(port)s
+LoadModule python_module modules/mod_python.so
+
+SetHandler python-program
+PythonFixupHandler cherrypy.test.modpy::wsgisetup
+PythonOption testmod %(modulename)s
+PythonHandler modpython_gateway::handler
+PythonOption wsgi.application cherrypy::tree
+PythonOption socket_host %(host)s
+PythonDebug On
+"""
+
+conf_cpmodpy = """
+# Apache2 server conf file for testing CherryPy with _cpmodpy.
+
+ServerName 127.0.0.1
+DocumentRoot "/"
+Listen %(port)s
+LoadModule python_module modules/mod_python.so
+
+SetHandler python-program
+PythonFixupHandler cherrypy.test.modpy::cpmodpysetup
+PythonHandler cherrypy._cpmodpy::handler
+PythonOption cherrypy.setup cherrypy.test.%(modulename)s::setup_server
+PythonOption socket_host %(host)s
+PythonDebug On
+"""
+
+
+class ModPythonSupervisor(helper.Supervisor):
+
+ using_apache = True
+ using_wsgi = False
+ template = None
+
+ def __str__(self):
+ return 'ModPython Server on %s:%s' % (self.host, self.port)
+
+ def start(self, modulename):
+ mpconf = CONF_PATH
+ if not os.path.isabs(mpconf):
+ mpconf = os.path.join(curdir, mpconf)
+
+ f = open(mpconf, 'wb')
+ try:
+ f.write(self.template %
+ {'port': self.port, 'modulename': modulename,
+ 'host': self.host})
+ finally:
+ f.close()
+
+ result = read_process(APACHE_PATH, '-k start -f %s' % mpconf)
+ if result:
+ print(result)
+
+ def stop(self):
+ """Gracefully shutdown a server that is serving forever."""
+ read_process(APACHE_PATH, '-k stop')
+
+
+loaded = False
+
+
+def wsgisetup(req):
+ global loaded
+ if not loaded:
+ loaded = True
+ options = req.get_options()
+
+ cherrypy.config.update({
+ 'log.error_file': os.path.join(curdir, 'test.log'),
+ 'environment': 'test_suite',
+ 'server.socket_host': options['socket_host'],
+ })
+
+ modname = options['testmod']
+ mod = __import__(modname, globals(), locals(), [''])
+ mod.setup_server()
+
+ cherrypy.server.unsubscribe()
+ cherrypy.engine.start()
+ from mod_python import apache
+ return apache.OK
+
+
+def cpmodpysetup(req):
+ global loaded
+ if not loaded:
+ loaded = True
+ options = req.get_options()
+
+ cherrypy.config.update({
+ 'log.error_file': os.path.join(curdir, 'test.log'),
+ 'environment': 'test_suite',
+ 'server.socket_host': options['socket_host'],
+ })
+ from mod_python import apache
+ return apache.OK
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/modwsgi.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/modwsgi.py
new file mode 100644
index 0000000000000000000000000000000000000000..da7d240b58994434afd648eab8d67d72024b877b
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/modwsgi.py
@@ -0,0 +1,154 @@
+"""Wrapper for mod_wsgi, for use as a CherryPy HTTP server.
+
+To autostart modwsgi, the "apache" executable or script must be
+on your system path, or you must override the global APACHE_PATH.
+On some platforms, "apache" may be called "apachectl" or "apache2ctl"--
+create a symlink to them if needed.
+
+
+KNOWN BUGS
+==========
+
+1. Apache processes Range headers automatically; CherryPy's truncated
+ output is then truncated again by Apache. See test_core.testRanges.
+ This was worked around in http://www.cherrypy.org/changeset/1319.
+2. Apache does not allow custom HTTP methods like CONNECT as per the spec.
+ See test_core.testHTTPMethods.
+3. Max request header and body settings do not work with Apache.
+4. Apache replaces status "reason phrases" automatically. For example,
+ CherryPy may set "304 Not modified" but Apache will write out
+ "304 Not Modified" (capital "M").
+5. Apache does not allow custom error codes as per the spec.
+6. Apache (or perhaps modpython, or modpython_gateway) unquotes %xx in the
+ Request-URI too early.
+7. mod_wsgi will not read request bodies which use the "chunked"
+ transfer-coding (it passes REQUEST_CHUNKED_ERROR to ap_setup_client_block
+ instead of REQUEST_CHUNKED_DECHUNK, see Apache2's http_protocol.c and
+ mod_python's requestobject.c).
+8. When responding with 204 No Content, mod_wsgi adds a Content-Length
+ header for you.
+9. When an error is raised, mod_wsgi has no facility for printing a
+ traceback as the response content (it's sent to the Apache log instead).
+10. Startup and shutdown of Apache when running mod_wsgi seems slow.
+"""
+
+import os
+import re
+import sys
+import time
+
+import portend
+
+from cheroot.test import webtest
+
+import cherrypy
+from cherrypy.test import helper
+
+curdir = os.path.abspath(os.path.dirname(__file__))
+
+
+def read_process(cmd, args=''):
+ pipein, pipeout = os.popen4('%s %s' % (cmd, args))
+ try:
+ firstline = pipeout.readline()
+ if (re.search(r'(not recognized|No such file|not found)', firstline,
+ re.IGNORECASE)):
+ raise IOError('%s must be on your system path.' % cmd)
+ output = firstline + pipeout.read()
+ finally:
+ pipeout.close()
+ return output
+
+
+if sys.platform == 'win32':
+ APACHE_PATH = 'httpd'
+else:
+ APACHE_PATH = 'apache'
+
+CONF_PATH = 'test_mw.conf'
+
+conf_modwsgi = r"""
+# Apache2 server conf file for testing CherryPy with modpython_gateway.
+
+ServerName 127.0.0.1
+DocumentRoot "/"
+Listen %(port)s
+
+AllowEncodedSlashes On
+LoadModule rewrite_module modules/mod_rewrite.so
+RewriteEngine on
+RewriteMap escaping int:escape
+
+LoadModule log_config_module modules/mod_log_config.so
+LogFormat "%%h %%l %%u %%t \"%%r\" %%>s %%b \"%%{Referer}i\" \"%%{User-agent}i\"" combined
+CustomLog "%(curdir)s/apache.access.log" combined
+ErrorLog "%(curdir)s/apache.error.log"
+LogLevel debug
+
+LoadModule wsgi_module modules/mod_wsgi.so
+LoadModule env_module modules/mod_env.so
+
+WSGIScriptAlias / "%(curdir)s/modwsgi.py"
+SetEnv testmod %(testmod)s
+""" # noqa E501
+
+
+class ModWSGISupervisor(helper.Supervisor):
+
+ """Server Controller for ModWSGI and CherryPy."""
+
+ using_apache = True
+ using_wsgi = True
+ template = conf_modwsgi
+
+ def __str__(self):
+ return 'ModWSGI Server on %s:%s' % (self.host, self.port)
+
+ def start(self, modulename):
+ mpconf = CONF_PATH
+ if not os.path.isabs(mpconf):
+ mpconf = os.path.join(curdir, mpconf)
+
+ f = open(mpconf, 'wb')
+ try:
+ output = (self.template %
+ {'port': self.port, 'testmod': modulename,
+ 'curdir': curdir})
+ f.write(output)
+ finally:
+ f.close()
+
+ result = read_process(APACHE_PATH, '-k start -f %s' % mpconf)
+ if result:
+ print(result)
+
+ # Make a request so mod_wsgi starts up our app.
+ # If we don't, concurrent initial requests will 404.
+ portend.occupied('127.0.0.1', self.port, timeout=5)
+ webtest.openURL('/ihopetheresnodefault', port=self.port)
+ time.sleep(1)
+
+ def stop(self):
+ """Gracefully shutdown a server that is serving forever."""
+ read_process(APACHE_PATH, '-k stop')
+
+
+loaded = False
+
+
+def application(environ, start_response):
+ global loaded
+ if not loaded:
+ loaded = True
+ modname = 'cherrypy.test.' + environ['testmod']
+ mod = __import__(modname, globals(), locals(), [''])
+ mod.setup_server()
+
+ cherrypy.config.update({
+ 'log.error_file': os.path.join(curdir, 'test.error.log'),
+ 'log.access_file': os.path.join(curdir, 'test.access.log'),
+ 'environment': 'test_suite',
+ 'engine.SIGHUP': None,
+ 'engine.SIGTERM': None,
+ })
+ return cherrypy.tree(environ, start_response)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/sessiondemo.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/sessiondemo.py
new file mode 100644
index 0000000000000000000000000000000000000000..3849a25937074d953eb543e440fe17e90212434b
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/sessiondemo.py
@@ -0,0 +1,159 @@
+#!/usr/bin/python
+"""A session demonstration app."""
+
+import calendar
+from datetime import datetime
+import sys
+
+import cherrypy
+from cherrypy.lib import sessions
+
+
+page = """
+<html>
+<head>
+<style type='text/css'>
+table { border-collapse: collapse; border: 1px solid #663333; }
+th { text-align: right; background-color: #663333; color: white; padding: 0.5em; }
+td { white-space: pre-wrap; font-family: monospace; padding: 0.5em;
+ border: 1px solid #663333; }
+.warn { font-family: serif; color: #990000; }
+</style>
+<script type="text/javascript">
+<!--
+function twodigit(d) { return d < 10 ? "0" + d : d; }
+function formattime(t) {
+ var month = t.getUTCMonth() + 1;
+ var day = t.getUTCDate();
+ var year = t.getUTCFullYear();
+ var hours = t.getUTCHours();
+ var minutes = t.getUTCMinutes();
+ return (year + "/" + twodigit(month) + "/" + twodigit(day) + " " +
+ hours + ":" + twodigit(minutes) + " UTC");
+}
+
+function interval(s) {
+ // Return the given interval (in seconds) as an English phrase
+ var seconds = s %% 60;
+ s = Math.floor(s / 60);
+ var minutes = s %% 60;
+ s = Math.floor(s / 60);
+ var hours = s %% 24;
+ var v = twodigit(hours) + ":" + twodigit(minutes) + ":" + twodigit(seconds);
+ var days = Math.floor(s / 24);
+ if (days != 0) v = days + ' days, ' + v;
+ return v;
+}
+
+var fudge_seconds = 5;
+
+function init() {
+ // Set the content of the 'btime' cell.
+ var currentTime = new Date();
+ var bunixtime = Math.floor(currentTime.getTime() / 1000);
+
+ var v = formattime(currentTime);
+ v += " (Unix time: " + bunixtime + ")";
+
+ var diff = Math.abs(%(serverunixtime)s - bunixtime);
+ if (diff > fudge_seconds) v += "<p class='warn'>Browser and Server times disagree.</p>";
+
+ document.getElementById('btime').innerHTML = v;
+
+ // Warn if response cookie expires is not close to one hour in the future.
+ // Yes, we want this to happen when wit hit the 'Expire' link, too.
+ var expires = Date.parse("%(expires)s") / 1000;
+ var onehour = (60 * 60);
+ if (Math.abs(expires - (bunixtime + onehour)) > fudge_seconds) {
+ diff = Math.floor(expires - bunixtime);
+ if (expires > (bunixtime + onehour)) {
+ var msg = "Response cookie 'expires' date is " + interval(diff) + " in the future.";
+ } else {
+ var msg = "Response cookie 'expires' date is " + interval(0 - diff) + " in the past.";
+ }
+ document.getElementById('respcookiewarn').innerHTML = msg;
+ }
+}
+//-->
+</script>
+</head>
+
+<body onload='init()'>
+<h2>Session Demo</h2>
+<p>Reload this page. The session ID should not change from one reload to the next</p>
+<p><a href='../'>Index</a> | <a href='expire'>Expire</a> | <a href='regen'>Regenerate</a></p>
+<table>
+ <tr><th>Session ID:</th><td>%(sessionid)s<p class='warn'>%(changemsg)s</p></td></tr>
+ <tr><th>Request Cookie</th><td>%(reqcookie)s</td></tr>
+ <tr><th>Response Cookie</th><td>%(respcookie)s<p id='respcookiewarn' class='warn'></p></td></tr>
+ <tr><th>Session Data</th><td>%(sessiondata)s</td></tr>
+ <tr><th>Server Time</th><td id='stime'>%(servertime)s (Unix time: %(serverunixtime)s)</td></tr>
+ <tr><th>Browser Time</th><td id='btime'> </td></tr>
+ <tr><th>Cherrypy Version:</th><td>%(cpversion)s</td></tr>
+ <tr><th>Python Version:</th><td>%(pyversion)s</td></tr>
+</table>
+</body></html>
+""" # noqa E501
+
+
+class Root(object):
+
+ def page(self):
+ changemsg = []
+ if cherrypy.session.id != cherrypy.session.originalid:
+ if cherrypy.session.originalid is None:
+ changemsg.append(
+ 'Created new session because no session id was given.')
+ if cherrypy.session.missing:
+ changemsg.append(
+ 'Created new session due to missing '
+ '(expired or malicious) session.')
+ if cherrypy.session.regenerated:
+ changemsg.append('Application generated a new session.')
+
+ try:
+ expires = cherrypy.response.cookie['session_id']['expires']
+ except KeyError:
+ expires = ''
+
+ return page % {
+ 'sessionid': cherrypy.session.id,
+ 'changemsg': '<br>'.join(changemsg),
+ 'respcookie': cherrypy.response.cookie.output(),
+ 'reqcookie': cherrypy.request.cookie.output(),
+ 'sessiondata': list(cherrypy.session.items()),
+ 'servertime': (
+ datetime.utcnow().strftime('%Y/%m/%d %H:%M') + ' UTC'
+ ),
+ 'serverunixtime': calendar.timegm(datetime.utcnow().timetuple()),
+ 'cpversion': cherrypy.__version__,
+ 'pyversion': sys.version,
+ 'expires': expires,
+ }
+
+ @cherrypy.expose
+ def index(self):
+ # Must modify data or the session will not be saved.
+ cherrypy.session['color'] = 'green'
+ return self.page()
+
+ @cherrypy.expose
+ def expire(self):
+ sessions.expire()
+ return self.page()
+
+ @cherrypy.expose
+ def regen(self):
+ cherrypy.session.regenerate()
+ # Must modify data or the session will not be saved.
+ cherrypy.session['color'] = 'yellow'
+ return self.page()
+
+
+if __name__ == '__main__':
+ cherrypy.config.update({
+ # 'environment': 'production',
+ 'log.screen': True,
+ 'tools.sessions.on': True,
+ })
+ cherrypy.quickstart(Root())
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/static/404.html b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/static/404.html
new file mode 100644
index 0000000000000000000000000000000000000000..01b17b096314883a82e310bf7e25ba069e811452
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/static/404.html
@@ -0,0 +1,5 @@
+<html>
+ <body>
+ <h1>I couldn't find that thing you were looking for!</h1>
+ </body>
+</html>
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/static/dirback.jpg b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/static/dirback.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..80403dc227c19b9192158420f5288121e7f2669a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/static/dirback.jpg differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/static/index.html b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/static/index.html
new file mode 100644
index 0000000000000000000000000000000000000000..b9f5f097d23a4653fe1d9330b5c5fc7fb4c26001
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/static/index.html
@@ -0,0 +1 @@
+Hello, world
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/style.css b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/style.css
new file mode 100644
index 0000000000000000000000000000000000000000..b266e93de719869049d3991eb6fe1c13691089f4
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/style.css
@@ -0,0 +1 @@
+Dummy stylesheet
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test.pem b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test.pem
new file mode 100644
index 0000000000000000000000000000000000000000..47a47042b603d660e6640ab4adf689967f09ddb4
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test.pem
@@ -0,0 +1,38 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIICXAIBAAKBgQDBKo554mzIMY+AByUNpaUOP9bJnQ7ZLQe9XgHwoLJR4VzpyZZZ
+R9L4WtImEew05FY3Izerfm3MN3+MC0tJ6yQU9sOiU3vBW6RrLIMlfKsnRwBRZ0Kn
+da+O6xldVSosu8Ev3z9VZ94iC/ZgKzrH7Mjj/U8/MQO7RBS/LAqee8bFNQIDAQAB
+AoGAWOCF0ZrWxn3XMucWq2LNwPKqlvVGwbIwX3cDmX22zmnM4Fy6arXbYh4XlyCj
+9+ofqRrxIFz5k/7tFriTmZ0xag5+Jdx+Kwg0/twiP7XCNKipFogwe1Hznw8OFAoT
+enKBdj2+/n2o0Bvo/tDB59m9L/538d46JGQUmJlzMyqYikECQQDyoq+8CtMNvE18
+8VgHcR/KtApxWAjj4HpaHYL637ATjThetUZkW92mgDgowyplthusxdNqhHWyv7E8
+tWNdYErZAkEAy85ShTR0M5aWmrE7o0r0SpWInAkNBH9aXQRRARFYsdBtNfRu6I0i
+0lvU9wiu3eF57FMEC86yViZ5UBnQfTu7vQJAVesj/Zt7pwaCDfdMa740OsxMUlyR
+MVhhGx4OLpYdPJ8qUecxGQKq13XZ7R1HGyNEY4bd2X80Smq08UFuATfC6QJAH8UB
+yBHtKz2GLIcELOg6PIYizW/7v3+6rlVF60yw7sb2vzpjL40QqIn4IKoR2DSVtOkb
+8FtAIX3N21aq0VrGYQJBAIPiaEc2AZ8Bq2GC4F3wOz/BxJ/izvnkiotR12QK4fh5
+yjZMhTjWCas5zwHR5PDjlD88AWGDMsZ1PicD4348xJQ=
+-----END RSA PRIVATE KEY-----
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAy6gAwIBAgIJAI18BD7eQxlGMA0GCSqGSIb3DQEBBAUAMIGeMQswCQYD
+VQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTESMBAGA1UEBxMJU2FuIERpZWdv
+MRkwFwYDVQQKExBDaGVycnlQeSBQcm9qZWN0MREwDwYDVQQLEwhkZXYtdGVzdDEW
+MBQGA1UEAxMNQ2hlcnJ5UHkgVGVhbTEgMB4GCSqGSIb3DQEJARYRcmVtaUBjaGVy
+cnlweS5vcmcwHhcNMDYwOTA5MTkyMDIwWhcNMzQwMTI0MTkyMDIwWjCBnjELMAkG
+A1UEBhMCVVMxEzARBgNVBAgTCkNhbGlmb3JuaWExEjAQBgNVBAcTCVNhbiBEaWVn
+bzEZMBcGA1UEChMQQ2hlcnJ5UHkgUHJvamVjdDERMA8GA1UECxMIZGV2LXRlc3Qx
+FjAUBgNVBAMTDUNoZXJyeVB5IFRlYW0xIDAeBgkqhkiG9w0BCQEWEXJlbWlAY2hl
+cnJ5cHkub3JnMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDBKo554mzIMY+A
+ByUNpaUOP9bJnQ7ZLQe9XgHwoLJR4VzpyZZZR9L4WtImEew05FY3Izerfm3MN3+M
+C0tJ6yQU9sOiU3vBW6RrLIMlfKsnRwBRZ0Knda+O6xldVSosu8Ev3z9VZ94iC/Zg
+KzrH7Mjj/U8/MQO7RBS/LAqee8bFNQIDAQABo4IBBzCCAQMwHQYDVR0OBBYEFDIQ
+2feb71tVZCWpU0qJ/Tw+wdtoMIHTBgNVHSMEgcswgciAFDIQ2feb71tVZCWpU0qJ
+/Tw+wdtooYGkpIGhMIGeMQswCQYDVQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5p
+YTESMBAGA1UEBxMJU2FuIERpZWdvMRkwFwYDVQQKExBDaGVycnlQeSBQcm9qZWN0
+MREwDwYDVQQLEwhkZXYtdGVzdDEWMBQGA1UEAxMNQ2hlcnJ5UHkgVGVhbTEgMB4G
+CSqGSIb3DQEJARYRcmVtaUBjaGVycnlweS5vcmeCCQCNfAQ+3kMZRjAMBgNVHRME
+BTADAQH/MA0GCSqGSIb3DQEBBAUAA4GBAL7AAQz7IePV48ZTAFHKr88ntPALsL5S
+8vHCZPNMevNkLTj3DYUw2BcnENxMjm1kou2F2BkvheBPNZKIhc6z4hAml3ed1xa2
+D7w6e6OTcstdK/+KrPDDHeOP1dhMWNs2JE1bNlfF1LiXzYKSXpe88eCKjCXsCT/T
+NluCaWQys3MS
+-----END CERTIFICATE-----
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_auth_basic.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_auth_basic.py
new file mode 100644
index 0000000000000000000000000000000000000000..d7e69a9b450942a9bf73e9bbb55c7d691cb7e24b
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_auth_basic.py
@@ -0,0 +1,135 @@
+# This file is part of CherryPy <http://www.cherrypy.org/>
+# -*- coding: utf-8 -*-
+# vim:ts=4:sw=4:expandtab:fileencoding=utf-8
+
+from hashlib import md5
+
+import cherrypy
+from cherrypy._cpcompat import ntob
+from cherrypy.lib import auth_basic
+from cherrypy.test import helper
+
+
+class BasicAuthTest(helper.CPWebCase):
+
+ @staticmethod
+ def setup_server():
+ class Root:
+
+ @cherrypy.expose
+ def index(self):
+ return 'This is public.'
+
+ class BasicProtected:
+
+ @cherrypy.expose
+ def index(self):
+ return "Hello %s, you've been authorized." % (
+ cherrypy.request.login)
+
+ class BasicProtected2:
+
+ @cherrypy.expose
+ def index(self):
+ return "Hello %s, you've been authorized." % (
+ cherrypy.request.login)
+
+ class BasicProtected2_u:
+
+ @cherrypy.expose
+ def index(self):
+ return "Hello %s, you've been authorized." % (
+ cherrypy.request.login)
+
+ userpassdict = {'xuser': 'xpassword'}
+ userhashdict = {'xuser': md5(b'xpassword').hexdigest()}
+ userhashdict_u = {'xюзер': md5(ntob('їжа', 'utf-8')).hexdigest()}
+
+ def checkpasshash(realm, user, password):
+ p = userhashdict.get(user)
+ return p and p == md5(ntob(password)).hexdigest() or False
+
+ def checkpasshash_u(realm, user, password):
+ p = userhashdict_u.get(user)
+ return p and p == md5(ntob(password, 'utf-8')).hexdigest() or False
+
+ basic_checkpassword_dict = auth_basic.checkpassword_dict(userpassdict)
+ conf = {
+ '/basic': {
+ 'tools.auth_basic.on': True,
+ 'tools.auth_basic.realm': 'wonderland',
+ 'tools.auth_basic.checkpassword': basic_checkpassword_dict
+ },
+ '/basic2': {
+ 'tools.auth_basic.on': True,
+ 'tools.auth_basic.realm': 'wonderland',
+ 'tools.auth_basic.checkpassword': checkpasshash,
+ 'tools.auth_basic.accept_charset': 'ISO-8859-1',
+ },
+ '/basic2_u': {
+ 'tools.auth_basic.on': True,
+ 'tools.auth_basic.realm': 'wonderland',
+ 'tools.auth_basic.checkpassword': checkpasshash_u,
+ 'tools.auth_basic.accept_charset': 'UTF-8',
+ },
+ }
+
+ root = Root()
+ root.basic = BasicProtected()
+ root.basic2 = BasicProtected2()
+ root.basic2_u = BasicProtected2_u()
+ cherrypy.tree.mount(root, config=conf)
+
+ def testPublic(self):
+ self.getPage('/')
+ self.assertStatus('200 OK')
+ self.assertHeader('Content-Type', 'text/html;charset=utf-8')
+ self.assertBody('This is public.')
+
+ def testBasic(self):
+ self.getPage('/basic/')
+ self.assertStatus(401)
+ self.assertHeader(
+ 'WWW-Authenticate',
+ 'Basic realm="wonderland", charset="UTF-8"'
+ )
+
+ self.getPage('/basic/',
+ [('Authorization', 'Basic eHVzZXI6eHBhc3N3b3JX')])
+ self.assertStatus(401)
+
+ self.getPage('/basic/',
+ [('Authorization', 'Basic eHVzZXI6eHBhc3N3b3Jk')])
+ self.assertStatus('200 OK')
+ self.assertBody("Hello xuser, you've been authorized.")
+
+ def testBasic2(self):
+ self.getPage('/basic2/')
+ self.assertStatus(401)
+ self.assertHeader('WWW-Authenticate', 'Basic realm="wonderland"')
+
+ self.getPage('/basic2/',
+ [('Authorization', 'Basic eHVzZXI6eHBhc3N3b3JX')])
+ self.assertStatus(401)
+
+ self.getPage('/basic2/',
+ [('Authorization', 'Basic eHVzZXI6eHBhc3N3b3Jk')])
+ self.assertStatus('200 OK')
+ self.assertBody("Hello xuser, you've been authorized.")
+
+ def testBasic2_u(self):
+ self.getPage('/basic2_u/')
+ self.assertStatus(401)
+ self.assertHeader(
+ 'WWW-Authenticate',
+ 'Basic realm="wonderland", charset="UTF-8"'
+ )
+
+ self.getPage('/basic2_u/',
+ [('Authorization', 'Basic eNGO0LfQtdGAOtGX0LbRgw==')])
+ self.assertStatus(401)
+
+ self.getPage('/basic2_u/',
+ [('Authorization', 'Basic eNGO0LfQtdGAOtGX0LbQsA==')])
+ self.assertStatus('200 OK')
+ self.assertBody("Hello xюзер, you've been authorized.")
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_auth_digest.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_auth_digest.py
new file mode 100644
index 0000000000000000000000000000000000000000..745f89e6c286a18774a9ce0113145bdac6fc6fac
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_auth_digest.py
@@ -0,0 +1,131 @@
+# This file is part of CherryPy <http://www.cherrypy.org/>
+# -*- coding: utf-8 -*-
+# vim:ts=4:sw=4:expandtab:fileencoding=utf-8
+
+
+import cherrypy
+from cherrypy.lib import auth_digest
+from cherrypy._cpcompat import ntob
+
+from cherrypy.test import helper
+
+
+def _fetch_users():
+ return {'test': 'test', '☃йюзер': 'їпароль'}
+
+
+get_ha1 = cherrypy.lib.auth_digest.get_ha1_dict_plain(_fetch_users())
+
+
+class DigestAuthTest(helper.CPWebCase):
+
+ @staticmethod
+ def setup_server():
+ class Root:
+
+ @cherrypy.expose
+ def index(self):
+ return 'This is public.'
+
+ class DigestProtected:
+
+ @cherrypy.expose
+ def index(self, *args, **kwargs):
+ return "Hello %s, you've been authorized." % (
+ cherrypy.request.login)
+
+ conf = {'/digest': {'tools.auth_digest.on': True,
+ 'tools.auth_digest.realm': 'localhost',
+ 'tools.auth_digest.get_ha1': get_ha1,
+ 'tools.auth_digest.key': 'a565c27146791cfb',
+ 'tools.auth_digest.debug': True,
+ 'tools.auth_digest.accept_charset': 'UTF-8'}}
+
+ root = Root()
+ root.digest = DigestProtected()
+ cherrypy.tree.mount(root, config=conf)
+
+ def testPublic(self):
+ self.getPage('/')
+ assert self.status == '200 OK'
+ self.assertHeader('Content-Type', 'text/html;charset=utf-8')
+ assert self.body == b'This is public.'
+
+ def _test_parametric_digest(self, username, realm):
+ test_uri = '/digest/?@/=%2F%40&%f0%9f%99%88=path'
+
+ self.getPage(test_uri)
+ assert self.status_code == 401
+
+ msg = 'Digest authentification scheme was not found'
+ www_auth_digest = tuple(filter(
+ lambda kv: kv[0].lower() == 'www-authenticate'
+ and kv[1].startswith('Digest '),
+ self.headers,
+ ))
+ assert len(www_auth_digest) == 1, msg
+
+ items = www_auth_digest[0][-1][7:].split(', ')
+ tokens = {}
+ for item in items:
+ key, value = item.split('=')
+ tokens[key.lower()] = value
+
+ assert tokens['realm'] == '"localhost"'
+ assert tokens['algorithm'] == '"MD5"'
+ assert tokens['qop'] == '"auth"'
+ assert tokens['charset'] == '"UTF-8"'
+
+ nonce = tokens['nonce'].strip('"')
+
+ # Test user agent response with a wrong value for 'realm'
+ base_auth = ('Digest username="%s", '
+ 'realm="%s", '
+ 'nonce="%s", '
+ 'uri="%s", '
+ 'algorithm=MD5, '
+ 'response="%s", '
+ 'qop=auth, '
+ 'nc=%s, '
+ 'cnonce="1522e61005789929"')
+
+ encoded_user = username
+ encoded_user = encoded_user.encode('utf-8')
+ encoded_user = encoded_user.decode('latin1')
+ auth_header = base_auth % (
+ encoded_user, realm, nonce, test_uri,
+ '11111111111111111111111111111111', '00000001',
+ )
+ auth = auth_digest.HttpDigestAuthorization(auth_header, 'GET')
+ # calculate the response digest
+ ha1 = get_ha1(auth.realm, auth.username)
+ response = auth.request_digest(ha1)
+ auth_header = base_auth % (
+ encoded_user, realm, nonce, test_uri,
+ response, '00000001',
+ )
+ self.getPage(test_uri, [('Authorization', auth_header)])
+
+ def test_wrong_realm(self):
+ # send response with correct response digest, but wrong realm
+ self._test_parametric_digest(username='test', realm='wrong realm')
+ assert self.status_code == 401
+
+ def test_ascii_user(self):
+ self._test_parametric_digest(username='test', realm='localhost')
+ assert self.status == '200 OK'
+ assert self.body == b"Hello test, you've been authorized."
+
+ def test_unicode_user(self):
+ self._test_parametric_digest(username='☃йюзер', realm='localhost')
+ assert self.status == '200 OK'
+ assert self.body == ntob(
+ "Hello ☃йюзер, you've been authorized.", 'utf-8',
+ )
+
+ def test_wrong_scheme(self):
+ basic_auth = {
+ 'Authorization': 'Basic foo:bar',
+ }
+ self.getPage('/digest/', headers=list(basic_auth.items()))
+ assert self.status_code == 401
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_bus.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_bus.py
new file mode 100644
index 0000000000000000000000000000000000000000..594023a23c278095d68fd0ba82dd60230def0010
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_bus.py
@@ -0,0 +1,327 @@
+"""Publish-subscribe bus tests."""
+# pylint: disable=redefined-outer-name
+
+import os
+import sys
+import threading
+import time
+import unittest.mock
+
+import pytest
+
+from cherrypy.process import wspbus
+
+
+CI_ON_MACOS = bool(os.getenv('CI')) and sys.platform == 'darwin'
+msg = 'Listener %d on channel %s: %s.' # pylint: disable=invalid-name
+
+
+@pytest.fixture
+def bus():
+ """Return a wspbus instance."""
+ return wspbus.Bus()
+
+
+@pytest.fixture
+def log_tracker(bus):
+ """Return an instance of bus log tracker."""
+ class LogTracker: # pylint: disable=too-few-public-methods
+ """Bus log tracker."""
+
+ log_entries = []
+
+ def __init__(self, bus):
+ def logit(msg, level): # pylint: disable=unused-argument
+ self.log_entries.append(msg)
+ bus.subscribe('log', logit)
+
+ return LogTracker(bus)
+
+
+@pytest.fixture
+def listener():
+ """Return an instance of bus response tracker."""
+ class Listner: # pylint: disable=too-few-public-methods
+ """Bus handler return value tracker."""
+
+ responses = []
+
+ def get_listener(self, channel, index):
+ """Return an argument tracking listener."""
+ def listener(arg=None):
+ self.responses.append(msg % (index, channel, arg))
+ return listener
+
+ return Listner()
+
+
+def test_builtin_channels(bus, listener):
+ """Test that built-in channels trigger corresponding listeners."""
+ expected = []
+
+ for channel in bus.listeners:
+ for index, priority in enumerate([100, 50, 0, 51]):
+ bus.subscribe(
+ channel,
+ listener.get_listener(channel, index),
+ priority,
+ )
+
+ for channel in bus.listeners:
+ bus.publish(channel)
+ expected.extend([msg % (i, channel, None) for i in (2, 1, 3, 0)])
+ bus.publish(channel, arg=79347)
+ expected.extend([msg % (i, channel, 79347) for i in (2, 1, 3, 0)])
+
+ assert listener.responses == expected
+
+
+def test_custom_channels(bus, listener):
+ """Test that custom pub-sub channels work as built-in ones."""
+ expected = []
+
+ custom_listeners = ('hugh', 'louis', 'dewey')
+ for channel in custom_listeners:
+ for index, priority in enumerate([None, 10, 60, 40]):
+ bus.subscribe(
+ channel,
+ listener.get_listener(channel, index),
+ priority,
+ )
+
+ for channel in custom_listeners:
+ bus.publish(channel, 'ah so')
+ expected.extend(msg % (i, channel, 'ah so') for i in (1, 3, 0, 2))
+ bus.publish(channel)
+ expected.extend(msg % (i, channel, None) for i in (1, 3, 0, 2))
+
+ assert listener.responses == expected
+
+
+def test_listener_errors(bus, listener):
+ """Test that unhandled exceptions raise channel failures."""
+ expected = []
+ channels = [c for c in bus.listeners if c != 'log']
+
+ for channel in channels:
+ bus.subscribe(channel, listener.get_listener(channel, 1))
+ # This will break since the lambda takes no args.
+ bus.subscribe(channel, lambda: None, priority=20)
+
+ for channel in channels:
+ with pytest.raises(wspbus.ChannelFailures):
+ bus.publish(channel, 123)
+ expected.append(msg % (1, channel, 123))
+
+ assert listener.responses == expected
+
+
+def test_start(bus, listener, log_tracker):
+ """Test that bus start sequence calls all listeners."""
+ num = 3
+ for index in range(num):
+ bus.subscribe('start', listener.get_listener('start', index))
+
+ bus.start()
+ try:
+ # The start method MUST call all 'start' listeners.
+ assert (
+ set(listener.responses) ==
+ set(msg % (i, 'start', None) for i in range(num)))
+ # The start method MUST move the state to STARTED
+ # (or EXITING, if errors occur)
+ assert bus.state == bus.states.STARTED
+ # The start method MUST log its states.
+ assert log_tracker.log_entries == ['Bus STARTING', 'Bus STARTED']
+ finally:
+ # Exit so the atexit handler doesn't complain.
+ bus.exit()
+
+
+def test_stop(bus, listener, log_tracker):
+ """Test that bus stop sequence calls all listeners."""
+ num = 3
+
+ for index in range(num):
+ bus.subscribe('stop', listener.get_listener('stop', index))
+
+ bus.stop()
+
+ # The stop method MUST call all 'stop' listeners.
+ assert (set(listener.responses) ==
+ set(msg % (i, 'stop', None) for i in range(num)))
+
+ # The stop method MUST move the state to STOPPED
+ assert bus.state == bus.states.STOPPED
+
+ # The stop method MUST log its states.
+ assert log_tracker.log_entries == ['Bus STOPPING', 'Bus STOPPED']
+
+
+def test_graceful(bus, listener, log_tracker):
+ """Test that bus graceful state triggers all listeners."""
+ num = 3
+
+ for index in range(num):
+ bus.subscribe('graceful', listener.get_listener('graceful', index))
+
+ bus.graceful()
+
+ # The graceful method MUST call all 'graceful' listeners.
+ assert (
+ set(listener.responses) ==
+ set(msg % (i, 'graceful', None) for i in range(num)))
+
+ # The graceful method MUST log its states.
+ assert log_tracker.log_entries == ['Bus graceful']
+
+
+def test_exit(bus, listener, log_tracker):
+ """Test that bus exit sequence is correct."""
+ num = 3
+
+ for index in range(num):
+ bus.subscribe('stop', listener.get_listener('stop', index))
+ bus.subscribe('exit', listener.get_listener('exit', index))
+
+ bus.exit()
+
+ # The exit method MUST call all 'stop' listeners,
+ # and then all 'exit' listeners.
+ assert (set(listener.responses) ==
+ set([msg % (i, 'stop', None) for i in range(num)] +
+ [msg % (i, 'exit', None) for i in range(num)]))
+
+ # The exit method MUST move the state to EXITING
+ assert bus.state == bus.states.EXITING
+
+ # The exit method MUST log its states.
+ assert (log_tracker.log_entries ==
+ ['Bus STOPPING', 'Bus STOPPED', 'Bus EXITING', 'Bus EXITED'])
+
+
+def test_wait(bus):
+ """Test that bus wait awaits for states."""
+ def f(method): # pylint: disable=invalid-name
+ time.sleep(0.2)
+ getattr(bus, method)()
+
+ flow = [
+ ('start', [bus.states.STARTED]),
+ ('stop', [bus.states.STOPPED]),
+ ('start', [bus.states.STARTING, bus.states.STARTED]),
+ ('exit', [bus.states.EXITING]),
+ ]
+
+ for method, states in flow:
+ threading.Thread(target=f, args=(method,)).start()
+ bus.wait(states)
+
+ # The wait method MUST wait for the given state(s).
+ assert bus.state in states, 'State %r not in %r' % (bus.state, states)
+
+
+@pytest.mark.xfail(CI_ON_MACOS, reason='continuous integration on macOS fails')
+def test_wait_publishes_periodically(bus):
+ """Test that wait publishes each tick."""
+ callback = unittest.mock.MagicMock()
+ bus.subscribe('main', callback)
+
+ def set_start():
+ time.sleep(0.05)
+ bus.start()
+ threading.Thread(target=set_start).start()
+ bus.wait(bus.states.STARTED, interval=0.01, channel='main')
+ assert callback.call_count > 3
+
+
+def test_block(bus, log_tracker):
+ """Test that bus block waits for exiting."""
+ def f(): # pylint: disable=invalid-name
+ time.sleep(0.2)
+ bus.exit()
+
+ def g(): # pylint: disable=invalid-name
+ time.sleep(0.4)
+
+ threading.Thread(target=f).start()
+ threading.Thread(target=g).start()
+ threads = [t for t in threading.enumerate() if not t.daemon]
+ assert len(threads) == 3
+
+ bus.block()
+
+ # The block method MUST wait for the EXITING state.
+ assert bus.state == bus.states.EXITING
+
+ # The block method MUST wait for ALL non-main, non-daemon threads to
+ # finish.
+ threads = [t for t in threading.enumerate() if not t.daemon]
+ assert len(threads) == 1
+
+ # The last message will mention an indeterminable thread name; ignore
+ # it
+ expected_bus_messages = [
+ 'Bus STOPPING',
+ 'Bus STOPPED',
+ 'Bus EXITING',
+ 'Bus EXITED',
+ 'Waiting for child threads to terminate...',
+ ]
+ bus_msg_num = len(expected_bus_messages)
+
+ # If the last message mentions an indeterminable thread name then ignore it
+ assert log_tracker.log_entries[:bus_msg_num] == expected_bus_messages
+ assert len(log_tracker.log_entries[bus_msg_num:]) <= 1, (
+ 'No more than one extra log line with the thread name expected'
+ )
+
+
+def test_start_with_callback(bus):
+ """Test that callback fires on bus start."""
+ try:
+ events = []
+
+ def f(*args, **kwargs): # pylint: disable=invalid-name
+ events.append(('f', args, kwargs))
+
+ def g(): # pylint: disable=invalid-name
+ events.append('g')
+ bus.subscribe('start', g)
+ bus.start_with_callback(f, (1, 3, 5), {'foo': 'bar'})
+
+ # Give wait() time to run f()
+ time.sleep(0.2)
+
+ # The callback method MUST wait for the STARTED state.
+ assert bus.state == bus.states.STARTED
+
+ # The callback method MUST run after all start methods.
+ assert events == ['g', ('f', (1, 3, 5), {'foo': 'bar'})]
+ finally:
+ bus.exit()
+
+
+def test_log(bus, log_tracker):
+ """Test that bus messages and errors are logged."""
+ assert log_tracker.log_entries == []
+
+ # Try a normal message.
+ expected = []
+ for msg_ in ["O mah darlin'"] * 3 + ['Clementiiiiiiiine']:
+ bus.log(msg_)
+ expected.append(msg_)
+ assert log_tracker.log_entries == expected
+
+ # Try an error message
+ try:
+ foo
+ except NameError:
+ bus.log('You are lost and gone forever', traceback=True)
+ lastmsg = log_tracker.log_entries[-1]
+ assert 'Traceback' in lastmsg and 'NameError' in lastmsg, (
+ 'Last log message %r did not contain '
+ 'the expected traceback.' % lastmsg
+ )
+ else:
+ pytest.fail('NameError was not raised as expected.')
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_caching.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_caching.py
new file mode 100644
index 0000000000000000000000000000000000000000..c0b89797670593f17486a5edc53f77064b0ead27
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_caching.py
@@ -0,0 +1,390 @@
+import datetime
+from itertools import count
+import os
+import threading
+import time
+import urllib.parse
+
+import pytest
+
+import cherrypy
+from cherrypy.lib import httputil
+
+from cherrypy.test import helper
+
+
+curdir = os.path.join(os.getcwd(), os.path.dirname(__file__))
+
+gif_bytes = (
+ b'GIF89a\x01\x00\x01\x00\x82\x00\x01\x99"\x1e\x00\x00\x00\x00\x00'
+ b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+ b'\x00,\x00\x00\x00\x00\x01\x00\x01\x00\x02\x03\x02\x08\t\x00;'
+)
+
+
+class CacheTest(helper.CPWebCase):
+
+ @staticmethod
+ def setup_server():
+
+ @cherrypy.config(**{'tools.caching.on': True})
+ class Root:
+
+ def __init__(self):
+ self.counter = 0
+ self.control_counter = 0
+ self.longlock = threading.Lock()
+
+ @cherrypy.expose
+ def index(self):
+ self.counter += 1
+ msg = 'visit #%s' % self.counter
+ return msg
+
+ @cherrypy.expose
+ def control(self):
+ self.control_counter += 1
+ return 'visit #%s' % self.control_counter
+
+ @cherrypy.expose
+ def a_gif(self):
+ cherrypy.response.headers[
+ 'Last-Modified'] = httputil.HTTPDate()
+ return gif_bytes
+
+ @cherrypy.expose
+ def long_process(self, seconds='1'):
+ try:
+ self.longlock.acquire()
+ time.sleep(float(seconds))
+ finally:
+ self.longlock.release()
+ return 'success!'
+
+ @cherrypy.expose
+ def clear_cache(self, path):
+ cherrypy._cache.store[cherrypy.request.base + path].clear()
+
+ @cherrypy.config(**{
+ 'tools.caching.on': True,
+ 'tools.response_headers.on': True,
+ 'tools.response_headers.headers': [
+ ('Vary', 'Our-Varying-Header')
+ ],
+ })
+ class VaryHeaderCachingServer(object):
+
+ def __init__(self):
+ self.counter = count(1)
+
+ @cherrypy.expose
+ def index(self):
+ return 'visit #%s' % next(self.counter)
+
+ @cherrypy.config(**{
+ 'tools.expires.on': True,
+ 'tools.expires.secs': 60,
+ 'tools.staticdir.on': True,
+ 'tools.staticdir.dir': 'static',
+ 'tools.staticdir.root': curdir,
+ })
+ class UnCached(object):
+
+ @cherrypy.expose
+ @cherrypy.config(**{'tools.expires.secs': 0})
+ def force(self):
+ cherrypy.response.headers['Etag'] = 'bibbitybobbityboo'
+ self._cp_config['tools.expires.force'] = True
+ self._cp_config['tools.expires.secs'] = 0
+ return 'being forceful'
+
+ @cherrypy.expose
+ def dynamic(self):
+ cherrypy.response.headers['Etag'] = 'bibbitybobbityboo'
+ cherrypy.response.headers['Cache-Control'] = 'private'
+ return 'D-d-d-dynamic!'
+
+ @cherrypy.expose
+ def cacheable(self):
+ cherrypy.response.headers['Etag'] = 'bibbitybobbityboo'
+ return "Hi, I'm cacheable."
+
+ @cherrypy.expose
+ @cherrypy.config(**{'tools.expires.secs': 86400})
+ def specific(self):
+ cherrypy.response.headers[
+ 'Etag'] = 'need_this_to_make_me_cacheable'
+ return 'I am being specific'
+
+ class Foo(object):
+ pass
+
+ @cherrypy.expose
+ @cherrypy.config(**{'tools.expires.secs': Foo()})
+ def wrongtype(self):
+ cherrypy.response.headers[
+ 'Etag'] = 'need_this_to_make_me_cacheable'
+ return 'Woops'
+
+ @cherrypy.config(**{
+ 'tools.gzip.mime_types': ['text/*', 'image/*'],
+ 'tools.caching.on': True,
+ 'tools.staticdir.on': True,
+ 'tools.staticdir.dir': 'static',
+ 'tools.staticdir.root': curdir
+ })
+ class GzipStaticCache(object):
+ pass
+
+ cherrypy.tree.mount(Root())
+ cherrypy.tree.mount(UnCached(), '/expires')
+ cherrypy.tree.mount(VaryHeaderCachingServer(), '/varying_headers')
+ cherrypy.tree.mount(GzipStaticCache(), '/gzip_static_cache')
+ cherrypy.config.update({'tools.gzip.on': True})
+
+ def testCaching(self):
+ elapsed = 0.0
+ for trial in range(10):
+ self.getPage('/')
+ # The response should be the same every time,
+ # except for the Age response header.
+ self.assertBody('visit #1')
+ if trial != 0:
+ age = int(self.assertHeader('Age'))
+ assert age >= elapsed
+ elapsed = age
+
+ # POST, PUT, DELETE should not be cached.
+ self.getPage('/', method='POST')
+ self.assertBody('visit #2')
+ # Because gzip is turned on, the Vary header should always Vary for
+ # content-encoding
+ self.assertHeader('Vary', 'Accept-Encoding')
+ # The previous request should have invalidated the cache,
+ # so this request will recalc the response.
+ self.getPage('/', method='GET')
+ self.assertBody('visit #3')
+ # ...but this request should get the cached copy.
+ self.getPage('/', method='GET')
+ self.assertBody('visit #3')
+ self.getPage('/', method='DELETE')
+ self.assertBody('visit #4')
+
+ # The previous request should have invalidated the cache,
+ # so this request will recalc the response.
+ self.getPage('/', method='GET', headers=[('Accept-Encoding', 'gzip')])
+ self.assertHeader('Content-Encoding', 'gzip')
+ self.assertHeader('Vary')
+ self.assertEqual(
+ cherrypy.lib.encoding.decompress(self.body), b'visit #5')
+
+ # Now check that a second request gets the gzip header and gzipped body
+ # This also tests a bug in 3.0 to 3.0.2 whereby the cached, gzipped
+ # response body was being gzipped a second time.
+ self.getPage('/', method='GET', headers=[('Accept-Encoding', 'gzip')])
+ self.assertHeader('Content-Encoding', 'gzip')
+ self.assertEqual(
+ cherrypy.lib.encoding.decompress(self.body), b'visit #5')
+
+ # Now check that a third request that doesn't accept gzip
+ # skips the cache (because the 'Vary' header denies it).
+ self.getPage('/', method='GET')
+ self.assertNoHeader('Content-Encoding')
+ self.assertBody('visit #6')
+
+ def testVaryHeader(self):
+ self.getPage('/varying_headers/')
+ self.assertStatus('200 OK')
+ self.assertHeaderItemValue('Vary', 'Our-Varying-Header')
+ self.assertBody('visit #1')
+
+ # Now check that different 'Vary'-fields don't evict each other.
+ # This test creates 2 requests with different 'Our-Varying-Header'
+ # and then tests if the first one still exists.
+ self.getPage('/varying_headers/',
+ headers=[('Our-Varying-Header', 'request 2')])
+ self.assertStatus('200 OK')
+ self.assertBody('visit #2')
+
+ self.getPage('/varying_headers/',
+ headers=[('Our-Varying-Header', 'request 2')])
+ self.assertStatus('200 OK')
+ self.assertBody('visit #2')
+
+ self.getPage('/varying_headers/')
+ self.assertStatus('200 OK')
+ self.assertBody('visit #1')
+
+ def testExpiresTool(self):
+ # test setting an expires header
+ self.getPage('/expires/specific')
+ self.assertStatus('200 OK')
+ self.assertHeader('Expires')
+
+ # test exceptions for bad time values
+ self.getPage('/expires/wrongtype')
+ self.assertStatus(500)
+ self.assertInBody('TypeError')
+
+ # static content should not have "cache prevention" headers
+ self.getPage('/expires/index.html')
+ self.assertStatus('200 OK')
+ self.assertNoHeader('Pragma')
+ self.assertNoHeader('Cache-Control')
+ self.assertHeader('Expires')
+
+ # dynamic content that sets indicators should not have
+ # "cache prevention" headers
+ self.getPage('/expires/cacheable')
+ self.assertStatus('200 OK')
+ self.assertNoHeader('Pragma')
+ self.assertNoHeader('Cache-Control')
+ self.assertHeader('Expires')
+
+ self.getPage('/expires/dynamic')
+ self.assertBody('D-d-d-dynamic!')
+ # the Cache-Control header should be untouched
+ self.assertHeader('Cache-Control', 'private')
+ self.assertHeader('Expires')
+
+ # configure the tool to ignore indicators and replace existing headers
+ self.getPage('/expires/force')
+ self.assertStatus('200 OK')
+ # This also gives us a chance to test 0 expiry with no other headers
+ self.assertHeader('Pragma', 'no-cache')
+ if cherrypy.server.protocol_version == 'HTTP/1.1':
+ self.assertHeader('Cache-Control', 'no-cache, must-revalidate')
+ self.assertHeader('Expires', 'Sun, 28 Jan 2007 00:00:00 GMT')
+
+ # static content should now have "cache prevention" headers
+ self.getPage('/expires/index.html')
+ self.assertStatus('200 OK')
+ self.assertHeader('Pragma', 'no-cache')
+ if cherrypy.server.protocol_version == 'HTTP/1.1':
+ self.assertHeader('Cache-Control', 'no-cache, must-revalidate')
+ self.assertHeader('Expires', 'Sun, 28 Jan 2007 00:00:00 GMT')
+
+ # the cacheable handler should now have "cache prevention" headers
+ self.getPage('/expires/cacheable')
+ self.assertStatus('200 OK')
+ self.assertHeader('Pragma', 'no-cache')
+ if cherrypy.server.protocol_version == 'HTTP/1.1':
+ self.assertHeader('Cache-Control', 'no-cache, must-revalidate')
+ self.assertHeader('Expires', 'Sun, 28 Jan 2007 00:00:00 GMT')
+
+ self.getPage('/expires/dynamic')
+ self.assertBody('D-d-d-dynamic!')
+ # dynamic sets Cache-Control to private but it should be
+ # overwritten here ...
+ self.assertHeader('Pragma', 'no-cache')
+ if cherrypy.server.protocol_version == 'HTTP/1.1':
+ self.assertHeader('Cache-Control', 'no-cache, must-revalidate')
+ self.assertHeader('Expires', 'Sun, 28 Jan 2007 00:00:00 GMT')
+
+ def _assert_resp_len_and_enc_for_gzip(self, uri):
+ """
+ Test that after querying gzipped content it's remains valid in
+ cache and available non-gzipped as well.
+ """
+ ACCEPT_GZIP_HEADERS = [('Accept-Encoding', 'gzip')]
+ content_len = None
+
+ for _ in range(3):
+ self.getPage(uri, method='GET', headers=ACCEPT_GZIP_HEADERS)
+
+ if content_len is not None:
+ # all requests should get the same length
+ self.assertHeader('Content-Length', content_len)
+ self.assertHeader('Content-Encoding', 'gzip')
+
+ content_len = dict(self.headers)['Content-Length']
+
+ # check that we can still get non-gzipped version
+ self.getPage(uri, method='GET')
+ self.assertNoHeader('Content-Encoding')
+ # non-gzipped version should have a different content length
+ self.assertNoHeaderItemValue('Content-Length', content_len)
+
+ def testGzipStaticCache(self):
+ """Test that cache and gzip tools play well together when both enabled.
+
+ Ref GitHub issue #1190.
+ """
+ GZIP_STATIC_CACHE_TMPL = '/gzip_static_cache/{}'
+ resource_files = ('index.html', 'dirback.jpg')
+
+ for f in resource_files:
+ uri = GZIP_STATIC_CACHE_TMPL.format(f)
+ self._assert_resp_len_and_enc_for_gzip(uri)
+
+ def testLastModified(self):
+ self.getPage('/a.gif')
+ self.assertStatus(200)
+ self.assertBody(gif_bytes)
+ lm1 = self.assertHeader('Last-Modified')
+
+ # this request should get the cached copy.
+ self.getPage('/a.gif')
+ self.assertStatus(200)
+ self.assertBody(gif_bytes)
+ self.assertHeader('Age')
+ lm2 = self.assertHeader('Last-Modified')
+ self.assertEqual(lm1, lm2)
+
+ # this request should match the cached copy, but raise 304.
+ self.getPage('/a.gif', [('If-Modified-Since', lm1)])
+ self.assertStatus(304)
+ self.assertNoHeader('Last-Modified')
+ if not getattr(cherrypy.server, 'using_apache', False):
+ self.assertHeader('Age')
+
+ @pytest.mark.xfail(reason='#1536')
+ def test_antistampede(self):
+ SECONDS = 4
+ slow_url = '/long_process?seconds={SECONDS}'.format(**locals())
+ # We MUST make an initial synchronous request in order to create the
+ # AntiStampedeCache object, and populate its selecting_headers,
+ # before the actual stampede.
+ self.getPage(slow_url)
+ self.assertBody('success!')
+ path = urllib.parse.quote(slow_url, safe='')
+ self.getPage('/clear_cache?path=' + path)
+ self.assertStatus(200)
+
+ start = datetime.datetime.now()
+
+ def run():
+ self.getPage(slow_url)
+ # The response should be the same every time
+ self.assertBody('success!')
+ ts = [threading.Thread(target=run) for i in range(100)]
+ for t in ts:
+ t.start()
+ for t in ts:
+ t.join()
+ finish = datetime.datetime.now()
+ # Allow for overhead, two seconds for slow hosts
+ allowance = SECONDS + 2
+ self.assertEqualDates(start, finish, seconds=allowance)
+
+ def test_cache_control(self):
+ self.getPage('/control')
+ self.assertBody('visit #1')
+ self.getPage('/control')
+ self.assertBody('visit #1')
+
+ self.getPage('/control', headers=[('Cache-Control', 'no-cache')])
+ self.assertBody('visit #2')
+ self.getPage('/control')
+ self.assertBody('visit #2')
+
+ self.getPage('/control', headers=[('Pragma', 'no-cache')])
+ self.assertBody('visit #3')
+ self.getPage('/control')
+ self.assertBody('visit #3')
+
+ time.sleep(1)
+ self.getPage('/control', headers=[('Cache-Control', 'max-age=0')])
+ self.assertBody('visit #4')
+ self.getPage('/control')
+ self.assertBody('visit #4')
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_config.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..ecd460198499f57a899bb72e528d8b1324fe6b79
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_config.py
@@ -0,0 +1,291 @@
+"""Tests for the CherryPy configuration system."""
+
+import io
+import os
+import sys
+import unittest
+
+import cherrypy
+
+from cherrypy.test import helper
+
+
+localDir = os.path.join(os.getcwd(), os.path.dirname(__file__))
+
+
+def StringIOFromNative(x):
+ return io.StringIO(str(x))
+
+
+def setup_server():
+
+ @cherrypy.config(foo='this', bar='that')
+ class Root:
+
+ def __init__(self):
+ cherrypy.config.namespaces['db'] = self.db_namespace
+
+ def db_namespace(self, k, v):
+ if k == 'scheme':
+ self.db = v
+
+ @cherrypy.expose(alias=('global_', 'xyz'))
+ def index(self, key):
+ return cherrypy.request.config.get(key, 'None')
+
+ @cherrypy.expose
+ def repr(self, key):
+ return repr(cherrypy.request.config.get(key, None))
+
+ @cherrypy.expose
+ def dbscheme(self):
+ return self.db
+
+ @cherrypy.expose
+ @cherrypy.config(**{'request.body.attempt_charsets': ['utf-16']})
+ def plain(self, x):
+ return x
+
+ favicon_ico = cherrypy.tools.staticfile.handler(
+ filename=os.path.join(localDir, '../favicon.ico'))
+
+ @cherrypy.config(foo='this2', baz='that2')
+ class Foo:
+
+ @cherrypy.expose
+ def index(self, key):
+ return cherrypy.request.config.get(key, 'None')
+ nex = index
+
+ @cherrypy.expose
+ @cherrypy.config(**{'response.headers.X-silly': 'sillyval'})
+ def silly(self):
+ return 'Hello world'
+
+ # Test the expose and config decorators
+ @cherrypy.config(foo='this3', **{'bax': 'this4'})
+ @cherrypy.expose
+ def bar(self, key):
+ return repr(cherrypy.request.config.get(key, None))
+
+ class Another:
+
+ @cherrypy.expose
+ def index(self, key):
+ return str(cherrypy.request.config.get(key, 'None'))
+
+ def raw_namespace(key, value):
+ if key == 'input.map':
+ handler = cherrypy.request.handler
+
+ def wrapper():
+ params = cherrypy.request.params
+ for name, coercer in value.copy().items():
+ try:
+ params[name] = coercer(params[name])
+ except KeyError:
+ pass
+ return handler()
+ cherrypy.request.handler = wrapper
+ elif key == 'output':
+ handler = cherrypy.request.handler
+
+ def wrapper():
+ # 'value' is a type (like int or str).
+ return value(handler())
+ cherrypy.request.handler = wrapper
+
+ @cherrypy.config(**{'raw.output': repr})
+ class Raw:
+
+ @cherrypy.expose
+ @cherrypy.config(**{'raw.input.map': {'num': int}})
+ def incr(self, num):
+ return num + 1
+
+ ioconf = StringIOFromNative("""
+[/]
+neg: -1234
+filename: os.path.join(sys.prefix, "hello.py")
+thing1: cherrypy.lib.httputil.response_codes[404]
+thing2: __import__('cherrypy.tutorial', globals(), locals(), ['']).thing2
+complex: 3+2j
+mul: 6*3
+ones: "11"
+twos: "22"
+stradd: %%(ones)s + %%(twos)s + "33"
+
+[/favicon.ico]
+tools.staticfile.filename = %r
+""" % os.path.join(localDir, 'static/dirback.jpg'))
+
+ root = Root()
+ root.foo = Foo()
+ root.raw = Raw()
+ app = cherrypy.tree.mount(root, config=ioconf)
+ app.request_class.namespaces['raw'] = raw_namespace
+
+ cherrypy.tree.mount(Another(), '/another')
+ cherrypy.config.update({'luxuryyacht': 'throatwobblermangrove',
+ 'db.scheme': r'sqlite///memory',
+ })
+
+
+# Client-side code #
+
+
+class ConfigTests(helper.CPWebCase):
+ setup_server = staticmethod(setup_server)
+
+ def testConfig(self):
+ tests = [
+ ('/', 'nex', 'None'),
+ ('/', 'foo', 'this'),
+ ('/', 'bar', 'that'),
+ ('/xyz', 'foo', 'this'),
+ ('/foo/', 'foo', 'this2'),
+ ('/foo/', 'bar', 'that'),
+ ('/foo/', 'bax', 'None'),
+ ('/foo/bar', 'baz', "'that2'"),
+ ('/foo/nex', 'baz', 'that2'),
+ # If 'foo' == 'this', then the mount point '/another' leaks into
+ # '/'.
+ ('/another/', 'foo', 'None'),
+ ]
+ for path, key, expected in tests:
+ self.getPage(path + '?key=' + key)
+ self.assertBody(expected)
+
+ expectedconf = {
+ # From CP defaults
+ 'tools.log_headers.on': False,
+ 'tools.log_tracebacks.on': True,
+ 'request.show_tracebacks': True,
+ 'log.screen': False,
+ 'environment': 'test_suite',
+ 'engine.autoreload.on': False,
+ # From global config
+ 'luxuryyacht': 'throatwobblermangrove',
+ # From Root._cp_config
+ 'bar': 'that',
+ # From Foo._cp_config
+ 'baz': 'that2',
+ # From Foo.bar._cp_config
+ 'foo': 'this3',
+ 'bax': 'this4',
+ }
+ for key, expected in expectedconf.items():
+ self.getPage('/foo/bar?key=' + key)
+ self.assertBody(repr(expected))
+
+ def testUnrepr(self):
+ self.getPage('/repr?key=neg')
+ self.assertBody('-1234')
+
+ self.getPage('/repr?key=filename')
+ self.assertBody(repr(os.path.join(sys.prefix, 'hello.py')))
+
+ self.getPage('/repr?key=thing1')
+ self.assertBody(repr(cherrypy.lib.httputil.response_codes[404]))
+
+ if not getattr(cherrypy.server, 'using_apache', False):
+ # The object ID's won't match up when using Apache, since the
+ # server and client are running in different processes.
+ self.getPage('/repr?key=thing2')
+ from cherrypy.tutorial import thing2
+ self.assertBody(repr(thing2))
+
+ self.getPage('/repr?key=complex')
+ self.assertBody('(3+2j)')
+
+ self.getPage('/repr?key=mul')
+ self.assertBody('18')
+
+ self.getPage('/repr?key=stradd')
+ self.assertBody(repr('112233'))
+
+ def testRespNamespaces(self):
+ self.getPage('/foo/silly')
+ self.assertHeader('X-silly', 'sillyval')
+ self.assertBody('Hello world')
+
+ def testCustomNamespaces(self):
+ self.getPage('/raw/incr?num=12')
+ self.assertBody('13')
+
+ self.getPage('/dbscheme')
+ self.assertBody(r'sqlite///memory')
+
+ def testHandlerToolConfigOverride(self):
+ # Assert that config overrides tool constructor args. Above, we set
+ # the favicon in the page handler to be '../favicon.ico',
+ # but then overrode it in config to be './static/dirback.jpg'.
+ self.getPage('/favicon.ico')
+ with open(os.path.join(localDir, 'static/dirback.jpg'), 'rb') as tf:
+ self.assertBody(tf.read())
+
+ def test_request_body_namespace(self):
+ self.getPage('/plain', method='POST', headers=[
+ ('Content-Type', 'application/x-www-form-urlencoded'),
+ ('Content-Length', '13')],
+ body=b'\xff\xfex\x00=\xff\xfea\x00b\x00c\x00')
+ self.assertBody('abc')
+
+
+class VariableSubstitutionTests(unittest.TestCase):
+ setup_server = staticmethod(setup_server)
+
+ def test_config(self):
+ from textwrap import dedent
+
+ # variable substitution with [DEFAULT]
+ conf = dedent("""
+ [DEFAULT]
+ dir = "/some/dir"
+ my.dir = %(dir)s + "/sub"
+
+ [my]
+ my.dir = %(dir)s + "/my/dir"
+ my.dir2 = %(my.dir)s + '/dir2'
+
+ """)
+
+ fp = StringIOFromNative(conf)
+
+ cherrypy.config.update(fp)
+ self.assertEqual(cherrypy.config['my']['my.dir'], '/some/dir/my/dir')
+ self.assertEqual(cherrypy.config['my']
+ ['my.dir2'], '/some/dir/my/dir/dir2')
+
+
+class CallablesInConfigTest(unittest.TestCase):
+ setup_server = staticmethod(setup_server)
+
+ def test_call_with_literal_dict(self):
+ from textwrap import dedent
+ conf = dedent("""
+ [my]
+ value = dict(**{'foo': 'bar'})
+ """)
+ fp = StringIOFromNative(conf)
+ cherrypy.config.update(fp)
+ self.assertEqual(cherrypy.config['my']['value'], {'foo': 'bar'})
+
+ def test_call_with_kwargs(self):
+ from textwrap import dedent
+ conf = dedent("""
+ [my]
+ value = dict(foo="buzz", **cherrypy._test_dict)
+ """)
+ test_dict = {
+ 'foo': 'bar',
+ 'bar': 'foo',
+ 'fizz': 'buzz'
+ }
+ cherrypy._test_dict = test_dict
+ fp = StringIOFromNative(conf)
+ cherrypy.config.update(fp)
+ test_dict['foo'] = 'buzz'
+ self.assertEqual(cherrypy.config['my']['value']['foo'], 'buzz')
+ self.assertEqual(cherrypy.config['my']['value'], test_dict)
+ del cherrypy._test_dict
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_config_server.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_config_server.py
new file mode 100644
index 0000000000000000000000000000000000000000..7b1835304f537528e471c4e0fe32a51a2f4fcfa3
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_config_server.py
@@ -0,0 +1,126 @@
+"""Tests for the CherryPy configuration system."""
+
+import os
+
+import cherrypy
+from cherrypy.test import helper
+
+
+localDir = os.path.join(os.getcwd(), os.path.dirname(__file__))
+
+
+# Client-side code #
+
+
+class ServerConfigTests(helper.CPWebCase):
+
+ @staticmethod
+ def setup_server():
+
+ class Root:
+
+ @cherrypy.expose
+ def index(self):
+ return cherrypy.request.wsgi_environ['SERVER_PORT']
+
+ @cherrypy.expose
+ def upload(self, file):
+ return 'Size: %s' % len(file.file.read())
+
+ @cherrypy.expose
+ @cherrypy.config(**{'request.body.maxbytes': 100})
+ def tinyupload(self):
+ return cherrypy.request.body.read()
+
+ cherrypy.tree.mount(Root())
+
+ cherrypy.config.update({
+ 'server.socket_host': '0.0.0.0',
+ 'server.socket_port': 9876,
+ 'server.max_request_body_size': 200,
+ 'server.max_request_header_size': 500,
+ 'server.socket_timeout': 0.5,
+
+ # Test explicit server.instance
+ 'server.2.instance': 'cherrypy._cpwsgi_server.CPWSGIServer',
+ 'server.2.socket_port': 9877,
+
+ # Test non-numeric <servername>
+ # Also test default server.instance = builtin server
+ 'server.yetanother.socket_port': 9878,
+ })
+
+ PORT = 9876
+
+ def testBasicConfig(self):
+ self.getPage('/')
+ self.assertBody(str(self.PORT))
+
+ def testAdditionalServers(self):
+ if self.scheme == 'https':
+ return self.skip('not available under ssl')
+ self.PORT = 9877
+ self.getPage('/')
+ self.assertBody(str(self.PORT))
+ self.PORT = 9878
+ self.getPage('/')
+ self.assertBody(str(self.PORT))
+
+ def testMaxRequestSizePerHandler(self):
+ if getattr(cherrypy.server, 'using_apache', False):
+ return self.skip('skipped due to known Apache differences... ')
+
+ self.getPage('/tinyupload', method='POST',
+ headers=[('Content-Type', 'text/plain'),
+ ('Content-Length', '100')],
+ body='x' * 100)
+ self.assertStatus(200)
+ self.assertBody('x' * 100)
+
+ self.getPage('/tinyupload', method='POST',
+ headers=[('Content-Type', 'text/plain'),
+ ('Content-Length', '101')],
+ body='x' * 101)
+ self.assertStatus(413)
+
+ def testMaxRequestSize(self):
+ if getattr(cherrypy.server, 'using_apache', False):
+ return self.skip('skipped due to known Apache differences... ')
+
+ for size in (500, 5000, 50000):
+ self.getPage('/', headers=[('From', 'x' * 500)])
+ self.assertStatus(413)
+
+ # Test for https://github.com/cherrypy/cherrypy/issues/421
+ # (Incorrect border condition in readline of SizeCheckWrapper).
+ # This hangs in rev 891 and earlier.
+ lines256 = 'x' * 248
+ self.getPage('/',
+ headers=[('Host', '%s:%s' % (self.HOST, self.PORT)),
+ ('From', lines256)])
+
+ # Test upload
+ cd = (
+ 'Content-Disposition: form-data; '
+ 'name="file"; '
+ 'filename="hello.txt"'
+ )
+ body = '\r\n'.join([
+ '--x',
+ cd,
+ 'Content-Type: text/plain',
+ '',
+ '%s',
+ '--x--'])
+ partlen = 200 - len(body)
+ b = body % ('x' * partlen)
+ h = [('Content-type', 'multipart/form-data; boundary=x'),
+ ('Content-Length', '%s' % len(b))]
+ self.getPage('/upload', h, 'POST', b)
+ self.assertBody('Size: %d' % partlen)
+
+ b = body % ('x' * 200)
+ h = [('Content-type', 'multipart/form-data; boundary=x'),
+ ('Content-Length', '%s' % len(b))]
+ self.getPage('/upload', h, 'POST', b)
+ self.assertStatus(413)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_conn.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_conn.py
new file mode 100644
index 0000000000000000000000000000000000000000..e4426c422f8899ad108b2451a9d57e86244ce10c
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_conn.py
@@ -0,0 +1,864 @@
+"""Tests for TCP connection handling, including proper and timely close."""
+
+import errno
+import socket
+import sys
+import time
+import urllib.parse
+from http.client import BadStatusLine, HTTPConnection, NotConnected
+
+from cheroot.test import webtest
+
+import cherrypy
+from cherrypy._cpcompat import HTTPSConnection, ntob, tonative
+from cherrypy.test import helper
+
+
+timeout = 1
+pov = 'pPeErRsSiIsStTeEnNcCeE oOfF vViIsSiIoOnN'
+
+
+def setup_server():
+
+ def raise500():
+ raise cherrypy.HTTPError(500)
+
+ class Root:
+
+ @cherrypy.expose
+ def index(self):
+ return pov
+ page1 = index
+ page2 = index
+ page3 = index
+
+ @cherrypy.expose
+ def hello(self):
+ return 'Hello, world!'
+
+ @cherrypy.expose
+ def timeout(self, t):
+ return str(cherrypy.server.httpserver.timeout)
+
+ @cherrypy.expose
+ @cherrypy.config(**{'response.stream': True})
+ def stream(self, set_cl=False):
+ if set_cl:
+ cherrypy.response.headers['Content-Length'] = 10
+
+ def content():
+ for x in range(10):
+ yield str(x)
+
+ return content()
+
+ @cherrypy.expose
+ def error(self, code=500):
+ raise cherrypy.HTTPError(code)
+
+ @cherrypy.expose
+ def upload(self):
+ if not cherrypy.request.method == 'POST':
+ raise AssertionError("'POST' != request.method %r" %
+ cherrypy.request.method)
+ return "thanks for '%s'" % cherrypy.request.body.read()
+
+ @cherrypy.expose
+ def custom(self, response_code):
+ cherrypy.response.status = response_code
+ return 'Code = %s' % response_code
+
+ @cherrypy.expose
+ @cherrypy.config(**{'hooks.on_start_resource': raise500})
+ def err_before_read(self):
+ return 'ok'
+
+ @cherrypy.expose
+ def one_megabyte_of_a(self):
+ return ['a' * 1024] * 1024
+
+ @cherrypy.expose
+        # Turn off the encoding tool so it doesn't collapse
+        # our response body and recalculate the Content-Length.
+ @cherrypy.config(**{'tools.encode.on': False})
+ def custom_cl(self, body, cl):
+ cherrypy.response.headers['Content-Length'] = cl
+ if not isinstance(body, list):
+ body = [body]
+ newbody = []
+ for chunk in body:
+ if isinstance(chunk, str):
+ chunk = chunk.encode('ISO-8859-1')
+ newbody.append(chunk)
+ return newbody
+
+ cherrypy.tree.mount(Root())
+ cherrypy.config.update({
+ 'server.max_request_body_size': 1001,
+ 'server.socket_timeout': timeout,
+ })
+
+
+class ConnectionCloseTests(helper.CPWebCase):
+ setup_server = staticmethod(setup_server)
+
+ def test_HTTP11(self):
+ if cherrypy.server.protocol_version != 'HTTP/1.1':
+ return self.skip()
+
+ self.PROTOCOL = 'HTTP/1.1'
+
+ self.persistent = True
+
+ # Make the first request and assert there's no "Connection: close".
+ self.getPage('/')
+ self.assertStatus('200 OK')
+ self.assertBody(pov)
+ self.assertNoHeader('Connection')
+
+ # Make another request on the same connection.
+ self.getPage('/page1')
+ self.assertStatus('200 OK')
+ self.assertBody(pov)
+ self.assertNoHeader('Connection')
+
+ # Test client-side close.
+ self.getPage('/page2', headers=[('Connection', 'close')])
+ self.assertStatus('200 OK')
+ self.assertBody(pov)
+ self.assertHeader('Connection', 'close')
+
+ # Make another request on the same connection, which should error.
+ self.assertRaises(NotConnected, self.getPage, '/')
+
+ def test_Streaming_no_len(self):
+ try:
+ self._streaming(set_cl=False)
+ finally:
+ try:
+ self.HTTP_CONN.close()
+ except (TypeError, AttributeError):
+ pass
+
+ def test_Streaming_with_len(self):
+ try:
+ self._streaming(set_cl=True)
+ finally:
+ try:
+ self.HTTP_CONN.close()
+ except (TypeError, AttributeError):
+ pass
+
+ def _streaming(self, set_cl):
+ if cherrypy.server.protocol_version == 'HTTP/1.1':
+ self.PROTOCOL = 'HTTP/1.1'
+
+ self.persistent = True
+
+ # Make the first request and assert there's no "Connection: close".
+ self.getPage('/')
+ self.assertStatus('200 OK')
+ self.assertBody(pov)
+ self.assertNoHeader('Connection')
+
+ # Make another, streamed request on the same connection.
+ if set_cl:
+ # When a Content-Length is provided, the content should stream
+ # without closing the connection.
+ self.getPage('/stream?set_cl=Yes')
+ self.assertHeader('Content-Length')
+ self.assertNoHeader('Connection', 'close')
+ self.assertNoHeader('Transfer-Encoding')
+
+ self.assertStatus('200 OK')
+ self.assertBody('0123456789')
+ else:
+ # When no Content-Length response header is provided,
+ # streamed output will either close the connection, or use
+ # chunked encoding, to determine transfer-length.
+ self.getPage('/stream')
+ self.assertNoHeader('Content-Length')
+ self.assertStatus('200 OK')
+ self.assertBody('0123456789')
+
+ chunked_response = False
+ for k, v in self.headers:
+ if k.lower() == 'transfer-encoding':
+ if str(v) == 'chunked':
+ chunked_response = True
+
+ if chunked_response:
+ self.assertNoHeader('Connection', 'close')
+ else:
+ self.assertHeader('Connection', 'close')
+
+ # Make another request on the same connection, which should
+ # error.
+ self.assertRaises(NotConnected, self.getPage, '/')
+
+ # Try HEAD. See
+ # https://github.com/cherrypy/cherrypy/issues/864.
+ self.getPage('/stream', method='HEAD')
+ self.assertStatus('200 OK')
+ self.assertBody('')
+ self.assertNoHeader('Transfer-Encoding')
+ else:
+ self.PROTOCOL = 'HTTP/1.0'
+
+ self.persistent = True
+
+ # Make the first request and assert Keep-Alive.
+ self.getPage('/', headers=[('Connection', 'Keep-Alive')])
+ self.assertStatus('200 OK')
+ self.assertBody(pov)
+ self.assertHeader('Connection', 'Keep-Alive')
+
+ # Make another, streamed request on the same connection.
+ if set_cl:
+ # When a Content-Length is provided, the content should
+ # stream without closing the connection.
+ self.getPage('/stream?set_cl=Yes',
+ headers=[('Connection', 'Keep-Alive')])
+ self.assertHeader('Content-Length')
+ self.assertHeader('Connection', 'Keep-Alive')
+ self.assertNoHeader('Transfer-Encoding')
+ self.assertStatus('200 OK')
+ self.assertBody('0123456789')
+ else:
+ # When a Content-Length is not provided,
+ # the server should close the connection.
+ self.getPage('/stream', headers=[('Connection', 'Keep-Alive')])
+ self.assertStatus('200 OK')
+ self.assertBody('0123456789')
+
+ self.assertNoHeader('Content-Length')
+ self.assertNoHeader('Connection', 'Keep-Alive')
+ self.assertNoHeader('Transfer-Encoding')
+
+ # Make another request on the same connection, which should
+ # error.
+ self.assertRaises(NotConnected, self.getPage, '/')
+
+ def test_HTTP10_KeepAlive(self):
+ self.PROTOCOL = 'HTTP/1.0'
+ if self.scheme == 'https':
+ self.HTTP_CONN = HTTPSConnection
+ else:
+ self.HTTP_CONN = HTTPConnection
+
+ # Test a normal HTTP/1.0 request.
+ self.getPage('/page2')
+ self.assertStatus('200 OK')
+ self.assertBody(pov)
+ # Apache, for example, may emit a Connection header even for HTTP/1.0
+ # self.assertNoHeader("Connection")
+
+ # Test a keep-alive HTTP/1.0 request.
+ self.persistent = True
+
+ self.getPage('/page3', headers=[('Connection', 'Keep-Alive')])
+ self.assertStatus('200 OK')
+ self.assertBody(pov)
+ self.assertHeader('Connection', 'Keep-Alive')
+
+ # Remove the keep-alive header again.
+ self.getPage('/page3')
+ self.assertStatus('200 OK')
+ self.assertBody(pov)
+ # Apache, for example, may emit a Connection header even for HTTP/1.0
+ # self.assertNoHeader("Connection")
+
+
+class PipelineTests(helper.CPWebCase):
+ setup_server = staticmethod(setup_server)
+
+ def test_HTTP11_Timeout(self):
+ # If we timeout without sending any data,
+ # the server will close the conn with a 408.
+ if cherrypy.server.protocol_version != 'HTTP/1.1':
+ return self.skip()
+
+ self.PROTOCOL = 'HTTP/1.1'
+
+ # Connect but send nothing.
+ self.persistent = True
+ conn = self.HTTP_CONN
+ conn.auto_open = False
+ conn.connect()
+
+ # Wait for our socket timeout
+ time.sleep(timeout * 2)
+
+ # The request should have returned 408 already.
+ response = conn.response_class(conn.sock, method='GET')
+ response.begin()
+ self.assertEqual(response.status, 408)
+ conn.close()
+
+ # Connect but send half the headers only.
+ self.persistent = True
+ conn = self.HTTP_CONN
+ conn.auto_open = False
+ conn.connect()
+ conn.send(b'GET /hello HTTP/1.1')
+ conn.send(('Host: %s' % self.HOST).encode('ascii'))
+
+ # Wait for our socket timeout
+ time.sleep(timeout * 2)
+
+ # The conn should have already sent 408.
+ response = conn.response_class(conn.sock, method='GET')
+ response.begin()
+ self.assertEqual(response.status, 408)
+ conn.close()
+
+ def test_HTTP11_Timeout_after_request(self):
+ # If we timeout after at least one request has succeeded,
+ # the server will close the conn without 408.
+ if cherrypy.server.protocol_version != 'HTTP/1.1':
+ return self.skip()
+
+ self.PROTOCOL = 'HTTP/1.1'
+
+ # Make an initial request
+ self.persistent = True
+ conn = self.HTTP_CONN
+ conn.putrequest('GET', '/timeout?t=%s' % timeout, skip_host=True)
+ conn.putheader('Host', self.HOST)
+ conn.endheaders()
+ response = conn.response_class(conn.sock, method='GET')
+ response.begin()
+ self.assertEqual(response.status, 200)
+ self.body = response.read()
+ self.assertBody(str(timeout))
+
+ # Make a second request on the same socket
+ conn._output(b'GET /hello HTTP/1.1')
+ conn._output(ntob('Host: %s' % self.HOST, 'ascii'))
+ conn._send_output()
+ response = conn.response_class(conn.sock, method='GET')
+ response.begin()
+ self.assertEqual(response.status, 200)
+ self.body = response.read()
+ self.assertBody('Hello, world!')
+
+ # Wait for our socket timeout
+ time.sleep(timeout * 2)
+
+ # Make another request on the same socket, which should error
+ conn._output(b'GET /hello HTTP/1.1')
+ conn._output(ntob('Host: %s' % self.HOST, 'ascii'))
+ conn._send_output()
+ response = conn.response_class(conn.sock, method='GET')
+ msg = (
+ "Writing to timed out socket didn't fail as it should have: %s")
+ try:
+ response.begin()
+ except Exception:
+ if not isinstance(sys.exc_info()[1],
+ (socket.error, BadStatusLine)):
+ self.fail(msg % sys.exc_info()[1])
+ else:
+ if response.status != 408:
+ self.fail(msg % response.read())
+
+ conn.close()
+
+ # Make another request on a new socket, which should work
+ self.persistent = True
+ conn = self.HTTP_CONN
+ conn.putrequest('GET', '/', skip_host=True)
+ conn.putheader('Host', self.HOST)
+ conn.endheaders()
+ response = conn.response_class(conn.sock, method='GET')
+ response.begin()
+ self.assertEqual(response.status, 200)
+ self.body = response.read()
+ self.assertBody(pov)
+
+ # Make another request on the same socket,
+ # but timeout on the headers
+ conn.send(b'GET /hello HTTP/1.1')
+ # Wait for our socket timeout
+ time.sleep(timeout * 2)
+ response = conn.response_class(conn.sock, method='GET')
+ try:
+ response.begin()
+ except Exception:
+ if not isinstance(sys.exc_info()[1],
+ (socket.error, BadStatusLine)):
+ self.fail(msg % sys.exc_info()[1])
+ else:
+ if response.status != 408:
+ self.fail(msg % response.read())
+
+ conn.close()
+
+ # Retry the request on a new connection, which should work
+ self.persistent = True
+ conn = self.HTTP_CONN
+ conn.putrequest('GET', '/', skip_host=True)
+ conn.putheader('Host', self.HOST)
+ conn.endheaders()
+ response = conn.response_class(conn.sock, method='GET')
+ response.begin()
+ self.assertEqual(response.status, 200)
+ self.body = response.read()
+ self.assertBody(pov)
+ conn.close()
+
+ def test_HTTP11_pipelining(self):
+ if cherrypy.server.protocol_version != 'HTTP/1.1':
+ return self.skip()
+
+ self.PROTOCOL = 'HTTP/1.1'
+
+ # Test pipelining. httplib doesn't support this directly.
+ self.persistent = True
+ conn = self.HTTP_CONN
+
+ # Put request 1
+ conn.putrequest('GET', '/hello', skip_host=True)
+ conn.putheader('Host', self.HOST)
+ conn.endheaders()
+
+ for trial in range(5):
+ # Put next request
+ conn._output(b'GET /hello HTTP/1.1')
+ conn._output(ntob('Host: %s' % self.HOST, 'ascii'))
+ conn._send_output()
+
+ # Retrieve previous response
+ response = conn.response_class(conn.sock, method='GET')
+ # there is a bug in python3 regarding the buffering of
+            # ``conn.sock``. Until that bug gets fixed we will
+ # monkey patch the ``response`` instance.
+ # https://bugs.python.org/issue23377
+ response.fp = conn.sock.makefile('rb', 0)
+ response.begin()
+ body = response.read(13)
+ self.assertEqual(response.status, 200)
+ self.assertEqual(body, b'Hello, world!')
+
+ # Retrieve final response
+ response = conn.response_class(conn.sock, method='GET')
+ response.begin()
+ body = response.read()
+ self.assertEqual(response.status, 200)
+ self.assertEqual(body, b'Hello, world!')
+
+ conn.close()
+
+ def test_100_Continue(self):
+ if cherrypy.server.protocol_version != 'HTTP/1.1':
+ return self.skip()
+
+ self.PROTOCOL = 'HTTP/1.1'
+
+ self.persistent = True
+ conn = self.HTTP_CONN
+
+ # Try a page without an Expect request header first.
+ # Note that httplib's response.begin automatically ignores
+ # 100 Continue responses, so we must manually check for it.
+ try:
+ conn.putrequest('POST', '/upload', skip_host=True)
+ conn.putheader('Host', self.HOST)
+ conn.putheader('Content-Type', 'text/plain')
+ conn.putheader('Content-Length', '4')
+ conn.endheaders()
+ conn.send(ntob("d'oh"))
+ response = conn.response_class(conn.sock, method='POST')
+ version, status, reason = response._read_status()
+ self.assertNotEqual(status, 100)
+ finally:
+ conn.close()
+
+ # Now try a page with an Expect header...
+ try:
+ conn.connect()
+ conn.putrequest('POST', '/upload', skip_host=True)
+ conn.putheader('Host', self.HOST)
+ conn.putheader('Content-Type', 'text/plain')
+ conn.putheader('Content-Length', '17')
+ conn.putheader('Expect', '100-continue')
+ conn.endheaders()
+ response = conn.response_class(conn.sock, method='POST')
+
+ # ...assert and then skip the 100 response
+ version, status, reason = response._read_status()
+ self.assertEqual(status, 100)
+ while True:
+ line = response.fp.readline().strip()
+ if line:
+ self.fail(
+ '100 Continue should not output any headers. Got %r' %
+ line)
+ else:
+ break
+
+ # ...send the body
+ body = b'I am a small file'
+ conn.send(body)
+
+ # ...get the final response
+ response.begin()
+ self.status, self.headers, self.body = webtest.shb(response)
+ self.assertStatus(200)
+ self.assertBody("thanks for '%s'" % body)
+ finally:
+ conn.close()
+
+
+class ConnectionTests(helper.CPWebCase):
+ setup_server = staticmethod(setup_server)
+
+ def test_readall_or_close(self):
+ if cherrypy.server.protocol_version != 'HTTP/1.1':
+ return self.skip()
+
+ self.PROTOCOL = 'HTTP/1.1'
+
+ if self.scheme == 'https':
+ self.HTTP_CONN = HTTPSConnection
+ else:
+ self.HTTP_CONN = HTTPConnection
+
+ # Test a max of 0 (the default) and then reset to what it was above.
+ old_max = cherrypy.server.max_request_body_size
+ for new_max in (0, old_max):
+ cherrypy.server.max_request_body_size = new_max
+
+ self.persistent = True
+ conn = self.HTTP_CONN
+
+ # Get a POST page with an error
+ conn.putrequest('POST', '/err_before_read', skip_host=True)
+ conn.putheader('Host', self.HOST)
+ conn.putheader('Content-Type', 'text/plain')
+ conn.putheader('Content-Length', '1000')
+ conn.putheader('Expect', '100-continue')
+ conn.endheaders()
+ response = conn.response_class(conn.sock, method='POST')
+
+ # ...assert and then skip the 100 response
+ version, status, reason = response._read_status()
+ self.assertEqual(status, 100)
+ while True:
+ skip = response.fp.readline().strip()
+ if not skip:
+ break
+
+ # ...send the body
+ conn.send(ntob('x' * 1000))
+
+ # ...get the final response
+ response.begin()
+ self.status, self.headers, self.body = webtest.shb(response)
+ self.assertStatus(500)
+
+ # Now try a working page with an Expect header...
+ conn._output(b'POST /upload HTTP/1.1')
+ conn._output(ntob('Host: %s' % self.HOST, 'ascii'))
+ conn._output(b'Content-Type: text/plain')
+ conn._output(b'Content-Length: 17')
+ conn._output(b'Expect: 100-continue')
+ conn._send_output()
+ response = conn.response_class(conn.sock, method='POST')
+
+ # ...assert and then skip the 100 response
+ version, status, reason = response._read_status()
+ self.assertEqual(status, 100)
+ while True:
+ skip = response.fp.readline().strip()
+ if not skip:
+ break
+
+ # ...send the body
+ body = b'I am a small file'
+ conn.send(body)
+
+ # ...get the final response
+ response.begin()
+ self.status, self.headers, self.body = webtest.shb(response)
+ self.assertStatus(200)
+ self.assertBody("thanks for '%s'" % body)
+ conn.close()
+
+ def test_No_Message_Body(self):
+ if cherrypy.server.protocol_version != 'HTTP/1.1':
+ return self.skip()
+
+ self.PROTOCOL = 'HTTP/1.1'
+
+ # Set our HTTP_CONN to an instance so it persists between requests.
+ self.persistent = True
+
+ # Make the first request and assert there's no "Connection: close".
+ self.getPage('/')
+ self.assertStatus('200 OK')
+ self.assertBody(pov)
+ self.assertNoHeader('Connection')
+
+ # Make a 204 request on the same connection.
+ self.getPage('/custom/204')
+ self.assertStatus(204)
+ self.assertNoHeader('Content-Length')
+ self.assertBody('')
+ self.assertNoHeader('Connection')
+
+ # Make a 304 request on the same connection.
+ self.getPage('/custom/304')
+ self.assertStatus(304)
+ self.assertNoHeader('Content-Length')
+ self.assertBody('')
+ self.assertNoHeader('Connection')
+
+ def test_Chunked_Encoding(self):
+ if cherrypy.server.protocol_version != 'HTTP/1.1':
+ return self.skip()
+
+ if (hasattr(self, 'harness') and
+ 'modpython' in self.harness.__class__.__name__.lower()):
+ # mod_python forbids chunked encoding
+ return self.skip()
+
+ self.PROTOCOL = 'HTTP/1.1'
+
+ # Set our HTTP_CONN to an instance so it persists between requests.
+ self.persistent = True
+ conn = self.HTTP_CONN
+
+ # Try a normal chunked request (with extensions)
+ body = ntob('8;key=value\r\nxx\r\nxxxx\r\n5\r\nyyyyy\r\n0\r\n'
+ 'Content-Type: application/json\r\n'
+ '\r\n')
+ conn.putrequest('POST', '/upload', skip_host=True)
+ conn.putheader('Host', self.HOST)
+ conn.putheader('Transfer-Encoding', 'chunked')
+ conn.putheader('Trailer', 'Content-Type')
+ # Note that this is somewhat malformed:
+ # we shouldn't be sending Content-Length.
+ # RFC 2616 says the server should ignore it.
+ conn.putheader('Content-Length', '3')
+ conn.endheaders()
+ conn.send(body)
+ response = conn.getresponse()
+ self.status, self.headers, self.body = webtest.shb(response)
+ self.assertStatus('200 OK')
+ self.assertBody("thanks for '%s'" % b'xx\r\nxxxxyyyyy')
+
+ # Try a chunked request that exceeds server.max_request_body_size.
+ # Note that the delimiters and trailer are included.
+ body = ntob('3e3\r\n' + ('x' * 995) + '\r\n0\r\n\r\n')
+ conn.putrequest('POST', '/upload', skip_host=True)
+ conn.putheader('Host', self.HOST)
+ conn.putheader('Transfer-Encoding', 'chunked')
+ conn.putheader('Content-Type', 'text/plain')
+ # Chunked requests don't need a content-length
+ # # conn.putheader("Content-Length", len(body))
+ conn.endheaders()
+ conn.send(body)
+ response = conn.getresponse()
+ self.status, self.headers, self.body = webtest.shb(response)
+ self.assertStatus(413)
+ conn.close()
+
+ def test_Content_Length_in(self):
+ # Try a non-chunked request where Content-Length exceeds
+ # server.max_request_body_size. Assert error before body send.
+ self.persistent = True
+ conn = self.HTTP_CONN
+ conn.putrequest('POST', '/upload', skip_host=True)
+ conn.putheader('Host', self.HOST)
+ conn.putheader('Content-Type', 'text/plain')
+ conn.putheader('Content-Length', '9999')
+ conn.endheaders()
+ response = conn.getresponse()
+ self.status, self.headers, self.body = webtest.shb(response)
+ self.assertStatus(413)
+ self.assertBody('The entity sent with the request exceeds '
+ 'the maximum allowed bytes.')
+ conn.close()
+
+ def test_Content_Length_out_preheaders(self):
+ # Try a non-chunked response where Content-Length is less than
+ # the actual bytes in the response body.
+ self.persistent = True
+ conn = self.HTTP_CONN
+ conn.putrequest('GET', '/custom_cl?body=I+have+too+many+bytes&cl=5',
+ skip_host=True)
+ conn.putheader('Host', self.HOST)
+ conn.endheaders()
+ response = conn.getresponse()
+ self.status, self.headers, self.body = webtest.shb(response)
+ self.assertStatus(500)
+ self.assertBody(
+ 'The requested resource returned more bytes than the '
+ 'declared Content-Length.')
+ conn.close()
+
+ def test_Content_Length_out_postheaders(self):
+ # Try a non-chunked response where Content-Length is less than
+ # the actual bytes in the response body.
+ self.persistent = True
+ conn = self.HTTP_CONN
+ conn.putrequest(
+ 'GET', '/custom_cl?body=I+too&body=+have+too+many&cl=5',
+ skip_host=True)
+ conn.putheader('Host', self.HOST)
+ conn.endheaders()
+ response = conn.getresponse()
+ self.status, self.headers, self.body = webtest.shb(response)
+ self.assertStatus(200)
+ self.assertBody('I too')
+ conn.close()
+
+ def test_598(self):
+ tmpl = '{scheme}://{host}:{port}/one_megabyte_of_a/'
+ url = tmpl.format(
+ scheme=self.scheme,
+ host=self.HOST,
+ port=self.PORT,
+ )
+ remote_data_conn = urllib.request.urlopen(url)
+ buf = remote_data_conn.read(512)
+ time.sleep(timeout * 0.6)
+ remaining = (1024 * 1024) - 512
+ while remaining:
+ data = remote_data_conn.read(remaining)
+ if not data:
+ break
+ else:
+ buf += data
+ remaining -= len(data)
+
+ self.assertEqual(len(buf), 1024 * 1024)
+ self.assertEqual(buf, ntob('a' * 1024 * 1024))
+ self.assertEqual(remaining, 0)
+ remote_data_conn.close()
+
+
+def setup_upload_server():
+
+ class Root:
+ @cherrypy.expose
+ def upload(self):
+ if not cherrypy.request.method == 'POST':
+ raise AssertionError("'POST' != request.method %r" %
+ cherrypy.request.method)
+ return "thanks for '%s'" % tonative(cherrypy.request.body.read())
+
+ cherrypy.tree.mount(Root())
+ cherrypy.config.update({
+ 'server.max_request_body_size': 1001,
+ 'server.socket_timeout': 10,
+ 'server.accepted_queue_size': 5,
+ 'server.accepted_queue_timeout': 0.1,
+ })
+
+
+reset_names = 'ECONNRESET', 'WSAECONNRESET'
+socket_reset_errors = [
+ getattr(errno, name)
+ for name in reset_names
+ if hasattr(errno, name)
+]
+'reset error numbers available on this platform'
+
+socket_reset_errors += [
+ # Python 3.5 raises an http.client.RemoteDisconnected
+ # with this message
+ 'Remote end closed connection without response',
+]
+
+
+class LimitedRequestQueueTests(helper.CPWebCase):
+ setup_server = staticmethod(setup_upload_server)
+
+ def test_queue_full(self):
+ conns = []
+ overflow_conn = None
+
+ try:
+ # Make 15 initial requests and leave them open, which should use
+ # all of wsgiserver's WorkerThreads and fill its Queue.
+ for i in range(15):
+ conn = self.HTTP_CONN(self.HOST, self.PORT)
+ conn.putrequest('POST', '/upload', skip_host=True)
+ conn.putheader('Host', self.HOST)
+ conn.putheader('Content-Type', 'text/plain')
+ conn.putheader('Content-Length', '4')
+ conn.endheaders()
+ conns.append(conn)
+
+ # Now try a 16th conn, which should be closed by the
+ # server immediately.
+ overflow_conn = self.HTTP_CONN(self.HOST, self.PORT)
+ # Manually connect since httplib won't let us set a timeout
+ for res in socket.getaddrinfo(self.HOST, self.PORT, 0,
+ socket.SOCK_STREAM):
+ af, socktype, proto, canonname, sa = res
+ overflow_conn.sock = socket.socket(af, socktype, proto)
+ overflow_conn.sock.settimeout(5)
+ overflow_conn.sock.connect(sa)
+ break
+
+ overflow_conn.putrequest('GET', '/', skip_host=True)
+ overflow_conn.putheader('Host', self.HOST)
+ overflow_conn.endheaders()
+ response = overflow_conn.response_class(
+ overflow_conn.sock,
+ method='GET',
+ )
+ try:
+ response.begin()
+ except socket.error as exc:
+ if exc.args[0] in socket_reset_errors:
+ pass # Expected.
+ else:
+ tmpl = (
+ 'Overflow conn did not get RST. '
+ 'Got {exc.args!r} instead'
+ )
+ raise AssertionError(tmpl.format(**locals()))
+ except BadStatusLine:
+ # This is a special case in OS X. Linux and Windows will
+ # RST correctly.
+ assert sys.platform == 'darwin'
+ else:
+ raise AssertionError('Overflow conn did not get RST ')
+ finally:
+ for conn in conns:
+ conn.send(b'done')
+ response = conn.response_class(conn.sock, method='POST')
+ response.begin()
+ self.body = response.read()
+ self.assertBody("thanks for 'done'")
+ self.assertEqual(response.status, 200)
+ conn.close()
+ if overflow_conn:
+ overflow_conn.close()
+
+
+class BadRequestTests(helper.CPWebCase):
+ setup_server = staticmethod(setup_server)
+
+ def test_No_CRLF(self):
+ self.persistent = True
+
+ conn = self.HTTP_CONN
+ conn.send(b'GET /hello HTTP/1.1\n\n')
+ response = conn.response_class(conn.sock, method='GET')
+ response.begin()
+ self.body = response.read()
+ self.assertBody('HTTP requires CRLF terminators')
+ conn.close()
+
+ conn.connect()
+ conn.send(b'GET /hello HTTP/1.1\r\n\n')
+ response = conn.response_class(conn.sock, method='GET')
+ response.begin()
+ self.body = response.read()
+ self.assertBody('HTTP requires CRLF terminators')
+ conn.close()
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_core.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_core.py
new file mode 100644
index 0000000000000000000000000000000000000000..6fde3a973b152bc90f4f2a8ae18e000ea05dfc7d
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_core.py
@@ -0,0 +1,826 @@
+# coding: utf-8
+
+"""Basic tests for the CherryPy core: request handling."""
+
+import os
+import sys
+import types
+
+import cherrypy
+from cherrypy._cpcompat import ntou
+from cherrypy import _cptools, tools
+from cherrypy.lib import httputil, static
+
+from cherrypy.test._test_decorators import ExposeExamples
+from cherrypy.test import helper
+
+
+localDir = os.path.dirname(__file__)
+favicon_path = os.path.join(os.getcwd(), localDir, '../favicon.ico')
+
+# Client-side code #
+
+
+class CoreRequestHandlingTest(helper.CPWebCase):
+
+ @staticmethod
+ def setup_server():
+ class Root:
+
+ @cherrypy.expose
+ def index(self):
+ return 'hello'
+
+ favicon_ico = tools.staticfile.handler(filename=favicon_path)
+
+ @cherrypy.expose
+ def defct(self, newct):
+ newct = 'text/%s' % newct
+ cherrypy.config.update({'tools.response_headers.on': True,
+ 'tools.response_headers.headers':
+ [('Content-Type', newct)]})
+
+ @cherrypy.expose
+ def baseurl(self, path_info, relative=None):
+ return cherrypy.url(path_info, relative=bool(relative))
+
+ root = Root()
+ root.expose_dec = ExposeExamples()
+
+ class TestType(type):
+
+ """Metaclass which automatically exposes all functions in each
+ subclass, and adds an instance of the subclass as an attribute
+ of root.
+ """
+ def __init__(cls, name, bases, dct):
+ type.__init__(cls, name, bases, dct)
+ for value in dct.values():
+ if isinstance(value, types.FunctionType):
+ value.exposed = True
+ setattr(root, name.lower(), cls())
+ Test = TestType('Test', (object, ), {})
+
+ @cherrypy.config(**{'tools.trailing_slash.on': False})
+ class URL(Test):
+
+ def index(self, path_info, relative=None):
+ if relative != 'server':
+ relative = bool(relative)
+ return cherrypy.url(path_info, relative=relative)
+
+ def leaf(self, path_info, relative=None):
+ if relative != 'server':
+ relative = bool(relative)
+ return cherrypy.url(path_info, relative=relative)
+
+ def qs(self, qs):
+ return cherrypy.url(qs=qs)
+
+ def log_status():
+ Status.statuses.append(cherrypy.response.status)
+ cherrypy.tools.log_status = cherrypy.Tool(
+ 'on_end_resource', log_status)
+
+ class Status(Test):
+
+ def index(self):
+ return 'normal'
+
+ def blank(self):
+ cherrypy.response.status = ''
+
+ # According to RFC 2616, new status codes are OK as long as they
+ # are between 100 and 599.
+
+ # Here is an illegal code...
+ def illegal(self):
+ cherrypy.response.status = 781
+ return 'oops'
+
+ # ...and here is an unknown but legal code.
+ def unknown(self):
+ cherrypy.response.status = '431 My custom error'
+ return 'funky'
+
+ # Non-numeric code
+ def bad(self):
+ cherrypy.response.status = 'error'
+ return 'bad news'
+
+ statuses = []
+
+ @cherrypy.config(**{'tools.log_status.on': True})
+ def on_end_resource_stage(self):
+ return repr(self.statuses)
+
+ class Redirect(Test):
+
+ @cherrypy.config(**{
+ 'tools.err_redirect.on': True,
+ 'tools.err_redirect.url': '/errpage',
+ 'tools.err_redirect.internal': False,
+ })
+ class Error:
+ @cherrypy.expose
+ def index(self):
+ raise NameError('redirect_test')
+
+ error = Error()
+
+ def index(self):
+ return 'child'
+
+ def custom(self, url, code):
+ raise cherrypy.HTTPRedirect(url, code)
+
+ @cherrypy.config(**{'tools.trailing_slash.extra': True})
+ def by_code(self, code):
+ raise cherrypy.HTTPRedirect('somewhere%20else', code)
+
+ def nomodify(self):
+ raise cherrypy.HTTPRedirect('', 304)
+
+ def proxy(self):
+ raise cherrypy.HTTPRedirect('proxy', 305)
+
+ def stringify(self):
+ return str(cherrypy.HTTPRedirect('/'))
+
+ def fragment(self, frag):
+ raise cherrypy.HTTPRedirect('/some/url#%s' % frag)
+
+ def url_with_quote(self):
+ raise cherrypy.HTTPRedirect("/some\"url/that'we/want")
+
+ def url_with_xss(self):
+ raise cherrypy.HTTPRedirect(
+ "/some<script>alert(1);</script>url/that'we/want")
+
+ def url_with_unicode(self):
+ raise cherrypy.HTTPRedirect(ntou('тест', 'utf-8'))
+
+ def login_redir():
+ if not getattr(cherrypy.request, 'login', None):
+ raise cherrypy.InternalRedirect('/internalredirect/login')
+ tools.login_redir = _cptools.Tool('before_handler', login_redir)
+
+ def redir_custom():
+ raise cherrypy.InternalRedirect('/internalredirect/custom_err')
+
+ class InternalRedirect(Test):
+
+ def index(self):
+ raise cherrypy.InternalRedirect('/')
+
+ @cherrypy.expose
+ @cherrypy.config(**{'hooks.before_error_response': redir_custom})
+ def choke(self):
+ return 3 / 0
+
+ def relative(self, a, b):
+ raise cherrypy.InternalRedirect('cousin?t=6')
+
+ def cousin(self, t):
+ assert cherrypy.request.prev.closed
+ return cherrypy.request.prev.query_string
+
+ def petshop(self, user_id):
+ if user_id == 'parrot':
+ # Trade it for a slug when redirecting
+ raise cherrypy.InternalRedirect(
+ '/image/getImagesByUser?user_id=slug')
+ elif user_id == 'terrier':
+ # Trade it for a fish when redirecting
+ raise cherrypy.InternalRedirect(
+ '/image/getImagesByUser?user_id=fish')
+ else:
+ # This should pass the user_id through to getImagesByUser
+ raise cherrypy.InternalRedirect(
+ '/image/getImagesByUser?user_id=%s' % str(user_id))
+
+ # We support Python 2.3, but the @-deco syntax would look like
+ # this:
+ # @tools.login_redir()
+ def secure(self):
+ return 'Welcome!'
+ secure = tools.login_redir()(secure)
+ # Since calling the tool returns the same function you pass in,
+ # you could skip binding the return value, and just write:
+ # tools.login_redir()(secure)
+
+ def login(self):
+ return 'Please log in'
+
+ def custom_err(self):
+ return 'Something went horribly wrong.'
+
+ @cherrypy.config(**{'hooks.before_request_body': redir_custom})
+ def early_ir(self, arg):
+ return 'whatever'
+
+ class Image(Test):
+
+ def getImagesByUser(self, user_id):
+ return '0 images for %s' % user_id
+
+ class Flatten(Test):
+
+ def as_string(self):
+ return 'content'
+
+ def as_list(self):
+ return ['con', 'tent']
+
+ def as_yield(self):
+ yield b'content'
+
+ @cherrypy.config(**{'tools.flatten.on': True})
+ def as_dblyield(self):
+ yield self.as_yield()
+
+ def as_refyield(self):
+ for chunk in self.as_yield():
+ yield chunk
+
+ class Ranges(Test):
+
+ def get_ranges(self, bytes):
+ return repr(httputil.get_ranges('bytes=%s' % bytes, 8))
+
+ def slice_file(self):
+ path = os.path.join(os.getcwd(), os.path.dirname(__file__))
+ return static.serve_file(
+ os.path.join(path, 'static/index.html'))
+
+ class Cookies(Test):
+
+ def single(self, name):
+ cookie = cherrypy.request.cookie[name]
+ # Python2's SimpleCookie.__setitem__ won't take unicode keys.
+ cherrypy.response.cookie[str(name)] = cookie.value
+
+ def multiple(self, names):
+ list(map(self.single, names))
+
+ def append_headers(header_list, debug=False):
+ if debug:
+ cherrypy.log(
+ 'Extending response headers with %s' % repr(header_list),
+ 'TOOLS.APPEND_HEADERS')
+ cherrypy.serving.response.header_list.extend(header_list)
+ cherrypy.tools.append_headers = cherrypy.Tool(
+ 'on_end_resource', append_headers)
+
+ class MultiHeader(Test):
+
+ def header_list(self):
+ pass
+ header_list = cherrypy.tools.append_headers(header_list=[
+ (b'WWW-Authenticate', b'Negotiate'),
+ (b'WWW-Authenticate', b'Basic realm="foo"'),
+ ])(header_list)
+
+ def commas(self):
+ cherrypy.response.headers[
+ 'WWW-Authenticate'] = 'Negotiate,Basic realm="foo"'
+
+ cherrypy.tree.mount(root)
+
+ def testStatus(self):
+ self.getPage('/status/')
+ self.assertBody('normal')
+ self.assertStatus(200)
+
+ self.getPage('/status/blank')
+ self.assertBody('')
+ self.assertStatus(200)
+
+ self.getPage('/status/illegal')
+ self.assertStatus(500)
+ msg = 'Illegal response status from server (781 is out of range).'
+ self.assertErrorPage(500, msg)
+
+ if not getattr(cherrypy.server, 'using_apache', False):
+ self.getPage('/status/unknown')
+ self.assertBody('funky')
+ self.assertStatus(431)
+
+ self.getPage('/status/bad')
+ self.assertStatus(500)
+ msg = "Illegal response status from server ('error' is non-numeric)."
+ self.assertErrorPage(500, msg)
+
+ def test_on_end_resource_status(self):
+ self.getPage('/status/on_end_resource_stage')
+ self.assertBody('[]')
+ self.getPage('/status/on_end_resource_stage')
+ self.assertBody(repr(['200 OK']))
+
+ def testSlashes(self):
+ # Test that requests for index methods without a trailing slash
+ # get redirected to the same URI path with a trailing slash.
+ # Make sure GET params are preserved.
+ self.getPage('/redirect?id=3')
+ self.assertStatus(301)
+ self.assertMatchesBody(
+ '<a href=([\'"])%s/redirect/[?]id=3\\1>'
+ '%s/redirect/[?]id=3</a>' % (self.base(), self.base())
+ )
+
+ if self.prefix():
+ # Corner case: the "trailing slash" redirect could be tricky if
+ # we're using a virtual root and the URI is "/vroot" (no slash).
+ self.getPage('')
+ self.assertStatus(301)
+ self.assertMatchesBody("<a href=(['\"])%s/\\1>%s/</a>" %
+ (self.base(), self.base()))
+
+ # Test that requests for NON-index methods WITH a trailing slash
+ # get redirected to the same URI path WITHOUT a trailing slash.
+ # Make sure GET params are preserved.
+ self.getPage('/redirect/by_code/?code=307')
+ self.assertStatus(301)
+ self.assertMatchesBody(
+ "<a href=(['\"])%s/redirect/by_code[?]code=307\\1>"
+ '%s/redirect/by_code[?]code=307</a>'
+ % (self.base(), self.base())
+ )
+
+ # If the trailing_slash tool is off, CP should just continue
+ # as if the slashes were correct. But it needs some help
+ # inside cherrypy.url to form correct output.
+ self.getPage('/url?path_info=page1')
+ self.assertBody('%s/url/page1' % self.base())
+ self.getPage('/url/leaf/?path_info=page1')
+ self.assertBody('%s/url/page1' % self.base())
+
+ def testRedirect(self):
+ self.getPage('/redirect/')
+ self.assertBody('child')
+ self.assertStatus(200)
+
+ self.getPage('/redirect/by_code?code=300')
+ self.assertMatchesBody(
+ r"<a href=(['\"])(.*)somewhere%20else\1>\2somewhere%20else</a>")
+ self.assertStatus(300)
+
+ self.getPage('/redirect/by_code?code=301')
+ self.assertMatchesBody(
+ r"<a href=(['\"])(.*)somewhere%20else\1>\2somewhere%20else</a>")
+ self.assertStatus(301)
+
+ self.getPage('/redirect/by_code?code=302')
+ self.assertMatchesBody(
+ r"<a href=(['\"])(.*)somewhere%20else\1>\2somewhere%20else</a>")
+ self.assertStatus(302)
+
+ self.getPage('/redirect/by_code?code=303')
+ self.assertMatchesBody(
+ r"<a href=(['\"])(.*)somewhere%20else\1>\2somewhere%20else</a>")
+ self.assertStatus(303)
+
+ self.getPage('/redirect/by_code?code=307')
+ self.assertMatchesBody(
+ r"<a href=(['\"])(.*)somewhere%20else\1>\2somewhere%20else</a>")
+ self.assertStatus(307)
+
+ self.getPage('/redirect/by_code?code=308')
+ self.assertMatchesBody(
+ r"<a href=(['\"])(.*)somewhere%20else\1>\2somewhere%20else</a>")
+ self.assertStatus(308)
+
+ self.getPage('/redirect/nomodify')
+ self.assertBody('')
+ self.assertStatus(304)
+
+ self.getPage('/redirect/proxy')
+ self.assertBody('')
+ self.assertStatus(305)
+
+ # HTTPRedirect on error
+ self.getPage('/redirect/error/')
+ self.assertStatus(('302 Found', '303 See Other'))
+ self.assertInBody('/errpage')
+
+ # Make sure str(HTTPRedirect()) works.
+ self.getPage('/redirect/stringify', protocol='HTTP/1.0')
+ self.assertStatus(200)
+ self.assertBody("(['%s/'], 302)" % self.base())
+ if cherrypy.server.protocol_version == 'HTTP/1.1':
+ self.getPage('/redirect/stringify', protocol='HTTP/1.1')
+ self.assertStatus(200)
+ self.assertBody("(['%s/'], 303)" % self.base())
+
+ # check that #fragments are handled properly
+ # http://skrb.org/ietf/http_errata.html#location-fragments
+ frag = 'foo'
+ self.getPage('/redirect/fragment/%s' % frag)
+ self.assertMatchesBody(
+ r"<a href=(['\"])(.*)\/some\/url\#%s\1>\2\/some\/url\#%s</a>" % (
+ frag, frag))
+ loc = self.assertHeader('Location')
+ assert loc.endswith('#%s' % frag)
+ self.assertStatus(('302 Found', '303 See Other'))
+
+ # check injection protection
+ # See https://github.com/cherrypy/cherrypy/issues/1003
+ self.getPage(
+ '/redirect/custom?'
+ 'code=303&url=/foobar/%0d%0aSet-Cookie:%20somecookie=someval')
+ self.assertStatus(303)
+ loc = self.assertHeader('Location')
+ assert 'Set-Cookie' in loc
+ self.assertNoHeader('Set-Cookie')
+
+ def assertValidXHTML():
+ from xml.etree import ElementTree
+ try:
+ ElementTree.fromstring(
+ '<html><body>%s</body></html>' % self.body,
+ )
+ except ElementTree.ParseError:
+ self._handlewebError(
+ 'automatically generated redirect did not '
+ 'generate well-formed html',
+ )
+
+ # check redirects to URLs generated valid HTML - we check this
+ # by seeing if it appears as valid XHTML.
+ self.getPage('/redirect/by_code?code=303')
+ self.assertStatus(303)
+ assertValidXHTML()
+
+ # do the same with a url containing quote characters.
+ self.getPage('/redirect/url_with_quote')
+ self.assertStatus(303)
+ assertValidXHTML()
+
+ def test_redirect_with_xss(self):
+ """A redirect to a URL with HTML injected should result
+ in page contents escaped."""
+ self.getPage('/redirect/url_with_xss')
+ self.assertStatus(303)
+ assert b'<script>' not in self.body
+ assert b'&lt;script&gt;' in self.body
+
+ def test_redirect_with_unicode(self):
+ """
+ A redirect to a URL with Unicode should return a Location
+ header containing that Unicode URL.
+ """
+ # test disabled due to #1440
+ return
+ self.getPage('/redirect/url_with_unicode')
+ self.assertStatus(303)
+ loc = self.assertHeader('Location')
+ assert ntou('тест', encoding='utf-8') in loc
+
+ def test_InternalRedirect(self):
+ # InternalRedirect
+ self.getPage('/internalredirect/')
+ self.assertBody('hello')
+ self.assertStatus(200)
+
+ # Test passthrough
+ self.getPage(
+ '/internalredirect/petshop?user_id=Sir-not-appearing-in-this-film')
+ self.assertBody('0 images for Sir-not-appearing-in-this-film')
+ self.assertStatus(200)
+
+ # Test args
+ self.getPage('/internalredirect/petshop?user_id=parrot')
+ self.assertBody('0 images for slug')
+ self.assertStatus(200)
+
+ # Test POST
+ self.getPage('/internalredirect/petshop', method='POST',
+ body='user_id=terrier')
+ self.assertBody('0 images for fish')
+ self.assertStatus(200)
+
+ # Test ir before body read
+ self.getPage('/internalredirect/early_ir', method='POST',
+ body='arg=aha!')
+ self.assertBody('Something went horribly wrong.')
+ self.assertStatus(200)
+
+ self.getPage('/internalredirect/secure')
+ self.assertBody('Please log in')
+ self.assertStatus(200)
+
+ # Relative path in InternalRedirect.
+ # Also tests request.prev.
+ self.getPage('/internalredirect/relative?a=3&b=5')
+ self.assertBody('a=3&b=5')
+ self.assertStatus(200)
+
+ # InternalRedirect on error
+ self.getPage('/internalredirect/choke')
+ self.assertStatus(200)
+ self.assertBody('Something went horribly wrong.')
+
+ def testFlatten(self):
+ for url in ['/flatten/as_string', '/flatten/as_list',
+ '/flatten/as_yield', '/flatten/as_dblyield',
+ '/flatten/as_refyield']:
+ self.getPage(url)
+ self.assertBody('content')
+
+ def testRanges(self):
+ self.getPage('/ranges/get_ranges?bytes=3-6')
+ self.assertBody('[(3, 7)]')
+
+ # Test multiple ranges and a suffix-byte-range-spec, for good measure.
+ self.getPage('/ranges/get_ranges?bytes=2-4,-1')
+ self.assertBody('[(2, 5), (7, 8)]')
+
+ # Test a suffix-byte-range longer than the content
+ # length. Note that in this test, the content length
+ # is 8 bytes.
+ self.getPage('/ranges/get_ranges?bytes=-100')
+ self.assertBody('[(0, 8)]')
+
+ # Get a partial file.
+ if cherrypy.server.protocol_version == 'HTTP/1.1':
+ self.getPage('/ranges/slice_file', [('Range', 'bytes=2-5')])
+ self.assertStatus(206)
+ self.assertHeader('Content-Type', 'text/html;charset=utf-8')
+ self.assertHeader('Content-Range', 'bytes 2-5/14')
+ self.assertBody('llo,')
+
+ # What happens with overlapping ranges (and out of order, too)?
+ self.getPage('/ranges/slice_file', [('Range', 'bytes=4-6,2-5')])
+ self.assertStatus(206)
+ ct = self.assertHeader('Content-Type')
+ expected_type = 'multipart/byteranges; boundary='
+ assert ct.startswith(expected_type)
+ boundary = ct[len(expected_type):]
+ expected_body = ('\r\n--%s\r\n'
+ 'Content-type: text/html\r\n'
+ 'Content-range: bytes 4-6/14\r\n'
+ '\r\n'
+ 'o, \r\n'
+ '--%s\r\n'
+ 'Content-type: text/html\r\n'
+ 'Content-range: bytes 2-5/14\r\n'
+ '\r\n'
+ 'llo,\r\n'
+ '--%s--\r\n' % (boundary, boundary, boundary))
+ self.assertBody(expected_body)
+ self.assertHeader('Content-Length')
+
+ # Test "416 Requested Range Not Satisfiable"
+ self.getPage('/ranges/slice_file', [('Range', 'bytes=2300-2900')])
+ self.assertStatus(416)
+ # "When this status code is returned for a byte-range request,
+ # the response SHOULD include a Content-Range entity-header
+ # field specifying the current length of the selected resource"
+ self.assertHeader('Content-Range', 'bytes */14')
+ elif cherrypy.server.protocol_version == 'HTTP/1.0':
+ # Test Range behavior with HTTP/1.0 request
+ self.getPage('/ranges/slice_file', [('Range', 'bytes=2-5')])
+ self.assertStatus(200)
+ self.assertBody('Hello, world\r\n')
+
+ def testFavicon(self):
+ # favicon.ico is served by staticfile.
+ icofilename = os.path.join(localDir, '../favicon.ico')
+ icofile = open(icofilename, 'rb')
+ data = icofile.read()
+ icofile.close()
+
+ self.getPage('/favicon.ico')
+ self.assertBody(data)
+
+ def skip_if_bad_cookies(self):
+ """
+ cookies module fails to reject invalid cookies
+ https://github.com/cherrypy/cherrypy/issues/1405
+ """
+ cookies = sys.modules.get('http.cookies')
+ _is_legal_key = getattr(cookies, '_is_legal_key', lambda x: False)
+ if not _is_legal_key(','):
+ return
+ issue = 'http://bugs.python.org/issue26302'
+ tmpl = 'Broken cookies module ({issue})'
+ self.skip(tmpl.format(**locals()))
+
+ def testCookies(self):
+ self.skip_if_bad_cookies()
+
+ self.getPage('/cookies/single?name=First',
+ [('Cookie', 'First=Dinsdale;')])
+ self.assertHeader('Set-Cookie', 'First=Dinsdale')
+
+ self.getPage('/cookies/multiple?names=First&names=Last',
+ [('Cookie', 'First=Dinsdale; Last=Piranha;'),
+ ])
+ self.assertHeader('Set-Cookie', 'First=Dinsdale')
+ self.assertHeader('Set-Cookie', 'Last=Piranha')
+
+ self.getPage('/cookies/single?name=Something-With%2CComma',
+ [('Cookie', 'Something-With,Comma=some-value')])
+ self.assertStatus(400)
+
+ def testDefaultContentType(self):
+ self.getPage('/')
+ self.assertHeader('Content-Type', 'text/html;charset=utf-8')
+ self.getPage('/defct/plain')
+ self.getPage('/')
+ self.assertHeader('Content-Type', 'text/plain;charset=utf-8')
+ self.getPage('/defct/html')
+
+ def test_multiple_headers(self):
+ self.getPage('/multiheader/header_list')
+ self.assertEqual(
+ [(k, v) for k, v in self.headers if k == 'WWW-Authenticate'],
+ [('WWW-Authenticate', 'Negotiate'),
+ ('WWW-Authenticate', 'Basic realm="foo"'),
+ ])
+ self.getPage('/multiheader/commas')
+ self.assertHeader('WWW-Authenticate', 'Negotiate,Basic realm="foo"')
+
+ def test_cherrypy_url(self):
+ # Input relative to current
+ self.getPage('/url/leaf?path_info=page1')
+ self.assertBody('%s/url/page1' % self.base())
+ self.getPage('/url/?path_info=page1')
+ self.assertBody('%s/url/page1' % self.base())
+ # Other host header
+ host = 'www.mydomain.example'
+ self.getPage('/url/leaf?path_info=page1',
+ headers=[('Host', host)])
+ self.assertBody('%s://%s/url/page1' % (self.scheme, host))
+
+ # Input is 'absolute'; that is, relative to script_name
+ self.getPage('/url/leaf?path_info=/page1')
+ self.assertBody('%s/page1' % self.base())
+ self.getPage('/url/?path_info=/page1')
+ self.assertBody('%s/page1' % self.base())
+
+ # Single dots
+ self.getPage('/url/leaf?path_info=./page1')
+ self.assertBody('%s/url/page1' % self.base())
+ self.getPage('/url/leaf?path_info=other/./page1')
+ self.assertBody('%s/url/other/page1' % self.base())
+ self.getPage('/url/?path_info=/other/./page1')
+ self.assertBody('%s/other/page1' % self.base())
+ self.getPage('/url/?path_info=/other/././././page1')
+ self.assertBody('%s/other/page1' % self.base())
+
+ # Double dots
+ self.getPage('/url/leaf?path_info=../page1')
+ self.assertBody('%s/page1' % self.base())
+ self.getPage('/url/leaf?path_info=other/../page1')
+ self.assertBody('%s/url/page1' % self.base())
+ self.getPage('/url/leaf?path_info=/other/../page1')
+ self.assertBody('%s/page1' % self.base())
+ self.getPage('/url/leaf?path_info=/other/../../../page1')
+ self.assertBody('%s/page1' % self.base())
+ self.getPage('/url/leaf?path_info=/other/../../../../../page1')
+ self.assertBody('%s/page1' % self.base())
+
+ # qs param is not normalized as a path
+ self.getPage('/url/qs?qs=/other')
+ self.assertBody('%s/url/qs?/other' % self.base())
+ self.getPage('/url/qs?qs=/other/../page1')
+ self.assertBody('%s/url/qs?/other/../page1' % self.base())
+ self.getPage('/url/qs?qs=../page1')
+ self.assertBody('%s/url/qs?../page1' % self.base())
+ self.getPage('/url/qs?qs=../../page1')
+ self.assertBody('%s/url/qs?../../page1' % self.base())
+
+ # Output relative to current path or script_name
+ self.getPage('/url/?path_info=page1&relative=True')
+ self.assertBody('page1')
+ self.getPage('/url/leaf?path_info=/page1&relative=True')
+ self.assertBody('../page1')
+ self.getPage('/url/leaf?path_info=page1&relative=True')
+ self.assertBody('page1')
+ self.getPage('/url/leaf?path_info=leaf/page1&relative=True')
+ self.assertBody('leaf/page1')
+ self.getPage('/url/leaf?path_info=../page1&relative=True')
+ self.assertBody('../page1')
+ self.getPage('/url/?path_info=other/../page1&relative=True')
+ self.assertBody('page1')
+
+ # Output relative to /
+ self.getPage('/baseurl?path_info=ab&relative=True')
+ self.assertBody('ab')
+ # Output relative to /
+ self.getPage('/baseurl?path_info=/ab&relative=True')
+ self.assertBody('ab')
+
+ # absolute-path references ("server-relative")
+ # Input relative to current
+ self.getPage('/url/leaf?path_info=page1&relative=server')
+ self.assertBody('/url/page1')
+ self.getPage('/url/?path_info=page1&relative=server')
+ self.assertBody('/url/page1')
+ # Input is 'absolute'; that is, relative to script_name
+ self.getPage('/url/leaf?path_info=/page1&relative=server')
+ self.assertBody('/page1')
+ self.getPage('/url/?path_info=/page1&relative=server')
+ self.assertBody('/page1')
+
+ def test_expose_decorator(self):
+ # Test @expose
+ self.getPage('/expose_dec/no_call')
+ self.assertStatus(200)
+ self.assertBody('Mr E. R. Bradshaw')
+
+ # Test @expose()
+ self.getPage('/expose_dec/call_empty')
+ self.assertStatus(200)
+ self.assertBody('Mrs. B.J. Smegma')
+
+ # Test @expose("alias")
+ self.getPage('/expose_dec/call_alias')
+ self.assertStatus(200)
+ self.assertBody('Mr Nesbitt')
+ # Does the original name work?
+ self.getPage('/expose_dec/nesbitt')
+ self.assertStatus(200)
+ self.assertBody('Mr Nesbitt')
+
+ # Test @expose(["alias1", "alias2"])
+ self.getPage('/expose_dec/alias1')
+ self.assertStatus(200)
+ self.assertBody('Mr Ken Andrews')
+ self.getPage('/expose_dec/alias2')
+ self.assertStatus(200)
+ self.assertBody('Mr Ken Andrews')
+ # Does the original name work?
+ self.getPage('/expose_dec/andrews')
+ self.assertStatus(200)
+ self.assertBody('Mr Ken Andrews')
+
+ # Test @expose(alias="alias")
+ self.getPage('/expose_dec/alias3')
+ self.assertStatus(200)
+ self.assertBody('Mr. and Mrs. Watson')
+
+
+class ErrorTests(helper.CPWebCase):
+
+ @staticmethod
+ def setup_server():
+ def break_header():
+ # Add a header after finalize that is invalid
+ cherrypy.serving.response.header_list.append((2, 3))
+ cherrypy.tools.break_header = cherrypy.Tool(
+ 'on_end_resource', break_header)
+
+ class Root:
+
+ @cherrypy.expose
+ def index(self):
+ return 'hello'
+
+ @cherrypy.config(**{'tools.break_header.on': True})
+ def start_response_error(self):
+ return 'salud!'
+
+ @cherrypy.expose
+ def stat(self, path):
+ with cherrypy.HTTPError.handle(OSError, 404):
+ os.stat(path)
+
+ root = Root()
+
+ cherrypy.tree.mount(root)
+
+ def test_start_response_error(self):
+ self.getPage('/start_response_error')
+ self.assertStatus(500)
+ self.assertInBody(
+ 'TypeError: response.header_list key 2 is not a byte string.')
+
+ def test_contextmanager(self):
+ self.getPage('/stat/missing')
+ self.assertStatus(404)
+ body_text = self.body.decode('utf-8')
+ assert (
+ 'No such file or directory' in body_text or
+ 'cannot find the file specified' in body_text
+ )
+
+
+class TestBinding:
+ def test_bind_ephemeral_port(self):
+ """
+ A server configured to bind to port 0 will bind to an ephemeral
+ port and indicate that port number on startup.
+ """
+ cherrypy.config.reset()
+ bind_ephemeral_conf = {
+ 'server.socket_port': 0,
+ }
+ cherrypy.config.update(bind_ephemeral_conf)
+ cherrypy.engine.start()
+ assert cherrypy.server.bound_addr != cherrypy.server.bind_addr
+ _host, port = cherrypy.server.bound_addr
+ assert port > 0
+ cherrypy.engine.stop()
+ assert cherrypy.server.bind_addr == cherrypy.server.bound_addr
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_dynamicobjectmapping.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_dynamicobjectmapping.py
new file mode 100644
index 0000000000000000000000000000000000000000..aaa89ca7f1ff3eb17d9374097fa8dfc51b1c5a40
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_dynamicobjectmapping.py
@@ -0,0 +1,422 @@
+import cherrypy
+from cherrypy.test import helper
+
+script_names = ['', '/foo', '/users/fred/blog', '/corp/blog']
+
+
+def setup_server():
+ class SubSubRoot:
+
+ @cherrypy.expose
+ def index(self):
+ return 'SubSubRoot index'
+
+ @cherrypy.expose
+ def default(self, *args):
+ return 'SubSubRoot default'
+
+ @cherrypy.expose
+ def handler(self):
+ return 'SubSubRoot handler'
+
+ @cherrypy.expose
+ def dispatch(self):
+ return 'SubSubRoot dispatch'
+
+ subsubnodes = {
+ '1': SubSubRoot(),
+ '2': SubSubRoot(),
+ }
+
+ class SubRoot:
+
+ @cherrypy.expose
+ def index(self):
+ return 'SubRoot index'
+
+ @cherrypy.expose
+ def default(self, *args):
+ return 'SubRoot %s' % (args,)
+
+ @cherrypy.expose
+ def handler(self):
+ return 'SubRoot handler'
+
+ def _cp_dispatch(self, vpath):
+ return subsubnodes.get(vpath[0], None)
+
+ subnodes = {
+ '1': SubRoot(),
+ '2': SubRoot(),
+ }
+
+ class Root:
+
+ @cherrypy.expose
+ def index(self):
+ return 'index'
+
+ @cherrypy.expose
+ def default(self, *args):
+ return 'default %s' % (args,)
+
+ @cherrypy.expose
+ def handler(self):
+ return 'handler'
+
+ def _cp_dispatch(self, vpath):
+ return subnodes.get(vpath[0])
+
+ # -------------------------------------------------------------------------
+ # DynamicNodeAndMethodDispatcher example.
+ # This example exposes a fairly naive HTTP api
+ class User(object):
+
+ def __init__(self, id, name):
+ self.id = id
+ self.name = name
+
+ def __unicode__(self):
+ return str(self.name)
+
+ def __str__(self):
+ return str(self.name)
+
+ user_lookup = {
+ 1: User(1, 'foo'),
+ 2: User(2, 'bar'),
+ }
+
+ def make_user(name, id=None):
+ if not id:
+ id = max(*list(user_lookup.keys())) + 1
+ user_lookup[id] = User(id, name)
+ return id
+
+ @cherrypy.expose
+ class UserContainerNode(object):
+
+ def POST(self, name):
+ """
+ Allow the creation of a new Object
+ """
+ return 'POST %d' % make_user(name)
+
+ def GET(self):
+ return str(sorted(user_lookup.keys()))
+
+ def dynamic_dispatch(self, vpath):
+ try:
+ id = int(vpath[0])
+ except (ValueError, IndexError):
+ return None
+ return UserInstanceNode(id)
+
+ @cherrypy.expose
+ class UserInstanceNode(object):
+
+ def __init__(self, id):
+ self.id = id
+ self.user = user_lookup.get(id, None)
+
+ # For all but PUT methods there MUST be a valid user identified
+ # by self.id
+ if not self.user and cherrypy.request.method != 'PUT':
+ raise cherrypy.HTTPError(404)
+
+ def GET(self, *args, **kwargs):
+ """
+ Return the appropriate representation of the instance.
+ """
+ return str(self.user)
+
+ def POST(self, name):
+ """
+ Update the fields of the user instance.
+ """
+ self.user.name = name
+ return 'POST %d' % self.user.id
+
+ def PUT(self, name):
+ """
+ Create a new user with the specified id, or edit it if it already
+ exists
+ """
+ if self.user:
+ # Edit the current user
+ self.user.name = name
+ return 'PUT %d' % self.user.id
+ else:
+ # Make a new user with said attributes.
+ return 'PUT %d' % make_user(name, self.id)
+
+ def DELETE(self):
+ """
+ Delete the user specified at the id.
+ """
+ id = self.user.id
+ del user_lookup[self.user.id]
+ del self.user
+ return 'DELETE %d' % id
+
+ class ABHandler:
+
+ class CustomDispatch:
+
+ @cherrypy.expose
+ def index(self, a, b):
+ return 'custom'
+
+ def _cp_dispatch(self, vpath):
+ """Make sure that if we don't pop anything from vpath,
+ processing still works.
+ """
+ return self.CustomDispatch()
+
+ @cherrypy.expose
+ def index(self, a, b=None):
+ body = ['a:' + str(a)]
+ if b is not None:
+ body.append(',b:' + str(b))
+ return ''.join(body)
+
+ @cherrypy.expose
+ def delete(self, a, b):
+ return 'deleting ' + str(a) + ' and ' + str(b)
+
+ class IndexOnly:
+
+ def _cp_dispatch(self, vpath):
+ """Make sure that popping ALL of vpath still shows the index
+ handler.
+ """
+ while vpath:
+ vpath.pop()
+ return self
+
+ @cherrypy.expose
+ def index(self):
+ return 'IndexOnly index'
+
+ class DecoratedPopArgs:
+
+ """Test _cp_dispatch with @cherrypy.popargs."""
+
+ @cherrypy.expose
+ def index(self):
+ return 'no params'
+
+ @cherrypy.expose
+ def hi(self):
+ return "hi was not interpreted as 'a' param"
+ DecoratedPopArgs = cherrypy.popargs(
+ 'a', 'b', handler=ABHandler())(DecoratedPopArgs)
+
+ class NonDecoratedPopArgs:
+
+ """Test _cp_dispatch = cherrypy.popargs()"""
+
+ _cp_dispatch = cherrypy.popargs('a')
+
+ @cherrypy.expose
+ def index(self, a):
+ return 'index: ' + str(a)
+
+ class ParameterizedHandler:
+
+ """Special handler created for each request"""
+
+ def __init__(self, a):
+ self.a = a
+
+ @cherrypy.expose
+ def index(self):
+ if 'a' in cherrypy.request.params:
+ raise Exception(
+ 'Parameterized handler argument ended up in '
+ 'request.params')
+ return self.a
+
+ class ParameterizedPopArgs:
+
+ """Test cherrypy.popargs() with a function call handler"""
+ ParameterizedPopArgs = cherrypy.popargs(
+ 'a', handler=ParameterizedHandler)(ParameterizedPopArgs)
+
+ Root.decorated = DecoratedPopArgs()
+ Root.undecorated = NonDecoratedPopArgs()
+ Root.index_only = IndexOnly()
+ Root.parameter_test = ParameterizedPopArgs()
+
+ Root.users = UserContainerNode()
+
+ md = cherrypy.dispatch.MethodDispatcher('dynamic_dispatch')
+ for url in script_names:
+ conf = {
+ '/': {
+ 'user': (url or '/').split('/')[-2],
+ },
+ '/users': {
+ 'request.dispatch': md
+ },
+ }
+ cherrypy.tree.mount(Root(), url, conf)
+
+
+class DynamicObjectMappingTest(helper.CPWebCase):
+ setup_server = staticmethod(setup_server)
+
+ def testObjectMapping(self):
+ for url in script_names:
+ self.script_name = url
+
+ self.getPage('/')
+ self.assertBody('index')
+
+ self.getPage('/handler')
+ self.assertBody('handler')
+
+ # Dynamic dispatch will succeed here for the subnodes
+ # so the subroot gets called
+ self.getPage('/1/')
+ self.assertBody('SubRoot index')
+
+ self.getPage('/2/')
+ self.assertBody('SubRoot index')
+
+ self.getPage('/1/handler')
+ self.assertBody('SubRoot handler')
+
+ self.getPage('/2/handler')
+ self.assertBody('SubRoot handler')
+
+ # Dynamic dispatch will fail here for the subnodes
+ # so the default gets called
+ self.getPage('/asdf/')
+ self.assertBody("default ('asdf',)")
+
+ self.getPage('/asdf/asdf')
+ self.assertBody("default ('asdf', 'asdf')")
+
+ self.getPage('/asdf/handler')
+ self.assertBody("default ('asdf', 'handler')")
+
+ # Dynamic dispatch will succeed here for the subsubnodes
+ # so the subsubroot gets called
+ self.getPage('/1/1/')
+ self.assertBody('SubSubRoot index')
+
+ self.getPage('/2/2/')
+ self.assertBody('SubSubRoot index')
+
+ self.getPage('/1/1/handler')
+ self.assertBody('SubSubRoot handler')
+
+ self.getPage('/2/2/handler')
+ self.assertBody('SubSubRoot handler')
+
+ self.getPage('/2/2/dispatch')
+ self.assertBody('SubSubRoot dispatch')
+
+ # The exposed dispatch will not be called as a dispatch
+ # method.
+ self.getPage('/2/2/foo/foo')
+ self.assertBody('SubSubRoot default')
+
+ # Dynamic dispatch will fail here for the subsubnodes
+ # so the SubRoot gets called
+ self.getPage('/1/asdf/')
+ self.assertBody("SubRoot ('asdf',)")
+
+ self.getPage('/1/asdf/asdf')
+ self.assertBody("SubRoot ('asdf', 'asdf')")
+
+ self.getPage('/1/asdf/handler')
+ self.assertBody("SubRoot ('asdf', 'handler')")
+
+ def testMethodDispatch(self):
+ # GET acts like a container
+ self.getPage('/users')
+ self.assertBody('[1, 2]')
+ self.assertHeader('Allow', 'GET, HEAD, POST')
+
+ # POST to the container URI allows creation
+ self.getPage('/users', method='POST', body='name=baz')
+ self.assertBody('POST 3')
+ self.assertHeader('Allow', 'GET, HEAD, POST')
+
+ # POST to a specific instance URI results in a 404
+ # as the resource does not exist.
+ self.getPage('/users/5', method='POST', body='name=baz')
+ self.assertStatus(404)
+
+ # PUT to a specific instance URI results in creation
+ self.getPage('/users/5', method='PUT', body='name=boris')
+ self.assertBody('PUT 5')
+ self.assertHeader('Allow', 'DELETE, GET, HEAD, POST, PUT')
+
+ # GET acts like a container
+ self.getPage('/users')
+ self.assertBody('[1, 2, 3, 5]')
+ self.assertHeader('Allow', 'GET, HEAD, POST')
+
+ test_cases = (
+ (1, 'foo', 'fooupdated', 'DELETE, GET, HEAD, POST, PUT'),
+ (2, 'bar', 'barupdated', 'DELETE, GET, HEAD, POST, PUT'),
+ (3, 'baz', 'bazupdated', 'DELETE, GET, HEAD, POST, PUT'),
+ (5, 'boris', 'borisupdated', 'DELETE, GET, HEAD, POST, PUT'),
+ )
+ for id, name, updatedname, headers in test_cases:
+ self.getPage('/users/%d' % id)
+ self.assertBody(name)
+ self.assertHeader('Allow', headers)
+
+ # Make sure POSTs update already existing resources
+ self.getPage('/users/%d' %
+ id, method='POST', body='name=%s' % updatedname)
+ self.assertBody('POST %d' % id)
+ self.assertHeader('Allow', headers)
+
+ # Make sure PUTs Update already existing resources.
+ self.getPage('/users/%d' %
+ id, method='PUT', body='name=%s' % updatedname)
+ self.assertBody('PUT %d' % id)
+ self.assertHeader('Allow', headers)
+
+ # Make sure DELETES Remove already existing resources.
+ self.getPage('/users/%d' % id, method='DELETE')
+ self.assertBody('DELETE %d' % id)
+ self.assertHeader('Allow', headers)
+
+ # GET acts like a container
+ self.getPage('/users')
+ self.assertBody('[]')
+ self.assertHeader('Allow', 'GET, HEAD, POST')
+
+ def testVpathDispatch(self):
+ self.getPage('/decorated/')
+ self.assertBody('no params')
+
+ self.getPage('/decorated/hi')
+ self.assertBody("hi was not interpreted as 'a' param")
+
+ self.getPage('/decorated/yo/')
+ self.assertBody('a:yo')
+
+ self.getPage('/decorated/yo/there/')
+ self.assertBody('a:yo,b:there')
+
+ self.getPage('/decorated/yo/there/delete')
+ self.assertBody('deleting yo and there')
+
+ self.getPage('/decorated/yo/there/handled_by_dispatch/')
+ self.assertBody('custom')
+
+ self.getPage('/undecorated/blah/')
+ self.assertBody('index: blah')
+
+ self.getPage('/index_only/a/b/c/d/e/f/g/')
+ self.assertBody('IndexOnly index')
+
+ self.getPage('/parameter_test/argument2/')
+ self.assertBody('argument2')
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_encoding.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_encoding.py
new file mode 100644
index 0000000000000000000000000000000000000000..882d7a5b8c21ae422f595ec13e0d479e4ea9a6c9
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_encoding.py
@@ -0,0 +1,432 @@
+# coding: utf-8
+
+import gzip
+import io
+from unittest import mock
+from http.client import IncompleteRead
+from urllib.parse import quote as url_quote
+
+import cherrypy
+from cherrypy._cpcompat import ntob, ntou
+
+from cherrypy.test import helper
+
+
+europoundUnicode = ntou('£', encoding='utf-8')
+sing = ntou('毛泽东: Sing, Little Birdie?', encoding='utf-8')
+
+sing8 = sing.encode('utf-8')
+sing16 = sing.encode('utf-16')
+
+
+class EncodingTests(helper.CPWebCase):
+
+ @staticmethod
+ def setup_server():
+ class Root:
+
+ @cherrypy.expose
+ def index(self, param):
+ assert param == europoundUnicode, '%r != %r' % (
+ param, europoundUnicode)
+ yield europoundUnicode
+
+ @cherrypy.expose
+ def mao_zedong(self):
+ return sing
+
+ @cherrypy.expose
+ @cherrypy.config(**{'tools.encode.encoding': 'utf-8'})
+ def utf8(self):
+ return sing8
+
+ @cherrypy.expose
+ def cookies_and_headers(self):
+ # if the headers have non-ascii characters and a cookie has
+ # any part which is unicode (even ascii), the response
+ # should not fail.
+ cherrypy.response.cookie['candy'] = 'bar'
+ cherrypy.response.cookie['candy']['domain'] = 'cherrypy.org'
+ cherrypy.response.headers[
+ 'Some-Header'] = 'My d\xc3\xb6g has fleas'
+ cherrypy.response.headers[
+ 'Bytes-Header'] = b'Bytes given header'
+ return 'Any content'
+
+ @cherrypy.expose
+ def reqparams(self, *args, **kwargs):
+ return b', '.join(
+ [': '.join((k, v)).encode('utf8')
+ for k, v in sorted(cherrypy.request.params.items())]
+ )
+
+ @cherrypy.expose
+ @cherrypy.config(**{
+ 'tools.encode.text_only': False,
+ 'tools.encode.add_charset': True,
+ })
+ def nontext(self, *args, **kwargs):
+ cherrypy.response.headers[
+ 'Content-Type'] = 'application/binary'
+ return '\x00\x01\x02\x03'
+
+ class GZIP:
+
+ @cherrypy.expose
+ def index(self):
+ yield 'Hello, world'
+
+ @cherrypy.expose
+ # Turn encoding off so the gzip tool is the one doing the collapse.
+ @cherrypy.config(**{'tools.encode.on': False})
+ def noshow(self):
+ # Test for ticket #147, where yield showed no exceptions
+ # (content-encoding was still gzip even though traceback
+ # wasn't zipped).
+ raise IndexError()
+ yield 'Here be dragons'
+
+ @cherrypy.expose
+ @cherrypy.config(**{'response.stream': True})
+ def noshow_stream(self):
+ # Test for ticket #147, where yield showed no exceptions
+ # (content-encoding was still gzip even though traceback
+ # wasn't zipped).
+ raise IndexError()
+ yield 'Here be dragons'
+
+ class Decode:
+
+ @cherrypy.expose
+ @cherrypy.config(**{
+ 'tools.decode.on': True,
+ 'tools.decode.default_encoding': ['utf-16'],
+ })
+ def extra_charset(self, *args, **kwargs):
+ return ', '.join([': '.join((k, v))
+ for k, v in cherrypy.request.params.items()])
+
+ @cherrypy.expose
+ @cherrypy.config(**{
+ 'tools.decode.on': True,
+ 'tools.decode.encoding': 'utf-16',
+ })
+ def force_charset(self, *args, **kwargs):
+ return ', '.join([': '.join((k, v))
+ for k, v in cherrypy.request.params.items()])
+
+ root = Root()
+ root.gzip = GZIP()
+ root.decode = Decode()
+ cherrypy.tree.mount(root, config={'/gzip': {'tools.gzip.on': True}})
+
+ def test_query_string_decoding(self):
+ URI_TMPL = '/reqparams?q={q}'
+
+ europoundUtf8_2_bytes = europoundUnicode.encode('utf-8')
+ europoundUtf8_2nd_byte = europoundUtf8_2_bytes[1:2]
+
+ # Encoded utf8 query strings MUST be parsed correctly.
+ # Here, q is the POUND SIGN U+00A3 encoded in utf8 and then %HEX
+ self.getPage(URI_TMPL.format(q=url_quote(europoundUtf8_2_bytes)))
+ # The return value will be encoded as utf8.
+ self.assertBody(b'q: ' + europoundUtf8_2_bytes)
+
+ # Query strings that are incorrectly encoded MUST raise 404.
+ # Here, q is the second byte of POUND SIGN U+A3 encoded in utf8
+ # and then %HEX
+ # TODO: check whether this shouldn't raise 400 Bad Request instead
+ self.getPage(URI_TMPL.format(q=url_quote(europoundUtf8_2nd_byte)))
+ self.assertStatus(404)
+ self.assertErrorPage(
+ 404,
+ 'The given query string could not be processed. Query '
+ "strings for this resource must be encoded with 'utf8'.")
+
+ def test_urlencoded_decoding(self):
+ # Test the decoding of an application/x-www-form-urlencoded entity.
+ europoundUtf8 = europoundUnicode.encode('utf-8')
+ body = b'param=' + europoundUtf8
+ self.getPage('/',
+ method='POST',
+ headers=[
+ ('Content-Type', 'application/x-www-form-urlencoded'),
+ ('Content-Length', str(len(body))),
+ ],
+ body=body),
+ self.assertBody(europoundUtf8)
+
+ # Encoded utf8 entities MUST be parsed and decoded correctly.
+ # Here, q is the POUND SIGN U+00A3 encoded in utf8
+ body = b'q=\xc2\xa3'
+ self.getPage('/reqparams', method='POST',
+ headers=[(
+ 'Content-Type', 'application/x-www-form-urlencoded'),
+ ('Content-Length', str(len(body))),
+ ],
+ body=body),
+ self.assertBody(b'q: \xc2\xa3')
+
+ # ...and in utf16, which is not in the default attempt_charsets list:
+ body = b'\xff\xfeq\x00=\xff\xfe\xa3\x00'
+ self.getPage('/reqparams',
+ method='POST',
+ headers=[
+ ('Content-Type',
+ 'application/x-www-form-urlencoded;charset=utf-16'),
+ ('Content-Length', str(len(body))),
+ ],
+ body=body),
+ self.assertBody(b'q: \xc2\xa3')
+
+ # Entities that are incorrectly encoded MUST raise 400.
+ # Here, q is the POUND SIGN U+00A3 encoded in utf16, but
+ # the Content-Type incorrectly labels it utf-8.
+ body = b'\xff\xfeq\x00=\xff\xfe\xa3\x00'
+ self.getPage('/reqparams',
+ method='POST',
+ headers=[
+ ('Content-Type',
+ 'application/x-www-form-urlencoded;charset=utf-8'),
+ ('Content-Length', str(len(body))),
+ ],
+ body=body),
+ self.assertStatus(400)
+ self.assertErrorPage(
+ 400,
+ 'The request entity could not be decoded. The following charsets '
+ "were attempted: ['utf-8']")
+
+ def test_decode_tool(self):
+ # An extra charset should be tried first, and succeed if it matches.
+ # Here, we add utf-16 as a charset and pass a utf-16 body.
+ body = b'\xff\xfeq\x00=\xff\xfe\xa3\x00'
+ self.getPage('/decode/extra_charset', method='POST',
+ headers=[(
+ 'Content-Type', 'application/x-www-form-urlencoded'),
+ ('Content-Length', str(len(body))),
+ ],
+ body=body),
+ self.assertBody(b'q: \xc2\xa3')
+
+ # An extra charset should be tried first, and continue to other default
+ # charsets if it doesn't match.
+ # Here, we add utf-16 as a charset but still pass a utf-8 body.
+ body = b'q=\xc2\xa3'
+ self.getPage('/decode/extra_charset', method='POST',
+ headers=[(
+ 'Content-Type', 'application/x-www-form-urlencoded'),
+ ('Content-Length', str(len(body))),
+ ],
+ body=body),
+ self.assertBody(b'q: \xc2\xa3')
+
+ # An extra charset should error if force is True and it doesn't match.
+ # Here, we force utf-16 as a charset but still pass a utf-8 body.
+ body = b'q=\xc2\xa3'
+ self.getPage('/decode/force_charset', method='POST',
+ headers=[(
+ 'Content-Type', 'application/x-www-form-urlencoded'),
+ ('Content-Length', str(len(body))),
+ ],
+ body=body),
+ self.assertErrorPage(
+ 400,
+ 'The request entity could not be decoded. The following charsets '
+ "were attempted: ['utf-16']")
+
+ def test_multipart_decoding(self):
+ # Test the decoding of a multipart entity when the charset (utf16) is
+ # explicitly given.
+ body = ntob('\r\n'.join([
+ '--X',
+ 'Content-Type: text/plain;charset=utf-16',
+ 'Content-Disposition: form-data; name="text"',
+ '',
+ '\xff\xfea\x00b\x00\x1c c\x00',
+ '--X',
+ 'Content-Type: text/plain;charset=utf-16',
+ 'Content-Disposition: form-data; name="submit"',
+ '',
+ '\xff\xfeC\x00r\x00e\x00a\x00t\x00e\x00',
+ '--X--'
+ ]))
+ self.getPage('/reqparams', method='POST',
+ headers=[(
+ 'Content-Type', 'multipart/form-data;boundary=X'),
+ ('Content-Length', str(len(body))),
+ ],
+ body=body),
+ self.assertBody(b'submit: Create, text: ab\xe2\x80\x9cc')
+
+ @mock.patch('cherrypy._cpreqbody.Part.maxrambytes', 1)
+ def test_multipart_decoding_bigger_maxrambytes(self):
+ """
+ Decoding of a multipart entity should also pass when
+ the entity is bigger than maxrambytes. See ticket #1352.
+ """
+ self.test_multipart_decoding()
+
+ def test_multipart_decoding_no_charset(self):
+ # Test the decoding of a multipart entity when the charset (utf8) is
+ # NOT explicitly given, but is in the list of charsets to attempt.
+ body = ntob('\r\n'.join([
+ '--X',
+ 'Content-Disposition: form-data; name="text"',
+ '',
+ '\xe2\x80\x9c',
+ '--X',
+ 'Content-Disposition: form-data; name="submit"',
+ '',
+ 'Create',
+ '--X--'
+ ]))
+ self.getPage('/reqparams', method='POST',
+ headers=[(
+ 'Content-Type', 'multipart/form-data;boundary=X'),
+ ('Content-Length', str(len(body))),
+ ],
+ body=body),
+ self.assertBody(b'submit: Create, text: \xe2\x80\x9c')
+
+ def test_multipart_decoding_no_successful_charset(self):
+ # Test the decoding of a multipart entity when the charset (utf16) is
+ # NOT explicitly given, and is NOT in the list of charsets to attempt.
+ body = ntob('\r\n'.join([
+ '--X',
+ 'Content-Disposition: form-data; name="text"',
+ '',
+ '\xff\xfea\x00b\x00\x1c c\x00',
+ '--X',
+ 'Content-Disposition: form-data; name="submit"',
+ '',
+ '\xff\xfeC\x00r\x00e\x00a\x00t\x00e\x00',
+ '--X--'
+ ]))
+ self.getPage('/reqparams', method='POST',
+ headers=[(
+ 'Content-Type', 'multipart/form-data;boundary=X'),
+ ('Content-Length', str(len(body))),
+ ],
+ body=body),
+ self.assertStatus(400)
+ self.assertErrorPage(
+ 400,
+ 'The request entity could not be decoded. The following charsets '
+ "were attempted: ['us-ascii', 'utf-8']")
+
+ def test_nontext(self):
+ self.getPage('/nontext')
+ self.assertHeader('Content-Type', 'application/binary;charset=utf-8')
+ self.assertBody('\x00\x01\x02\x03')
+
+ def testEncoding(self):
+ # Default encoding should be utf-8
+ self.getPage('/mao_zedong')
+ self.assertBody(sing8)
+
+ # Ask for utf-16.
+ self.getPage('/mao_zedong', [('Accept-Charset', 'utf-16')])
+ self.assertHeader('Content-Type', 'text/html;charset=utf-16')
+ self.assertBody(sing16)
+
+ # Ask for multiple encodings. ISO-8859-1 should fail, and utf-16
+ # should be produced.
+ self.getPage('/mao_zedong', [('Accept-Charset',
+ 'iso-8859-1;q=1, utf-16;q=0.5')])
+ self.assertBody(sing16)
+
+ # The "*" value should default to our default_encoding, utf-8
+ self.getPage('/mao_zedong', [('Accept-Charset', '*;q=1, utf-7;q=.2')])
+ self.assertBody(sing8)
+
+ # Only allow iso-8859-1, which should fail and raise 406.
+ self.getPage('/mao_zedong', [('Accept-Charset', 'iso-8859-1, *;q=0')])
+ self.assertStatus('406 Not Acceptable')
+ self.assertInBody('Your client sent this Accept-Charset header: '
+ 'iso-8859-1, *;q=0. We tried these charsets: '
+ 'iso-8859-1.')
+
+ # Ask for x-mac-ce, which should be unknown. See ticket #569.
+ self.getPage('/mao_zedong', [('Accept-Charset',
+ 'us-ascii, ISO-8859-1, x-mac-ce')])
+ self.assertStatus('406 Not Acceptable')
+ self.assertInBody('Your client sent this Accept-Charset header: '
+ 'us-ascii, ISO-8859-1, x-mac-ce. We tried these '
+ 'charsets: ISO-8859-1, us-ascii, x-mac-ce.')
+
+ # Test the 'encoding' arg to encode.
+ self.getPage('/utf8')
+ self.assertBody(sing8)
+ self.getPage('/utf8', [('Accept-Charset', 'us-ascii, ISO-8859-1')])
+ self.assertStatus('406 Not Acceptable')
+
+ # Test malformed quality value, which should raise 400.
+ self.getPage('/mao_zedong', [('Accept-Charset',
+ 'ISO-8859-1,utf-8;q=0.7,*;q=0.7)')])
+ self.assertStatus('400 Bad Request')
+
+ def testGzip(self):
+ zbuf = io.BytesIO()
+ zfile = gzip.GzipFile(mode='wb', fileobj=zbuf, compresslevel=9)
+ zfile.write(b'Hello, world')
+ zfile.close()
+
+ self.getPage('/gzip/', headers=[('Accept-Encoding', 'gzip')])
+ self.assertInBody(zbuf.getvalue()[:3])
+ self.assertHeader('Vary', 'Accept-Encoding')
+ self.assertHeader('Content-Encoding', 'gzip')
+
+ # Test when gzip is denied.
+ self.getPage('/gzip/', headers=[('Accept-Encoding', 'identity')])
+ self.assertHeader('Vary', 'Accept-Encoding')
+ self.assertNoHeader('Content-Encoding')
+ self.assertBody('Hello, world')
+
+ self.getPage('/gzip/', headers=[('Accept-Encoding', 'gzip;q=0')])
+ self.assertHeader('Vary', 'Accept-Encoding')
+ self.assertNoHeader('Content-Encoding')
+ self.assertBody('Hello, world')
+
+ # Test that trailing comma doesn't cause IndexError
+ # Ref: https://github.com/cherrypy/cherrypy/issues/988
+ self.getPage('/gzip/', headers=[('Accept-Encoding', 'gzip,deflate,')])
+ self.assertStatus(200)
+ self.assertNotInBody('IndexError')
+
+ self.getPage('/gzip/', headers=[('Accept-Encoding', '*;q=0')])
+ self.assertStatus(406)
+ self.assertNoHeader('Content-Encoding')
+ self.assertErrorPage(406, 'identity, gzip')
+
+ # Test for ticket #147
+ self.getPage('/gzip/noshow', headers=[('Accept-Encoding', 'gzip')])
+ self.assertNoHeader('Content-Encoding')
+ self.assertStatus(500)
+ self.assertErrorPage(500, pattern='IndexError\n')
+
+ # In this case, there's nothing we can do to deliver a
+ # readable page, since 1) the gzip header is already set,
+ # and 2) we may have already written some of the body.
+ # The fix is to never stream yields when using gzip.
+ if (cherrypy.server.protocol_version == 'HTTP/1.0' or
+ getattr(cherrypy.server, 'using_apache', False)):
+ self.getPage('/gzip/noshow_stream',
+ headers=[('Accept-Encoding', 'gzip')])
+ self.assertHeader('Content-Encoding', 'gzip')
+ self.assertInBody('\x1f\x8b\x08\x00')
+ else:
+ # The wsgiserver will simply stop sending data, and the HTTP client
+ # will error due to an incomplete chunk-encoded stream.
+ self.assertRaises((ValueError, IncompleteRead), self.getPage,
+ '/gzip/noshow_stream',
+ headers=[('Accept-Encoding', 'gzip')])
+
+ def test_UnicodeHeaders(self):
+ self.getPage('/cookies_and_headers')
+ self.assertBody('Any content')
+
+ def test_BytesHeaders(self):
+ self.getPage('/cookies_and_headers')
+ self.assertBody('Any content')
+ self.assertHeader('Bytes-Header', 'Bytes given header')
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_etags.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_etags.py
new file mode 100644
index 0000000000000000000000000000000000000000..293eb866269f487c0ec298d5ceb5f4fe19861e92
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_etags.py
@@ -0,0 +1,84 @@
+import cherrypy
+from cherrypy._cpcompat import ntou
+from cherrypy.test import helper
+
+
+class ETagTest(helper.CPWebCase):
+
+ @staticmethod
+ def setup_server():
+ class Root:
+
+ @cherrypy.expose
+ def resource(self):
+ return 'Oh wah ta goo Siam.'
+
+ @cherrypy.expose
+ def fail(self, code):
+ code = int(code)
+ if 300 <= code <= 399:
+ raise cherrypy.HTTPRedirect([], code)
+ else:
+ raise cherrypy.HTTPError(code)
+
+ @cherrypy.expose
+ # In Python 3, tools.encode is on by default
+ @cherrypy.config(**{'tools.encode.on': True})
+ def unicoded(self):
+ return ntou('I am a \u1ee4nicode string.', 'escape')
+
+ conf = {'/': {'tools.etags.on': True,
+ 'tools.etags.autotags': True,
+ }}
+ cherrypy.tree.mount(Root(), config=conf)
+
+ def test_etags(self):
+ self.getPage('/resource')
+ self.assertStatus('200 OK')
+ self.assertHeader('Content-Type', 'text/html;charset=utf-8')
+ self.assertBody('Oh wah ta goo Siam.')
+ etag = self.assertHeader('ETag')
+
+ # Test If-Match (both valid and invalid)
+ self.getPage('/resource', headers=[('If-Match', etag)])
+ self.assertStatus('200 OK')
+ self.getPage('/resource', headers=[('If-Match', '*')])
+ self.assertStatus('200 OK')
+ self.getPage('/resource', headers=[('If-Match', '*')], method='POST')
+ self.assertStatus('200 OK')
+ self.getPage('/resource', headers=[('If-Match', 'a bogus tag')])
+ self.assertStatus('412 Precondition Failed')
+
+ # Test If-None-Match (both valid and invalid)
+ self.getPage('/resource', headers=[('If-None-Match', etag)])
+ self.assertStatus(304)
+ self.getPage('/resource', method='POST',
+ headers=[('If-None-Match', etag)])
+ self.assertStatus('412 Precondition Failed')
+ self.getPage('/resource', headers=[('If-None-Match', '*')])
+ self.assertStatus(304)
+ self.getPage('/resource', headers=[('If-None-Match', 'a bogus tag')])
+ self.assertStatus('200 OK')
+
+ def test_errors(self):
+ self.getPage('/resource')
+ self.assertStatus(200)
+ etag = self.assertHeader('ETag')
+
+ # Test raising errors in page handler
+ self.getPage('/fail/412', headers=[('If-Match', etag)])
+ self.assertStatus(412)
+ self.getPage('/fail/304', headers=[('If-Match', etag)])
+ self.assertStatus(304)
+ self.getPage('/fail/412', headers=[('If-None-Match', '*')])
+ self.assertStatus(412)
+ self.getPage('/fail/304', headers=[('If-None-Match', '*')])
+ self.assertStatus(304)
+
+ def test_unicode_body(self):
+ self.getPage('/unicoded')
+ self.assertStatus(200)
+ etag1 = self.assertHeader('ETag')
+ self.getPage('/unicoded', headers=[('If-Match', etag1)])
+ self.assertStatus(200)
+ self.assertHeader('ETag', etag1)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_http.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_http.py
new file mode 100644
index 0000000000000000000000000000000000000000..a955be43d213b14ce551e1c86891cebd456a514d
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_http.py
@@ -0,0 +1,305 @@
+# coding: utf-8
+"""Tests for managing HTTP issues (malformed requests, etc)."""
+
+import errno
+import mimetypes
+import socket
+import sys
+from unittest import mock
+import urllib.parse
+from http.client import HTTPConnection
+
+import cherrypy
+from cherrypy._cpcompat import HTTPSConnection
+
+from cherrypy.test import helper
+
+
+def is_ascii(text):
+ """
+ Return True if the text encodes as ascii.
+ """
+ try:
+ text.encode('ascii')
+ return True
+ except Exception:
+ pass
+ return False
+
+
+def encode_filename(filename):
+ """
+ Given a filename to be used in a multipart/form-data,
+ encode the name. Return the key and encoded filename.
+ """
+ if is_ascii(filename):
+ return 'filename', '"{filename}"'.format(**locals())
+ encoded = urllib.parse.quote(filename, encoding='utf-8')
+ return 'filename*', "'".join((
+ 'UTF-8',
+ '', # lang
+ encoded,
+ ))
+
+
+def encode_multipart_formdata(files):
+ """Return (content_type, body) ready for httplib.HTTP instance.
+
+ files: a sequence of (name, filename, value) tuples for multipart uploads.
+ filename can be a string or a tuple ('filename string', 'encoding')
+ """
+ BOUNDARY = '________ThIs_Is_tHe_bouNdaRY_$'
+ L = []
+ for key, filename, value in files:
+ L.append('--' + BOUNDARY)
+
+ fn_key, encoded = encode_filename(filename)
+ tmpl = \
+ 'Content-Disposition: form-data; name="{key}"; {fn_key}={encoded}'
+ L.append(tmpl.format(**locals()))
+ ct = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
+ L.append('Content-Type: %s' % ct)
+ L.append('')
+ L.append(value)
+ L.append('--' + BOUNDARY + '--')
+ L.append('')
+ body = '\r\n'.join(L)
+ content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
+ return content_type, body
+
+
+class HTTPTests(helper.CPWebCase):
+
+ def make_connection(self):
+ if self.scheme == 'https':
+ return HTTPSConnection('%s:%s' % (self.interface(), self.PORT))
+ else:
+ return HTTPConnection('%s:%s' % (self.interface(), self.PORT))
+
+ @staticmethod
+ def setup_server():
+ class Root:
+
+ @cherrypy.expose
+ def index(self, *args, **kwargs):
+ return 'Hello world!'
+
+ @cherrypy.expose
+ @cherrypy.config(**{'request.process_request_body': False})
+ def no_body(self, *args, **kwargs):
+ return 'Hello world!'
+
+ @cherrypy.expose
+ def post_multipart(self, file):
+ """Return a summary ("a * 65536\nb * 65536") of the uploaded
+ file.
+ """
+ contents = file.file.read()
+ summary = []
+ curchar = None
+ count = 0
+ for c in contents:
+ if c == curchar:
+ count += 1
+ else:
+ if count:
+ curchar = chr(curchar)
+ summary.append('%s * %d' % (curchar, count))
+ count = 1
+ curchar = c
+ if count:
+ curchar = chr(curchar)
+ summary.append('%s * %d' % (curchar, count))
+ return ', '.join(summary)
+
+ @cherrypy.expose
+ def post_filename(self, myfile):
+ '''Return the name of the file which was uploaded.'''
+ return myfile.filename
+
+ cherrypy.tree.mount(Root())
+ cherrypy.config.update({'server.max_request_body_size': 30000000})
+
+ def test_no_content_length(self):
+ # "The presence of a message-body in a request is signaled by the
+ # inclusion of a Content-Length or Transfer-Encoding header field in
+ # the request's message-headers."
+ #
+ # Send a message with neither header and no body. Even though
+ # the request is of method POST, this should be OK because we set
+ # request.process_request_body to False for our handler.
+ c = self.make_connection()
+ c.request('POST', '/no_body')
+ response = c.getresponse()
+ self.body = response.fp.read()
+ self.status = str(response.status)
+ self.assertStatus(200)
+ self.assertBody(b'Hello world!')
+
+ # Now send a message that has no Content-Length, but does send a body.
+ # Verify that CP times out the socket and responds
+ # with 411 Length Required.
+ if self.scheme == 'https':
+ c = HTTPSConnection('%s:%s' % (self.interface(), self.PORT))
+ else:
+ c = HTTPConnection('%s:%s' % (self.interface(), self.PORT))
+
+ # `_get_content_length` is needed for Python 3.6+
+ with mock.patch.object(
+ c,
+ '_get_content_length',
+ lambda body, method: None,
+ create=True):
+ # `_set_content_length` is needed for Python 2.7-3.5
+ with mock.patch.object(c, '_set_content_length', create=True):
+ c.request('POST', '/')
+
+ response = c.getresponse()
+ self.body = response.fp.read()
+ self.status = str(response.status)
+ self.assertStatus(411)
+
+ def test_post_multipart(self):
+ alphabet = 'abcdefghijklmnopqrstuvwxyz'
+ # generate file contents for a large post
+ contents = ''.join([c * 65536 for c in alphabet])
+
+ # encode as multipart form data
+ files = [('file', 'file.txt', contents)]
+ content_type, body = encode_multipart_formdata(files)
+ body = body.encode('Latin-1')
+
+ # post file
+ c = self.make_connection()
+ c.putrequest('POST', '/post_multipart')
+ c.putheader('Content-Type', content_type)
+ c.putheader('Content-Length', str(len(body)))
+ c.endheaders()
+ c.send(body)
+
+ response = c.getresponse()
+ self.body = response.fp.read()
+ self.status = str(response.status)
+ self.assertStatus(200)
+ parts = ['%s * 65536' % ch for ch in alphabet]
+ self.assertBody(', '.join(parts))
+
+ def test_post_filename_with_special_characters(self):
+ """Testing that we can handle filenames with special characters.
+
+ This was reported as a bug in:
+
+ * https://github.com/cherrypy/cherrypy/issues/1146/
+ * https://github.com/cherrypy/cherrypy/issues/1397/
+ * https://github.com/cherrypy/cherrypy/issues/1694/
+ """
+ # We'll upload a bunch of files with differing names.
+ fnames = [
+ 'boop.csv', 'foo, bar.csv', 'bar, xxxx.csv', 'file"name.csv',
+ 'file;name.csv', 'file; name.csv', u'test_łóąä.txt',
+ ]
+ for fname in fnames:
+ files = [('myfile', fname, 'yunyeenyunyue')]
+ content_type, body = encode_multipart_formdata(files)
+ body = body.encode('Latin-1')
+
+ # post file
+ c = self.make_connection()
+ c.putrequest('POST', '/post_filename')
+ c.putheader('Content-Type', content_type)
+ c.putheader('Content-Length', str(len(body)))
+ c.endheaders()
+ c.send(body)
+
+ response = c.getresponse()
+ self.body = response.fp.read()
+ self.status = str(response.status)
+ self.assertStatus(200)
+ self.assertBody(fname)
+
+ def test_malformed_request_line(self):
+ if getattr(cherrypy.server, 'using_apache', False):
+ return self.skip('skipped due to known Apache differences...')
+
+ # Test missing version in Request-Line
+ c = self.make_connection()
+ c._output(b'geT /')
+ c._send_output()
+ if hasattr(c, 'strict'):
+ response = c.response_class(c.sock, strict=c.strict, method='GET')
+ else:
+ # Python 3.2 removed the 'strict' feature, saying:
+ # "http.client now always assumes HTTP/1.x compliant servers."
+ response = c.response_class(c.sock, method='GET')
+ response.begin()
+ self.assertEqual(response.status, 400)
+ self.assertEqual(response.fp.read(22), b'Malformed Request-Line')
+ c.close()
+
+ def test_request_line_split_issue_1220(self):
+ params = {
+ 'intervenant-entreprise-evenement_classaction':
+ 'evenement-mailremerciements',
+ '_path': 'intervenant-entreprise-evenement',
+ 'intervenant-entreprise-evenement_action-id': 19404,
+ 'intervenant-entreprise-evenement_id': 19404,
+ 'intervenant-entreprise_id': 28092,
+ }
+ Request_URI = '/index?' + urllib.parse.urlencode(params)
+ self.assertEqual(len('GET %s HTTP/1.1\r\n' % Request_URI), 256)
+ self.getPage(Request_URI)
+ self.assertBody('Hello world!')
+
+ def test_malformed_header(self):
+ c = self.make_connection()
+ c.putrequest('GET', '/')
+ c.putheader('Content-Type', 'text/plain')
+ # See https://github.com/cherrypy/cherrypy/issues/941
+ c._output(b're, 1.2.3.4#015#012')
+ c.endheaders()
+
+ response = c.getresponse()
+ self.status = str(response.status)
+ self.assertStatus(400)
+ self.body = response.fp.read(20)
+ self.assertBody('Illegal header line.')
+
+ def test_http_over_https(self):
+ if self.scheme != 'https':
+ return self.skip('skipped (not running HTTPS)... ')
+
+ # Try connecting without SSL.
+ conn = HTTPConnection('%s:%s' % (self.interface(), self.PORT))
+ conn.putrequest('GET', '/', skip_host=True)
+ conn.putheader('Host', self.HOST)
+ conn.endheaders()
+ response = conn.response_class(conn.sock, method='GET')
+ try:
+ response.begin()
+ self.assertEqual(response.status, 400)
+ self.body = response.read()
+ self.assertBody('The client sent a plain HTTP request, but this '
+ 'server only speaks HTTPS on this port.')
+ except socket.error:
+ e = sys.exc_info()[1]
+ # "Connection reset by peer" is also acceptable.
+ if e.errno != errno.ECONNRESET:
+ raise
+
+ def test_garbage_in(self):
+ # Connect without SSL regardless of server.scheme
+ c = HTTPConnection('%s:%s' % (self.interface(), self.PORT))
+ c._output(b'gjkgjklsgjklsgjkljklsg')
+ c._send_output()
+ response = c.response_class(c.sock, method='GET')
+ try:
+ response.begin()
+ self.assertEqual(response.status, 400)
+ self.assertEqual(response.fp.read(22),
+ b'Malformed Request-Line')
+ c.close()
+ except socket.error:
+ e = sys.exc_info()[1]
+ # "Connection reset by peer" is also acceptable.
+ if e.errno != errno.ECONNRESET:
+ raise
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_httputil.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_httputil.py
new file mode 100644
index 0000000000000000000000000000000000000000..84661424788ea4079013dd5f864bd06668000218
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_httputil.py
@@ -0,0 +1,81 @@
+"""Test helpers from ``cherrypy.lib.httputil`` module."""
+import pytest
+import http.client
+
+from cherrypy.lib import httputil
+
+
+@pytest.mark.parametrize(
+ 'script_name,path_info,expected_url',
+ [
+ ('/sn/', '/pi/', '/sn/pi/'),
+ ('/sn/', '/pi', '/sn/pi'),
+ ('/sn/', '/', '/sn/'),
+ ('/sn/', '', '/sn/'),
+ ('/sn', '/pi/', '/sn/pi/'),
+ ('/sn', '/pi', '/sn/pi'),
+ ('/sn', '/', '/sn/'),
+ ('/sn', '', '/sn'),
+ ('/', '/pi/', '/pi/'),
+ ('/', '/pi', '/pi'),
+ ('/', '/', '/'),
+ ('/', '', '/'),
+ ('', '/pi/', '/pi/'),
+ ('', '/pi', '/pi'),
+ ('', '/', '/'),
+ ('', '', '/'),
+ ]
+)
+def test_urljoin(script_name, path_info, expected_url):
+ """Test all slash+atom combinations for SCRIPT_NAME and PATH_INFO."""
+ actual_url = httputil.urljoin(script_name, path_info)
+ assert actual_url == expected_url
+
+
+EXPECTED_200 = (200, 'OK', 'Request fulfilled, document follows')
+EXPECTED_500 = (
+ 500,
+ 'Internal Server Error',
+ 'The server encountered an unexpected condition which '
+ 'prevented it from fulfilling the request.',
+)
+EXPECTED_404 = (404, 'Not Found', 'Nothing matches the given URI')
+EXPECTED_444 = (444, 'Non-existent reason', '')
+
+
+@pytest.mark.parametrize(
+ 'status,expected_status',
+ [
+ (None, EXPECTED_200),
+ (200, EXPECTED_200),
+ ('500', EXPECTED_500),
+ (http.client.NOT_FOUND, EXPECTED_404),
+ ('444 Non-existent reason', EXPECTED_444),
+ ]
+)
+def test_valid_status(status, expected_status):
+ """Check valid int, string and http.client-constants
+ statuses processing."""
+ assert httputil.valid_status(status) == expected_status
+
+
+@pytest.mark.parametrize(
+ 'status_code,error_msg',
+ [
+ (
+ 'hey',
+ r"Illegal response status from server \('hey' is non-numeric\)."
+ ),
+ (
+ {'hey': 'hi'},
+ r'Illegal response status from server '
+ r"\(\{'hey': 'hi'\} is non-numeric\).",
+ ),
+ (1, r'Illegal response status from server \(1 is out of range\).'),
+ (600, r'Illegal response status from server \(600 is out of range\).'),
+ ]
+)
+def test_invalid_status(status_code, error_msg):
+ """Check that invalid status cause certain errors."""
+ with pytest.raises(ValueError, match=error_msg):
+ httputil.valid_status(status_code)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_iterator.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_iterator.py
new file mode 100644
index 0000000000000000000000000000000000000000..6600a78dba4718ca75ad6a2aeddd1443ae1f5414
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_iterator.py
@@ -0,0 +1,194 @@
+import cherrypy
+from cherrypy.test import helper
+
+
+class IteratorBase(object):
+
+ created = 0
+ datachunk = 'butternut squash' * 256
+
+ @classmethod
+ def incr(cls):
+ cls.created += 1
+
+ @classmethod
+ def decr(cls):
+ cls.created -= 1
+
+
+class OurGenerator(IteratorBase):
+
+ def __iter__(self):
+ self.incr()
+ try:
+ for i in range(1024):
+ yield self.datachunk
+ finally:
+ self.decr()
+
+
+class OurIterator(IteratorBase):
+
+ started = False
+ closed_off = False
+ count = 0
+
+ def increment(self):
+ self.incr()
+
+ def decrement(self):
+ if not self.closed_off:
+ self.closed_off = True
+ self.decr()
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ if not self.started:
+ self.started = True
+ self.increment()
+ self.count += 1
+ if self.count > 1024:
+ raise StopIteration
+ return self.datachunk
+
+ next = __next__
+
+ def __del__(self):
+ self.decrement()
+
+
+class OurClosableIterator(OurIterator):
+
+ def close(self):
+ self.decrement()
+
+
+class OurNotClosableIterator(OurIterator):
+
+ # We can't close something which requires an additional argument.
+ def close(self, somearg):
+ self.decrement()
+
+
+class OurUnclosableIterator(OurIterator):
+ close = 'close' # not callable!
+
+
+class IteratorTest(helper.CPWebCase):
+
+ @staticmethod
+ def setup_server():
+
+ class Root(object):
+
+ @cherrypy.expose
+ def count(self, clsname):
+ cherrypy.response.headers['Content-Type'] = 'text/plain'
+ return str(globals()[clsname].created)
+
+ @cherrypy.expose
+ def getall(self, clsname):
+ cherrypy.response.headers['Content-Type'] = 'text/plain'
+ return globals()[clsname]()
+
+ @cherrypy.expose
+ @cherrypy.config(**{'response.stream': True})
+ def stream(self, clsname):
+ return self.getall(clsname)
+
+ cherrypy.tree.mount(Root())
+
+ def test_iterator(self):
+ try:
+ self._test_iterator()
+ except Exception:
+ 'Test fails intermittently. See #1419'
+
+ def _test_iterator(self):
+ if cherrypy.server.protocol_version != 'HTTP/1.1':
+ return self.skip()
+
+ self.PROTOCOL = 'HTTP/1.1'
+
+ # Check the counts of all the classes, they should be zero.
+ closables = ['OurClosableIterator', 'OurGenerator']
+ unclosables = ['OurUnclosableIterator', 'OurNotClosableIterator']
+ all_classes = closables + unclosables
+
+ import random
+ random.shuffle(all_classes)
+
+ for clsname in all_classes:
+ self.getPage('/count/' + clsname)
+ self.assertStatus(200)
+ self.assertBody('0')
+
+ # We should also be able to read the entire content body
+ # successfully, though we don't need to, we just want to
+ # check the header.
+ for clsname in all_classes:
+ itr_conn = self.get_conn()
+ itr_conn.putrequest('GET', '/getall/' + clsname)
+ itr_conn.endheaders()
+ response = itr_conn.getresponse()
+ self.assertEqual(response.status, 200)
+ headers = response.getheaders()
+ for header_name, header_value in headers:
+ if header_name.lower() == 'content-length':
+ expected = str(1024 * 16 * 256)
+ assert header_value == expected, header_value
+ break
+ else:
+ raise AssertionError('No Content-Length header found')
+
+ # As the response should be fully consumed by CherryPy
+ # before sending back, the count should still be at zero
+ # by the time the response has been sent.
+ self.getPage('/count/' + clsname)
+ self.assertStatus(200)
+ self.assertBody('0')
+
+ # Now we do the same check with streaming - some classes will
+ # be automatically closed, while others cannot.
+ stream_counts = {}
+ for clsname in all_classes:
+ itr_conn = self.get_conn()
+ itr_conn.putrequest('GET', '/stream/' + clsname)
+ itr_conn.endheaders()
+ response = itr_conn.getresponse()
+ self.assertEqual(response.status, 200)
+ response.fp.read(65536)
+
+ # Let's check the count - this should always be one.
+ self.getPage('/count/' + clsname)
+ self.assertBody('1')
+
+ # Now if we close the connection, the count should go back
+ # to zero.
+ itr_conn.close()
+ self.getPage('/count/' + clsname)
+
+ # If this is a response which should be easily closed, then
+ # we will test to see if the value has gone back down to
+ # zero.
+ if clsname in closables:
+
+ # Sometimes we try to get the answer too quickly - we
+ # will wait for 100 ms before asking again if we didn't
+ # get the answer we wanted.
+ if self.body != '0':
+ import time
+ time.sleep(0.1)
+ self.getPage('/count/' + clsname)
+
+ stream_counts[clsname] = int(self.body)
+
+ # Check that we closed off the classes which should provide
+ # easy mechanisms for doing so.
+ for clsname in closables:
+ assert stream_counts[clsname] == 0, (
+ 'did not close off stream response correctly, expected '
+ 'count of zero for %s: %s' % (clsname, stream_counts)
+ )
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_json.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_json.py
new file mode 100644
index 0000000000000000000000000000000000000000..4b8be548fb73f230e744205a63a6ac768ced929e
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_json.py
@@ -0,0 +1,101 @@
+import cherrypy
+from cherrypy.test import helper
+from cherrypy._json import json
+
+
+json_out = cherrypy.config(**{'tools.json_out.on': True})
+json_in = cherrypy.config(**{'tools.json_in.on': True})
+
+
+class JsonTest(helper.CPWebCase):
+
+ @staticmethod
+ def setup_server():
+ class Root(object):
+
+ @cherrypy.expose
+ def plain(self):
+ return 'hello'
+
+ @cherrypy.expose
+ @json_out
+ def json_string(self):
+ return 'hello'
+
+ @cherrypy.expose
+ @json_out
+ def json_list(self):
+ return ['a', 'b', 42]
+
+ @cherrypy.expose
+ @json_out
+ def json_dict(self):
+ return {'answer': 42}
+
+ @cherrypy.expose
+ @json_in
+ def json_post(self):
+ if cherrypy.request.json == [13, 'c']:
+ return 'ok'
+ else:
+ return 'nok'
+
+ @cherrypy.expose
+ @json_out
+ @cherrypy.config(**{'tools.caching.on': True})
+ def json_cached(self):
+ return 'hello there'
+
+ root = Root()
+ cherrypy.tree.mount(root)
+
+ def test_json_output(self):
+ if json is None:
+ self.skip('json not found ')
+ return
+
+ self.getPage('/plain')
+ self.assertBody('hello')
+
+ self.getPage('/json_string')
+ self.assertBody('"hello"')
+
+ self.getPage('/json_list')
+ self.assertBody('["a", "b", 42]')
+
+ self.getPage('/json_dict')
+ self.assertBody('{"answer": 42}')
+
+ def test_json_input(self):
+ if json is None:
+ self.skip('json not found ')
+ return
+
+ body = '[13, "c"]'
+ headers = [('Content-Type', 'application/json'),
+ ('Content-Length', str(len(body)))]
+ self.getPage('/json_post', method='POST', headers=headers, body=body)
+ self.assertBody('ok')
+
+ body = '[13, "c"]'
+ headers = [('Content-Type', 'text/plain'),
+ ('Content-Length', str(len(body)))]
+ self.getPage('/json_post', method='POST', headers=headers, body=body)
+ self.assertStatus(415, 'Expected an application/json content type')
+
+ body = '[13, -]'
+ headers = [('Content-Type', 'application/json'),
+ ('Content-Length', str(len(body)))]
+ self.getPage('/json_post', method='POST', headers=headers, body=body)
+ self.assertStatus(400, 'Invalid JSON document')
+
+ def test_cached(self):
+ if json is None:
+ self.skip('json not found ')
+ return
+
+ self.getPage('/json_cached')
+ self.assertStatus(200, '"hello"')
+
+ self.getPage('/json_cached') # 2'nd time to hit cache
+ self.assertStatus(200, '"hello"')
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_logging.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_logging.py
new file mode 100644
index 0000000000000000000000000000000000000000..5308fb72fb29e784d7957eeb24dd4ebee3e791dd
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_logging.py
@@ -0,0 +1,288 @@
+"""Basic tests for the CherryPy core: request handling."""
+
+import logging
+
+from cheroot.test import webtest
+import pytest
+import requests # FIXME: Temporary using it directly, better switch
+
+import cherrypy
+from cherrypy.test.logtest import LogCase
+
+
+# Some unicode strings.
+tartaros = u'\u03a4\u1f71\u03c1\u03c4\u03b1\u03c1\u03bf\u03c2'
+erebos = u'\u0388\u03c1\u03b5\u03b2\u03bf\u03c2.com'
+
+
+@pytest.fixture
+def access_log_file(tmp_path_factory):
+ return tmp_path_factory.mktemp('logs') / 'access.log'
+
+
+@pytest.fixture
+def error_log_file(tmp_path_factory):
+ return tmp_path_factory.mktemp('logs') / 'access.log'
+
+
+@pytest.fixture
+def server(configure_server):
+ cherrypy.engine.start()
+ cherrypy.engine.wait(cherrypy.engine.states.STARTED)
+
+ yield
+
+ shutdown_server()
+
+
+def shutdown_server():
+ cherrypy.engine.exit()
+ cherrypy.engine.block()
+
+ for name, server in getattr(cherrypy, 'servers', {}).copy().items():
+ server.unsubscribe()
+ del cherrypy.servers[name]
+
+
+@pytest.fixture
+def configure_server(access_log_file, error_log_file):
+ class Root:
+
+ @cherrypy.expose
+ def index(self):
+ return 'hello'
+
+ @cherrypy.expose
+ def uni_code(self):
+ cherrypy.request.login = tartaros
+ cherrypy.request.remote.name = erebos
+
+ @cherrypy.expose
+ def slashes(self):
+ cherrypy.request.request_line = r'GET /slashed\path HTTP/1.1'
+
+ @cherrypy.expose
+ def whitespace(self):
+ # User-Agent = "User-Agent" ":" 1*( product | comment )
+ # comment = "(" *( ctext | quoted-pair | comment ) ")"
+ # ctext = <any TEXT excluding "(" and ")">
+ # TEXT = <any OCTET except CTLs, but including LWS>
+ # LWS = [CRLF] 1*( SP | HT )
+ cherrypy.request.headers['User-Agent'] = 'Browzuh (1.0\r\n\t\t.3)'
+
+ @cherrypy.expose
+ def as_string(self):
+ return 'content'
+
+ @cherrypy.expose
+ def as_yield(self):
+ yield 'content'
+
+ @cherrypy.expose
+ @cherrypy.config(**{'tools.log_tracebacks.on': True})
+ def error(self):
+ raise ValueError()
+
+ root = Root()
+
+ cherrypy.config.reset()
+ cherrypy.config.update({
+ 'server.socket_host': webtest.WebCase.HOST,
+ 'server.socket_port': webtest.WebCase.PORT,
+ 'server.protocol_version': webtest.WebCase.PROTOCOL,
+ 'environment': 'test_suite',
+ })
+ cherrypy.config.update({
+ 'log.error_file': str(error_log_file),
+ 'log.access_file': str(access_log_file),
+ })
+ cherrypy.tree.mount(root)
+
+
+@pytest.fixture
+def log_tracker(access_log_file):
+ class LogTracker(LogCase):
+ logfile = str(access_log_file)
+ return LogTracker()
+
+
+def test_normal_return(log_tracker, server):
+ log_tracker.markLog()
+ host = webtest.interface(webtest.WebCase.HOST)
+ port = webtest.WebCase.PORT
+ resp = requests.get(
+ 'http://%s:%s/as_string' % (host, port),
+ headers={
+ 'Referer': 'http://www.cherrypy.org/',
+ 'User-Agent': 'Mozilla/5.0',
+ },
+ )
+ expected_body = 'content'
+ assert resp.text == expected_body
+ assert resp.status_code == 200
+
+ intro = '%s - - [' % host
+
+ log_tracker.assertLog(-1, intro)
+
+ content_length = len(expected_body)
+ if not any(
+ k for k, v in resp.headers.items()
+ if k.lower() == 'content-length'
+ ):
+ content_length = '-'
+
+ log_tracker.assertLog(
+ -1,
+ '] "GET /as_string HTTP/1.1" 200 %s '
+ '"http://www.cherrypy.org/" "Mozilla/5.0"'
+ % content_length,
+ )
+
+
+def test_normal_yield(log_tracker, server):
+ log_tracker.markLog()
+ host = webtest.interface(webtest.WebCase.HOST)
+ port = webtest.WebCase.PORT
+ resp = requests.get(
+ 'http://%s:%s/as_yield' % (host, port),
+ headers={
+ 'User-Agent': '',
+ },
+ )
+ expected_body = 'content'
+ assert resp.text == expected_body
+ assert resp.status_code == 200
+
+ intro = '%s - - [' % host
+
+ log_tracker.assertLog(-1, intro)
+ content_length = len(expected_body)
+ if not any(
+ k for k, v in resp.headers.items()
+ if k.lower() == 'content-length'
+ ):
+ content_length = '-'
+
+ log_tracker.assertLog(
+ -1,
+ '] "GET /as_yield HTTP/1.1" 200 %s "" ""'
+ % content_length,
+ )
+
+
+def test_custom_log_format(log_tracker, monkeypatch, server):
+ """Test a customized access_log_format string, which is a
+ feature of _cplogging.LogManager.access()."""
+ monkeypatch.setattr(
+ 'cherrypy._cplogging.LogManager.access_log_format',
+ '{h} {l} {u} {t} "{r}" {s} {b} "{f}" "{a}" {o}',
+ )
+ log_tracker.markLog()
+ host = webtest.interface(webtest.WebCase.HOST)
+ port = webtest.WebCase.PORT
+ requests.get(
+ 'http://%s:%s/as_string' % (host, port),
+ headers={
+ 'Referer': 'REFERER',
+ 'User-Agent': 'USERAGENT',
+ 'Host': 'HOST',
+ },
+ )
+ log_tracker.assertLog(-1, '%s - - [' % host)
+ log_tracker.assertLog(
+ -1,
+ '] "GET /as_string HTTP/1.1" '
+ '200 7 "REFERER" "USERAGENT" HOST',
+ )
+
+
+def test_timez_log_format(log_tracker, monkeypatch, server):
+ """Test a customized access_log_format string, which is a
+ feature of _cplogging.LogManager.access()."""
+ monkeypatch.setattr(
+ 'cherrypy._cplogging.LogManager.access_log_format',
+ '{h} {l} {u} {z} "{r}" {s} {b} "{f}" "{a}" {o}',
+ )
+ log_tracker.markLog()
+
+ expected_time = str(cherrypy._cplogging.LazyRfc3339UtcTime())
+ monkeypatch.setattr(
+ 'cherrypy._cplogging.LazyRfc3339UtcTime',
+ lambda: expected_time,
+ )
+ host = webtest.interface(webtest.WebCase.HOST)
+ port = webtest.WebCase.PORT
+ requests.get(
+ 'http://%s:%s/as_string' % (host, port),
+ headers={
+ 'Referer': 'REFERER',
+ 'User-Agent': 'USERAGENT',
+ 'Host': 'HOST',
+ },
+ )
+
+ log_tracker.assertLog(-1, '%s - - ' % host)
+ log_tracker.assertLog(-1, expected_time)
+ log_tracker.assertLog(
+ -1,
+ ' "GET /as_string HTTP/1.1" '
+ '200 7 "REFERER" "USERAGENT" HOST',
+ )
+
+
+def test_UUIDv4_parameter_log_format(log_tracker, monkeypatch, server):
+ """Test rendering of UUID4 within access log."""
+ monkeypatch.setattr(
+ 'cherrypy._cplogging.LogManager.access_log_format',
+ '{i}',
+ )
+ log_tracker.markLog()
+ host = webtest.interface(webtest.WebCase.HOST)
+ port = webtest.WebCase.PORT
+ requests.get('http://%s:%s/as_string' % (host, port))
+ log_tracker.assertValidUUIDv4()
+
+
+def test_escaped_output(log_tracker, server):
+ # Test unicode in access log pieces.
+ log_tracker.markLog()
+ host = webtest.interface(webtest.WebCase.HOST)
+ port = webtest.WebCase.PORT
+ resp = requests.get('http://%s:%s/uni_code' % (host, port))
+ assert resp.status_code == 200
+ # The repr of a bytestring includes a b'' prefix
+ log_tracker.assertLog(-1, repr(tartaros.encode('utf8'))[2:-1])
+ # Test the erebos value. Included inline for your enlightenment.
+ # Note the 'r' prefix--those backslashes are literals.
+ log_tracker.assertLog(
+ -1,
+ r'\xce\x88\xcf\x81\xce\xb5\xce\xb2\xce\xbf\xcf\x82',
+ )
+
+ # Test backslashes in output.
+ log_tracker.markLog()
+ resp = requests.get('http://%s:%s/slashes' % (host, port))
+ assert resp.status_code == 200
+ log_tracker.assertLog(-1, b'"GET /slashed\\path HTTP/1.1"')
+
+ # Test whitespace in output.
+ log_tracker.markLog()
+ resp = requests.get('http://%s:%s/whitespace' % (host, port))
+ assert resp.status_code == 200
+ # Again, note the 'r' prefix.
+ log_tracker.assertLog(-1, r'"Browzuh (1.0\r\n\t\t.3)"')
+
+
+def test_tracebacks(server, caplog):
+ host = webtest.interface(webtest.WebCase.HOST)
+ port = webtest.WebCase.PORT
+ with caplog.at_level(logging.ERROR, logger='cherrypy.error'):
+ resp = requests.get('http://%s:%s/error' % (host, port))
+
+ rec = caplog.records[0]
+ exc_cls, exc_msg = rec.exc_info[0], rec.message
+
+ assert 'raise ValueError()' in resp.text
+ assert 'HTTP' in exc_msg
+ assert exc_cls is ValueError
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_mime.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_mime.py
new file mode 100644
index 0000000000000000000000000000000000000000..ef35d10e70c7949914940b206e2632b5fbbdb8f3
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_mime.py
@@ -0,0 +1,134 @@
+"""Tests for various MIME issues, including the safe_multipart Tool."""
+
+import cherrypy
+from cherrypy._cpcompat import ntou
+from cherrypy.test import helper
+
+
+def setup_server():
+
+ class Root:
+
+ @cherrypy.expose
+ def multipart(self, parts):
+ return repr(parts)
+
+ @cherrypy.expose
+ def multipart_form_data(self, **kwargs):
+ return repr(list(sorted(kwargs.items())))
+
+ @cherrypy.expose
+ def flashupload(self, Filedata, Upload, Filename):
+ return ('Upload: %s, Filename: %s, Filedata: %r' %
+ (Upload, Filename, Filedata.file.read()))
+
+ cherrypy.config.update({'server.max_request_body_size': 0})
+ cherrypy.tree.mount(Root())
+
+
+# Client-side code #
+
+
+class MultipartTest(helper.CPWebCase):
+ setup_server = staticmethod(setup_server)
+
+ def test_multipart(self):
+ text_part = ntou('This is the text version')
+ html_part = ntou(
+ """<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<html>
+<head>
+ <meta content="text/html;charset=ISO-8859-1" http-equiv="Content-Type">
+</head>
+<body bgcolor="#ffffff" text="#000000">
+
+This is the <strong>HTML</strong> version
+</body>
+</html>
+""")
+ body = '\r\n'.join([
+ '--123456789',
+ "Content-Type: text/plain; charset='ISO-8859-1'",
+ 'Content-Transfer-Encoding: 7bit',
+ '',
+ text_part,
+ '--123456789',
+ "Content-Type: text/html; charset='ISO-8859-1'",
+ '',
+ html_part,
+ '--123456789--'])
+ headers = [
+ ('Content-Type', 'multipart/mixed; boundary=123456789'),
+ ('Content-Length', str(len(body))),
+ ]
+ self.getPage('/multipart', headers, 'POST', body)
+ self.assertBody(repr([text_part, html_part]))
+
+ def test_multipart_form_data(self):
+ body = '\r\n'.join([
+ '--X',
+ 'Content-Disposition: form-data; name="foo"',
+ '',
+ 'bar',
+ '--X',
+ # Test a param with more than one value.
+ # See
+ # https://github.com/cherrypy/cherrypy/issues/1028
+ 'Content-Disposition: form-data; name="baz"',
+ '',
+ '111',
+ '--X',
+ 'Content-Disposition: form-data; name="baz"',
+ '',
+ '333',
+ '--X--'
+ ])
+ self.getPage('/multipart_form_data', method='POST',
+ headers=[(
+ 'Content-Type', 'multipart/form-data;boundary=X'),
+ ('Content-Length', str(len(body))),
+ ],
+ body=body),
+ self.assertBody(
+ repr([('baz', [ntou('111'), ntou('333')]), ('foo', ntou('bar'))]))
+
+
+class SafeMultipartHandlingTest(helper.CPWebCase):
+ setup_server = staticmethod(setup_server)
+
+ def test_Flash_Upload(self):
+ headers = [
+ ('Accept', 'text/*'),
+ ('Content-Type', 'multipart/form-data; '
+ 'boundary=----------KM7Ij5cH2KM7Ef1gL6ae0ae0cH2gL6'),
+ ('User-Agent', 'Shockwave Flash'),
+ ('Host', 'www.example.com:54583'),
+ ('Content-Length', '499'),
+ ('Connection', 'Keep-Alive'),
+ ('Cache-Control', 'no-cache'),
+ ]
+ filedata = (b'<?xml version="1.0" encoding="UTF-8"?>\r\n'
+ b'<projectDescription>\r\n'
+ b'</projectDescription>\r\n')
+ body = (
+ b'------------KM7Ij5cH2KM7Ef1gL6ae0ae0cH2gL6\r\n'
+ b'Content-Disposition: form-data; name="Filename"\r\n'
+ b'\r\n'
+ b'.project\r\n'
+ b'------------KM7Ij5cH2KM7Ef1gL6ae0ae0cH2gL6\r\n'
+ b'Content-Disposition: form-data; '
+ b'name="Filedata"; filename=".project"\r\n'
+ b'Content-Type: application/octet-stream\r\n'
+ b'\r\n' +
+ filedata +
+ b'\r\n'
+ b'------------KM7Ij5cH2KM7Ef1gL6ae0ae0cH2gL6\r\n'
+ b'Content-Disposition: form-data; name="Upload"\r\n'
+ b'\r\n'
+ b'Submit Query\r\n'
+ # Flash apps omit the trailing \r\n on the last line:
+ b'------------KM7Ij5cH2KM7Ef1gL6ae0ae0cH2gL6--'
+ )
+ self.getPage('/flashupload', headers, 'POST', body)
+ self.assertBody('Upload: Submit Query, Filename: .project, '
+ 'Filedata: %r' % filedata)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_misc_tools.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_misc_tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..fb85b8f89923e403cae750dd01f62b5882914b74
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_misc_tools.py
@@ -0,0 +1,210 @@
+import os
+
+import cherrypy
+from cherrypy import tools
+from cherrypy.test import helper
+
+
+localDir = os.path.dirname(__file__)
+logfile = os.path.join(localDir, 'test_misc_tools.log')
+
+
+def setup_server():
+ class Root:
+
+ @cherrypy.expose
+ def index(self):
+ yield 'Hello, world'
+ h = [('Content-Language', 'en-GB'), ('Content-Type', 'text/plain')]
+ tools.response_headers(headers=h)(index)
+
+ @cherrypy.expose
+ @cherrypy.config(**{
+ 'tools.response_headers.on': True,
+ 'tools.response_headers.headers': [
+ ('Content-Language', 'fr'),
+ ('Content-Type', 'text/plain'),
+ ],
+ 'tools.log_hooks.on': True,
+ })
+ def other(self):
+ return 'salut'
+
+ @cherrypy.config(**{'tools.accept.on': True})
+ class Accept:
+
+ @cherrypy.expose
+ def index(self):
+ return '<a href="feed">Atom feed</a>'
+
+ @cherrypy.expose
+ @tools.accept(media='application/atom+xml')
+ def feed(self):
+ return """<?xml version="1.0" encoding="utf-8"?>
+<feed xmlns="http://www.w3.org/2005/Atom">
+ <title>Unknown Blog</title>
+</feed>"""
+
+ @cherrypy.expose
+ def select(self):
+ # We could also write this: mtype = cherrypy.lib.accept.accept(...)
+ mtype = tools.accept.callable(['text/html', 'text/plain'])
+ if mtype == 'text/html':
+ return '<h2>Page Title</h2>'
+ else:
+ return 'PAGE TITLE'
+
+ class Referer:
+
+ @cherrypy.expose
+ def accept(self):
+ return 'Accepted!'
+ reject = accept
+
+ class AutoVary:
+
+ @cherrypy.expose
+ def index(self):
+ # Read a header directly with 'get'
+ cherrypy.request.headers.get('Accept-Encoding')
+ # Read a header directly with '__getitem__'
+ cherrypy.request.headers['Host']
+ # Read a header directly with '__contains__'
+ 'If-Modified-Since' in cherrypy.request.headers
+ # Read a header directly
+ 'Range' in cherrypy.request.headers
+ # Call a lib function
+ tools.accept.callable(['text/html', 'text/plain'])
+ return 'Hello, world!'
+
+ conf = {'/referer': {'tools.referer.on': True,
+ 'tools.referer.pattern': r'http://[^/]*example\.com',
+ },
+ '/referer/reject': {'tools.referer.accept': False,
+ 'tools.referer.accept_missing': True,
+ },
+ '/autovary': {'tools.autovary.on': True},
+ }
+
+ root = Root()
+ root.referer = Referer()
+ root.accept = Accept()
+ root.autovary = AutoVary()
+ cherrypy.tree.mount(root, config=conf)
+ cherrypy.config.update({'log.error_file': logfile})
+
+
+class ResponseHeadersTest(helper.CPWebCase):
+ setup_server = staticmethod(setup_server)
+
+ def testResponseHeadersDecorator(self):
+ self.getPage('/')
+ self.assertHeader('Content-Language', 'en-GB')
+ self.assertHeader('Content-Type', 'text/plain;charset=utf-8')
+
+ def testResponseHeaders(self):
+ self.getPage('/other')
+ self.assertHeader('Content-Language', 'fr')
+ self.assertHeader('Content-Type', 'text/plain;charset=utf-8')
+
+
+class RefererTest(helper.CPWebCase):
+ setup_server = staticmethod(setup_server)
+
+ def testReferer(self):
+ self.getPage('/referer/accept')
+ self.assertErrorPage(403, 'Forbidden Referer header.')
+
+ self.getPage('/referer/accept',
+ headers=[('Referer', 'http://www.example.com/')])
+ self.assertStatus(200)
+ self.assertBody('Accepted!')
+
+ # Reject
+ self.getPage('/referer/reject')
+ self.assertStatus(200)
+ self.assertBody('Accepted!')
+
+ self.getPage('/referer/reject',
+ headers=[('Referer', 'http://www.example.com/')])
+ self.assertErrorPage(403, 'Forbidden Referer header.')
+
+
+class AcceptTest(helper.CPWebCase):
+ setup_server = staticmethod(setup_server)
+
+ def test_Accept_Tool(self):
+ # Test with no header provided
+ self.getPage('/accept/feed')
+ self.assertStatus(200)
+ self.assertInBody('<title>Unknown Blog</title>')
+
+ # Specify exact media type
+ self.getPage('/accept/feed',
+ headers=[('Accept', 'application/atom+xml')])
+ self.assertStatus(200)
+ self.assertInBody('<title>Unknown Blog</title>')
+
+ # Specify matching media range
+ self.getPage('/accept/feed', headers=[('Accept', 'application/*')])
+ self.assertStatus(200)
+ self.assertInBody('<title>Unknown Blog</title>')
+
+ # Specify all media ranges
+ self.getPage('/accept/feed', headers=[('Accept', '*/*')])
+ self.assertStatus(200)
+ self.assertInBody('<title>Unknown Blog</title>')
+
+ # Specify unacceptable media types
+ self.getPage('/accept/feed', headers=[('Accept', 'text/html')])
+ self.assertErrorPage(406,
+ 'Your client sent this Accept header: text/html. '
+ 'But this resource only emits these media types: '
+ 'application/atom+xml.')
+
+ # Test resource where tool is 'on' but media is None (not set).
+ self.getPage('/accept/')
+ self.assertStatus(200)
+ self.assertBody('<a href="feed">Atom feed</a>')
+
+ def test_accept_selection(self):
+ # Try both our expected media types
+ self.getPage('/accept/select', [('Accept', 'text/html')])
+ self.assertStatus(200)
+ self.assertBody('<h2>Page Title</h2>')
+ self.getPage('/accept/select', [('Accept', 'text/plain')])
+ self.assertStatus(200)
+ self.assertBody('PAGE TITLE')
+ self.getPage('/accept/select',
+ [('Accept', 'text/plain, text/*;q=0.5')])
+ self.assertStatus(200)
+ self.assertBody('PAGE TITLE')
+
+ # text/* and */* should prefer text/html since it comes first
+ # in our 'media' argument to tools.accept
+ self.getPage('/accept/select', [('Accept', 'text/*')])
+ self.assertStatus(200)
+ self.assertBody('<h2>Page Title</h2>')
+ self.getPage('/accept/select', [('Accept', '*/*')])
+ self.assertStatus(200)
+ self.assertBody('<h2>Page Title</h2>')
+
+ # Try unacceptable media types
+ self.getPage('/accept/select', [('Accept', 'application/xml')])
+ self.assertErrorPage(
+ 406,
+ 'Your client sent this Accept header: application/xml. '
+ 'But this resource only emits these media types: '
+ 'text/html, text/plain.')
+
+
+class AutoVaryTest(helper.CPWebCase):
+ setup_server = staticmethod(setup_server)
+
+ def testAutoVary(self):
+ self.getPage('/autovary/')
+ self.assertHeader(
+ 'Vary',
+ 'Accept, Accept-Charset, Accept-Encoding, '
+ 'Host, If-Modified-Since, Range'
+ )
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_native.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_native.py
new file mode 100644
index 0000000000000000000000000000000000000000..08bf999771d7f1e3a3d51f4487be71e0c359599c
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_native.py
@@ -0,0 +1,38 @@
+"""Test the native server."""
+
+import pytest
+from requests_toolbelt import sessions
+
+import cherrypy._cpnative_server
+
+
+pytestmark = pytest.mark.skipif(
+ 'sys.platform == "win32"',
+ reason='tests fail on Windows',
+)
+
+
+@pytest.fixture
+def cp_native_server(request):
+ """A native server."""
+ class Root(object):
+ @cherrypy.expose
+ def index(self):
+ return 'Hello World!'
+
+ cls = cherrypy._cpnative_server.CPHTTPServer
+ cherrypy.server.httpserver = cls(cherrypy.server)
+
+ cherrypy.tree.mount(Root(), '/')
+ cherrypy.engine.start()
+ request.addfinalizer(cherrypy.engine.stop)
+ url = 'http://localhost:{cherrypy.server.socket_port}'.format(**globals())
+ return sessions.BaseUrlSession(url)
+
+
+def test_basic_request(cp_native_server):
+ """A request to a native server should succeed."""
+ resp = cp_native_server.get('/')
+ assert resp.ok
+ assert resp.status_code == 200
+ assert resp.text == 'Hello World!'
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_objectmapping.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_objectmapping.py
new file mode 100644
index 0000000000000000000000000000000000000000..98402b8b94b5276565742a6a954dc4f0d5349d74
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_objectmapping.py
@@ -0,0 +1,430 @@
+import sys
+import cherrypy
+from cherrypy._cpcompat import ntou
+from cherrypy._cptree import Application
+from cherrypy.test import helper
+
+script_names = ['', '/foo', '/users/fred/blog', '/corp/blog']
+
+
+class ObjectMappingTest(helper.CPWebCase):
+
+ @staticmethod
+ def setup_server():
+ class Root:
+
+ @cherrypy.expose
+ def index(self, name='world'):
+ return name
+
+ @cherrypy.expose
+ def foobar(self):
+ return 'bar'
+
+ @cherrypy.expose
+ def default(self, *params, **kwargs):
+ return 'default:' + repr(params)
+
+ @cherrypy.expose
+ def other(self):
+ return 'other'
+
+ @cherrypy.expose
+ def extra(self, *p):
+ return repr(p)
+
+ @cherrypy.expose
+ def redirect(self):
+ raise cherrypy.HTTPRedirect('dir1/', 302)
+
+ def notExposed(self):
+ return 'not exposed'
+
+ @cherrypy.expose
+ def confvalue(self):
+ return cherrypy.request.config.get('user')
+
+ @cherrypy.expose
+ def redirect_via_url(self, path):
+ raise cherrypy.HTTPRedirect(cherrypy.url(path))
+
+ @cherrypy.expose
+ def translate_html(self):
+ return 'OK'
+
+ @cherrypy.expose
+ def mapped_func(self, ID=None):
+ return 'ID is %s' % ID
+ setattr(Root, 'Von B\xfclow', mapped_func)
+
+ class Exposing:
+
+ @cherrypy.expose
+ def base(self):
+ return 'expose works!'
+ cherrypy.expose(base, '1')
+ cherrypy.expose(base, '2')
+
+ class ExposingNewStyle(object):
+
+ @cherrypy.expose
+ def base(self):
+ return 'expose works!'
+ cherrypy.expose(base, '1')
+ cherrypy.expose(base, '2')
+
+ class Dir1:
+
+ @cherrypy.expose
+ def index(self):
+ return 'index for dir1'
+
+ @cherrypy.expose
+ @cherrypy.config(**{'tools.trailing_slash.extra': True})
+ def myMethod(self):
+ return 'myMethod from dir1, path_info is:' + repr(
+ cherrypy.request.path_info)
+
+ @cherrypy.expose
+ def default(self, *params):
+ return 'default for dir1, param is:' + repr(params)
+
+ class Dir2:
+
+ @cherrypy.expose
+ def index(self):
+ return 'index for dir2, path is:' + cherrypy.request.path_info
+
+ @cherrypy.expose
+ def script_name(self):
+ return cherrypy.tree.script_name()
+
+ @cherrypy.expose
+ def cherrypy_url(self):
+ return cherrypy.url('/extra')
+
+ @cherrypy.expose
+ def posparam(self, *vpath):
+ return '/'.join(vpath)
+
+ class Dir3:
+
+ def default(self):
+ return 'default for dir3, not exposed'
+
+ class Dir4:
+
+ def index(self):
+ return 'index for dir4, not exposed'
+
+ class DefNoIndex:
+
+ @cherrypy.expose
+ def default(self, *args):
+ raise cherrypy.HTTPRedirect('contact')
+
+ # MethodDispatcher code
+ @cherrypy.expose
+ class ByMethod:
+
+ def __init__(self, *things):
+ self.things = list(things)
+
+ def GET(self):
+ return repr(self.things)
+
+ def POST(self, thing):
+ self.things.append(thing)
+
+ class Collection:
+ default = ByMethod('a', 'bit')
+
+ Root.exposing = Exposing()
+ Root.exposingnew = ExposingNewStyle()
+ Root.dir1 = Dir1()
+ Root.dir1.dir2 = Dir2()
+ Root.dir1.dir2.dir3 = Dir3()
+ Root.dir1.dir2.dir3.dir4 = Dir4()
+ Root.defnoindex = DefNoIndex()
+ Root.bymethod = ByMethod('another')
+ Root.collection = Collection()
+
+ d = cherrypy.dispatch.MethodDispatcher()
+ for url in script_names:
+ conf = {'/': {'user': (url or '/').split('/')[-2]},
+ '/bymethod': {'request.dispatch': d},
+ '/collection': {'request.dispatch': d},
+ }
+ cherrypy.tree.mount(Root(), url, conf)
+
+ class Isolated:
+
+ @cherrypy.expose
+ def index(self):
+ return 'made it!'
+
+ cherrypy.tree.mount(Isolated(), '/isolated')
+
+ @cherrypy.expose
+ class AnotherApp:
+
+ def GET(self):
+ return 'milk'
+
+ cherrypy.tree.mount(AnotherApp(), '/app',
+ {'/': {'request.dispatch': d}})
+
+ def testObjectMapping(self):
+ for url in script_names:
+ self.script_name = url
+
+ self.getPage('/')
+ self.assertBody('world')
+
+ self.getPage('/dir1/myMethod')
+ self.assertBody(
+ "myMethod from dir1, path_info is:'/dir1/myMethod'")
+
+ self.getPage('/this/method/does/not/exist')
+ self.assertBody(
+ "default:('this', 'method', 'does', 'not', 'exist')")
+
+ self.getPage('/extra/too/much')
+ self.assertBody("('too', 'much')")
+
+ self.getPage('/other')
+ self.assertBody('other')
+
+ self.getPage('/notExposed')
+ self.assertBody("default:('notExposed',)")
+
+ self.getPage('/dir1/dir2/')
+ self.assertBody('index for dir2, path is:/dir1/dir2/')
+
+ # Test omitted trailing slash (should be redirected by default).
+ self.getPage('/dir1/dir2')
+ self.assertStatus(301)
+ self.assertHeader('Location', '%s/dir1/dir2/' % self.base())
+
+ # Test extra trailing slash (should be redirected if configured).
+ self.getPage('/dir1/myMethod/')
+ self.assertStatus(301)
+ self.assertHeader('Location', '%s/dir1/myMethod' % self.base())
+
+ # Test that default method must be exposed in order to match.
+ self.getPage('/dir1/dir2/dir3/dir4/index')
+ self.assertBody(
+ "default for dir1, param is:('dir2', 'dir3', 'dir4', 'index')")
+
+ # Test *vpath when default() is defined but not index()
+ # This also tests HTTPRedirect with default.
+ self.getPage('/defnoindex')
+ self.assertStatus((302, 303))
+ self.assertHeader('Location', '%s/contact' % self.base())
+ self.getPage('/defnoindex/')
+ self.assertStatus((302, 303))
+ self.assertHeader('Location', '%s/defnoindex/contact' %
+ self.base())
+ self.getPage('/defnoindex/page')
+ self.assertStatus((302, 303))
+ self.assertHeader('Location', '%s/defnoindex/contact' %
+ self.base())
+
+ self.getPage('/redirect')
+ self.assertStatus('302 Found')
+ self.assertHeader('Location', '%s/dir1/' % self.base())
+
+ if not getattr(cherrypy.server, 'using_apache', False):
+ # Test that we can use URL's which aren't all valid Python
+ # identifiers
+ # This should also test the %XX-unquoting of URL's.
+ self.getPage('/Von%20B%fclow?ID=14')
+ self.assertBody('ID is 14')
+
+ # Test that %2F in the path doesn't get unquoted too early;
+ # that is, it should not be used to separate path components.
+ # See ticket #393.
+ self.getPage('/page%2Fname')
+ self.assertBody("default:('page/name',)")
+
+ self.getPage('/dir1/dir2/script_name')
+ self.assertBody(url)
+ self.getPage('/dir1/dir2/cherrypy_url')
+ self.assertBody('%s/extra' % self.base())
+
+ # Test that configs don't overwrite each other from different apps
+ self.getPage('/confvalue')
+ self.assertBody((url or '/').split('/')[-2])
+
+ self.script_name = ''
+
+ # Test absoluteURI's in the Request-Line
+ self.getPage('http://%s:%s/' % (self.interface(), self.PORT))
+ self.assertBody('world')
+
+ self.getPage('http://%s:%s/abs/?service=http://192.168.0.1/x/y/z' %
+ (self.interface(), self.PORT))
+ self.assertBody("default:('abs',)")
+
+ self.getPage('/rel/?service=http://192.168.120.121:8000/x/y/z')
+ self.assertBody("default:('rel',)")
+
+ # Test that the "isolated" app doesn't leak url's into the root app.
+ # If it did leak, Root.default() would answer with
+ # "default:('isolated', 'doesnt', 'exist')".
+ self.getPage('/isolated/')
+ self.assertStatus('200 OK')
+ self.assertBody('made it!')
+ self.getPage('/isolated/doesnt/exist')
+ self.assertStatus('404 Not Found')
+
+ # Make sure /foobar maps to Root.foobar and not to the app
+ # mounted at /foo. See
+ # https://github.com/cherrypy/cherrypy/issues/573
+ self.getPage('/foobar')
+ self.assertBody('bar')
+
+ def test_translate(self):
+ self.getPage('/translate_html')
+ self.assertStatus('200 OK')
+ self.assertBody('OK')
+
+ self.getPage('/translate.html')
+ self.assertStatus('200 OK')
+ self.assertBody('OK')
+
+ self.getPage('/translate-html')
+ self.assertStatus('200 OK')
+ self.assertBody('OK')
+
+ def test_redir_using_url(self):
+ for url in script_names:
+ self.script_name = url
+
+ # Test the absolute path to the parent (leading slash)
+ self.getPage('/redirect_via_url?path=./')
+ self.assertStatus(('302 Found', '303 See Other'))
+ self.assertHeader('Location', '%s/' % self.base())
+
+ # Test the relative path to the parent (no leading slash)
+ self.getPage('/redirect_via_url?path=./')
+ self.assertStatus(('302 Found', '303 See Other'))
+ self.assertHeader('Location', '%s/' % self.base())
+
+ # Test the absolute path to the parent (leading slash)
+ self.getPage('/redirect_via_url/?path=./')
+ self.assertStatus(('302 Found', '303 See Other'))
+ self.assertHeader('Location', '%s/' % self.base())
+
+ # Test the relative path to the parent (no leading slash)
+ self.getPage('/redirect_via_url/?path=./')
+ self.assertStatus(('302 Found', '303 See Other'))
+ self.assertHeader('Location', '%s/' % self.base())
+
+ def testPositionalParams(self):
+ self.getPage('/dir1/dir2/posparam/18/24/hut/hike')
+ self.assertBody('18/24/hut/hike')
+
+ # intermediate index methods should not receive posparams;
+ # only the "final" index method should do so.
+ self.getPage('/dir1/dir2/5/3/sir')
+ self.assertBody("default for dir1, param is:('dir2', '5', '3', 'sir')")
+
+ # test that extra positional args raises an 404 Not Found
+ # See https://github.com/cherrypy/cherrypy/issues/733.
+ self.getPage('/dir1/dir2/script_name/extra/stuff')
+ self.assertStatus(404)
+
+ def testExpose(self):
+ # Test the cherrypy.expose function/decorator
+ self.getPage('/exposing/base')
+ self.assertBody('expose works!')
+
+ self.getPage('/exposing/1')
+ self.assertBody('expose works!')
+
+ self.getPage('/exposing/2')
+ self.assertBody('expose works!')
+
+ self.getPage('/exposingnew/base')
+ self.assertBody('expose works!')
+
+ self.getPage('/exposingnew/1')
+ self.assertBody('expose works!')
+
+ self.getPage('/exposingnew/2')
+ self.assertBody('expose works!')
+
+ def testMethodDispatch(self):
+ self.getPage('/bymethod')
+ self.assertBody("['another']")
+ self.assertHeader('Allow', 'GET, HEAD, POST')
+
+ self.getPage('/bymethod', method='HEAD')
+ self.assertBody('')
+ self.assertHeader('Allow', 'GET, HEAD, POST')
+
+ self.getPage('/bymethod', method='POST', body='thing=one')
+ self.assertBody('')
+ self.assertHeader('Allow', 'GET, HEAD, POST')
+
+ self.getPage('/bymethod')
+ self.assertBody(repr(['another', ntou('one')]))
+ self.assertHeader('Allow', 'GET, HEAD, POST')
+
+ self.getPage('/bymethod', method='PUT')
+ self.assertErrorPage(405)
+ self.assertHeader('Allow', 'GET, HEAD, POST')
+
+ # Test default with posparams
+ self.getPage('/collection/silly', method='POST')
+ self.getPage('/collection', method='GET')
+ self.assertBody("['a', 'bit', 'silly']")
+
+ # Test custom dispatcher set on app root (see #737).
+ self.getPage('/app')
+ self.assertBody('milk')
+
+ def testTreeMounting(self):
+ class Root(object):
+
+ @cherrypy.expose
+ def hello(self):
+ return 'Hello world!'
+
+ # When mounting an application instance,
+ # we can't specify a different script name in the call to mount.
+ a = Application(Root(), '/somewhere')
+ self.assertRaises(ValueError, cherrypy.tree.mount, a, '/somewhereelse')
+
+ # When mounting an application instance...
+ a = Application(Root(), '/somewhere')
+ # ...we MUST allow in identical script name in the call to mount...
+ cherrypy.tree.mount(a, '/somewhere')
+ self.getPage('/somewhere/hello')
+ self.assertStatus(200)
+ # ...and MUST allow a missing script_name.
+ del cherrypy.tree.apps['/somewhere']
+ cherrypy.tree.mount(a)
+ self.getPage('/somewhere/hello')
+ self.assertStatus(200)
+
+ # In addition, we MUST be able to create an Application using
+ # script_name == None for access to the wsgi_environ.
+ a = Application(Root(), script_name=None)
+ # However, this does not apply to tree.mount
+ self.assertRaises(TypeError, cherrypy.tree.mount, a, None)
+
+ def testKeywords(self):
+ if sys.version_info < (3,):
+ return self.skip('skipped (Python 3 only)')
+ exec("""class Root(object):
+ @cherrypy.expose
+ def hello(self, *, name='world'):
+ return 'Hello %s!' % name
+cherrypy.tree.mount(Application(Root(), '/keywords'))""")
+
+ self.getPage('/keywords/hello')
+ self.assertStatus(200)
+ self.getPage('/keywords/hello/extra')
+ self.assertStatus(404)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_params.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_params.py
new file mode 100644
index 0000000000000000000000000000000000000000..73b4cb4cfe2bb18a58d0789813840640e8fe5bd0
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_params.py
@@ -0,0 +1,61 @@
+import sys
+import textwrap
+
+import cherrypy
+from cherrypy.test import helper
+
+
+class ParamsTest(helper.CPWebCase):
+ @staticmethod
+ def setup_server():
+ class Root:
+ @cherrypy.expose
+ @cherrypy.tools.json_out()
+ @cherrypy.tools.params()
+ def resource(self, limit=None, sort=None):
+ return type(limit).__name__
+ # for testing on Py 2
+ resource.__annotations__ = {'limit': int}
+ conf = {'/': {'tools.params.on': True}}
+ cherrypy.tree.mount(Root(), config=conf)
+
+ def test_pass(self):
+ self.getPage('/resource')
+ self.assertStatus(200)
+ self.assertBody('"NoneType"')
+
+ self.getPage('/resource?limit=0')
+ self.assertStatus(200)
+ self.assertBody('"int"')
+
+ def test_error(self):
+ self.getPage('/resource?limit=')
+ self.assertStatus(400)
+ self.assertInBody('invalid literal for int')
+
+ cherrypy.config['tools.params.error'] = 422
+ self.getPage('/resource?limit=')
+ self.assertStatus(422)
+ self.assertInBody('invalid literal for int')
+
+ cherrypy.config['tools.params.exception'] = TypeError
+ self.getPage('/resource?limit=')
+ self.assertStatus(500)
+
+ def test_syntax(self):
+ if sys.version_info < (3,):
+ return self.skip('skipped (Python 3 only)')
+ code = textwrap.dedent("""
+ class Root:
+ @cherrypy.expose
+ @cherrypy.tools.params()
+ def resource(self, limit: int):
+ return type(limit).__name__
+ conf = {'/': {'tools.params.on': True}}
+ cherrypy.tree.mount(Root(), config=conf)
+ """)
+ exec(code)
+
+ self.getPage('/resource?limit=0')
+ self.assertStatus(200)
+ self.assertBody('int')
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_plugins.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_plugins.py
new file mode 100644
index 0000000000000000000000000000000000000000..4d3aa6b1aba3b6f632e232c24c1ba9cf49b5751c
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_plugins.py
@@ -0,0 +1,14 @@
+from cherrypy.process import plugins
+
+
+__metaclass__ = type
+
+
+class TestAutoreloader:
+ def test_file_for_file_module_when_None(self):
+ """No error when module.__file__ is None.
+ """
+ class test_module:
+ __file__ = None
+
+ assert plugins.Autoreloader._file_for_file_module(test_module) is None
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_proxy.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_proxy.py
new file mode 100644
index 0000000000000000000000000000000000000000..4d34440ab3fe0a251ce8e504cef1a323f06e3ee8
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_proxy.py
@@ -0,0 +1,154 @@
+import cherrypy
+from cherrypy.test import helper
+
+script_names = ['', '/path/to/myapp']
+
+
+class ProxyTest(helper.CPWebCase):
+
+ @staticmethod
+ def setup_server():
+
+ # Set up site
+ cherrypy.config.update({
+ 'tools.proxy.on': True,
+ 'tools.proxy.base': 'www.mydomain.test',
+ })
+
+ # Set up application
+
+ class Root:
+
+ def __init__(self, sn):
+ # Calculate a URL outside of any requests.
+ self.thisnewpage = cherrypy.url(
+ '/this/new/page', script_name=sn)
+
+ @cherrypy.expose
+ def pageurl(self):
+ return self.thisnewpage
+
+ @cherrypy.expose
+ def index(self):
+ raise cherrypy.HTTPRedirect('dummy')
+
+ @cherrypy.expose
+ def remoteip(self):
+ return cherrypy.request.remote.ip
+
+ @cherrypy.expose
+ @cherrypy.config(**{
+ 'tools.proxy.local': 'X-Host',
+ 'tools.trailing_slash.extra': True,
+ })
+ def xhost(self):
+ raise cherrypy.HTTPRedirect('blah')
+
+ @cherrypy.expose
+ def base(self):
+ return cherrypy.request.base
+
+ @cherrypy.expose
+ @cherrypy.config(**{'tools.proxy.scheme': 'X-Forwarded-Ssl'})
+ def ssl(self):
+ return cherrypy.request.base
+
+ @cherrypy.expose
+ def newurl(self):
+ return ("Browse to <a href='%s'>this page</a>."
+ % cherrypy.url('/this/new/page'))
+
+ @cherrypy.expose
+ @cherrypy.config(**{
+ 'tools.proxy.base': None,
+ })
+ def base_no_base(self):
+ return cherrypy.request.base
+
+ for sn in script_names:
+ cherrypy.tree.mount(Root(sn), sn)
+
+ def testProxy(self):
+ self.getPage('/')
+ self.assertHeader('Location',
+ '%s://www.mydomain.test%s/dummy' %
+ (self.scheme, self.prefix()))
+
+ # Test X-Forwarded-Host (Apache 1.3.33+ and Apache 2)
+ self.getPage(
+ '/', headers=[('X-Forwarded-Host', 'http://www.example.test')])
+ self.assertHeader('Location', 'http://www.example.test/dummy')
+ self.getPage('/', headers=[('X-Forwarded-Host', 'www.example.test')])
+ self.assertHeader('Location', '%s://www.example.test/dummy' %
+ self.scheme)
+ # Test multiple X-Forwarded-Host headers
+ self.getPage('/', headers=[
+ ('X-Forwarded-Host', 'http://www.example.test, www.cherrypy.test'),
+ ])
+ self.assertHeader('Location', 'http://www.example.test/dummy')
+
+ # Test X-Forwarded-For (Apache2)
+ self.getPage('/remoteip',
+ headers=[('X-Forwarded-For', '192.168.0.20')])
+ self.assertBody('192.168.0.20')
+ # Fix bug #1268
+ self.getPage('/remoteip',
+ headers=[
+ ('X-Forwarded-For', '67.15.36.43, 192.168.0.20')
+ ])
+ self.assertBody('67.15.36.43')
+
+ # Test X-Host (lighttpd; see https://trac.lighttpd.net/trac/ticket/418)
+ self.getPage('/xhost', headers=[('X-Host', 'www.example.test')])
+ self.assertHeader('Location', '%s://www.example.test/blah' %
+ self.scheme)
+
+ # Test X-Forwarded-Proto (lighttpd)
+ self.getPage('/base', headers=[('X-Forwarded-Proto', 'https')])
+ self.assertBody('https://www.mydomain.test')
+
+ # Test X-Forwarded-Ssl (webfaction?)
+ self.getPage('/ssl', headers=[('X-Forwarded-Ssl', 'on')])
+ self.assertBody('https://www.mydomain.test')
+
+ # Test cherrypy.url()
+ for sn in script_names:
+ # Test the value inside requests
+ self.getPage(sn + '/newurl')
+ self.assertBody(
+ "Browse to <a href='%s://www.mydomain.test" % self.scheme +
+ sn + "/this/new/page'>this page</a>.")
+ self.getPage(sn + '/newurl', headers=[('X-Forwarded-Host',
+ 'http://www.example.test')])
+ self.assertBody("Browse to <a href='http://www.example.test" +
+ sn + "/this/new/page'>this page</a>.")
+
+ # Test the value outside requests
+ port = ''
+ if self.scheme == 'http' and self.PORT != 80:
+ port = ':%s' % self.PORT
+ elif self.scheme == 'https' and self.PORT != 443:
+ port = ':%s' % self.PORT
+ host = self.HOST
+ if host in ('0.0.0.0', '::'):
+ import socket
+ host = socket.gethostname()
+ expected = ('%s://%s%s%s/this/new/page'
+ % (self.scheme, host, port, sn))
+ self.getPage(sn + '/pageurl')
+ self.assertBody(expected)
+
+ # Test trailing slash (see
+ # https://github.com/cherrypy/cherrypy/issues/562).
+ self.getPage('/xhost/', headers=[('X-Host', 'www.example.test')])
+ self.assertHeader('Location', '%s://www.example.test/xhost'
+ % self.scheme)
+
+ def test_no_base_port_in_host(self):
+ """
+ If no base is indicated, and the host header is used to resolve
+ the base, it should rely on the host header for the port also.
+ """
+ headers = {'Host': 'localhost:8080'}.items()
+ self.getPage('/base_no_base', headers=headers)
+ self.assertBody('http://localhost:8080')
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_refleaks.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_refleaks.py
new file mode 100644
index 0000000000000000000000000000000000000000..958136790b766dd338533cf877e441c80ccaab70
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_refleaks.py
@@ -0,0 +1,65 @@
+"""Tests for refleaks."""
+
+import itertools
+import platform
+import threading
+from http.client import HTTPConnection
+
+import cherrypy
+from cherrypy._cpcompat import HTTPSConnection
+from cherrypy.test import helper
+
+
+data = object()
+
+
+class ReferenceTests(helper.CPWebCase):
+
+ @staticmethod
+ def setup_server():
+
+ class Root:
+
+ @cherrypy.expose
+ def index(self, *args, **kwargs):
+ cherrypy.request.thing = data
+ return 'Hello world!'
+
+ cherrypy.tree.mount(Root())
+
+ def test_threadlocal_garbage(self):
+ if platform.system() == 'Darwin':
+ self.skip('queue issues; see #1474')
+ success = itertools.count()
+
+ def getpage():
+ host = '%s:%s' % (self.interface(), self.PORT)
+ if self.scheme == 'https':
+ c = HTTPSConnection(host)
+ else:
+ c = HTTPConnection(host)
+ try:
+ c.putrequest('GET', '/')
+ c.endheaders()
+ response = c.getresponse()
+ body = response.read()
+ self.assertEqual(response.status, 200)
+ self.assertEqual(body, b'Hello world!')
+ finally:
+ c.close()
+ next(success)
+
+ ITERATIONS = 25
+
+ ts = [
+ threading.Thread(target=getpage)
+ for _ in range(ITERATIONS)
+ ]
+
+ for t in ts:
+ t.start()
+
+ for t in ts:
+ t.join()
+
+ self.assertEqual(next(success), ITERATIONS)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_request_obj.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_request_obj.py
new file mode 100644
index 0000000000000000000000000000000000000000..31023e8fc5d4e3fb2782ea83ae362df4eaff52d8
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_request_obj.py
@@ -0,0 +1,930 @@
+"""Basic tests for the cherrypy.Request object."""
+
+from functools import wraps
+import os
+import sys
+import types
+import uuid
+from http.client import IncompleteRead
+
+import cherrypy
+from cherrypy._cpcompat import ntou
+from cherrypy.lib import httputil
+from cherrypy.test import helper
+
+localDir = os.path.dirname(__file__)
+
+defined_http_methods = ('OPTIONS', 'GET', 'HEAD', 'POST', 'PUT', 'DELETE',
+ 'TRACE', 'PROPFIND', 'PATCH')
+
+
+# Client-side code #
+
+
+class RequestObjectTests(helper.CPWebCase):
+
+ @staticmethod
+ def setup_server():
+ class Root:
+
+ @cherrypy.expose
+ def index(self):
+ return 'hello'
+
+ @cherrypy.expose
+ def scheme(self):
+ return cherrypy.request.scheme
+
+ @cherrypy.expose
+ def created_example_com_3128(self):
+ """Handle CONNECT method."""
+ cherrypy.response.status = 204
+
+ @cherrypy.expose
+ def body_example_com_3128(self):
+ """Handle CONNECT method."""
+ return (
+ cherrypy.request.method
+ + 'ed to '
+ + cherrypy.request.path_info
+ )
+
+ @cherrypy.expose
+ def request_uuid4(self):
+ return [
+ str(cherrypy.request.unique_id),
+ ' ',
+ str(cherrypy.request.unique_id),
+ ]
+
+ root = Root()
+
+ class TestType(type):
+ """Metaclass which automatically exposes all functions in each
+ subclass, and adds an instance of the subclass as an attribute
+ of root.
+ """
+ def __init__(cls, name, bases, dct):
+ type.__init__(cls, name, bases, dct)
+ for value in dct.values():
+ if isinstance(value, types.FunctionType):
+ value.exposed = True
+ setattr(root, name.lower(), cls())
+ Test = TestType('Test', (object,), {})
+
+ class PathInfo(Test):
+
+ def default(self, *args):
+ return cherrypy.request.path_info
+
+ class Params(Test):
+
+ def index(self, thing):
+ return repr(thing)
+
+ def ismap(self, x, y):
+ return 'Coordinates: %s, %s' % (x, y)
+
+ @cherrypy.config(**{'request.query_string_encoding': 'latin1'})
+ def default(self, *args, **kwargs):
+ return 'args: %s kwargs: %s' % (args, sorted(kwargs.items()))
+
+ @cherrypy.expose
+ class ParamErrorsCallable(object):
+
+ def __call__(self):
+ return 'data'
+
+ def handler_dec(f):
+ @wraps(f)
+ def wrapper(handler, *args, **kwargs):
+ return f(handler, *args, **kwargs)
+ return wrapper
+
+ class ParamErrors(Test):
+
+ @cherrypy.expose
+ def one_positional(self, param1):
+ return 'data'
+
+ @cherrypy.expose
+ def one_positional_args(self, param1, *args):
+ return 'data'
+
+ @cherrypy.expose
+ def one_positional_args_kwargs(self, param1, *args, **kwargs):
+ return 'data'
+
+ @cherrypy.expose
+ def one_positional_kwargs(self, param1, **kwargs):
+ return 'data'
+
+ @cherrypy.expose
+ def no_positional(self):
+ return 'data'
+
+ @cherrypy.expose
+ def no_positional_args(self, *args):
+ return 'data'
+
+ @cherrypy.expose
+ def no_positional_args_kwargs(self, *args, **kwargs):
+ return 'data'
+
+ @cherrypy.expose
+ def no_positional_kwargs(self, **kwargs):
+ return 'data'
+
+ callable_object = ParamErrorsCallable()
+
+ @cherrypy.expose
+ def raise_type_error(self, **kwargs):
+ raise TypeError('Client Error')
+
+ @cherrypy.expose
+ def raise_type_error_with_default_param(self, x, y=None):
+ return '%d' % 'a' # throw an exception
+
+ @cherrypy.expose
+ @handler_dec
+ def raise_type_error_decorated(self, *args, **kwargs):
+ raise TypeError('Client Error')
+
+ def callable_error_page(status, **kwargs):
+ return "Error %s - Well, I'm very sorry but you haven't paid!" % (
+ status)
+
+ @cherrypy.config(**{'tools.log_tracebacks.on': True})
+ class Error(Test):
+
+ def reason_phrase(self):
+ raise cherrypy.HTTPError("410 Gone fishin'")
+
+ @cherrypy.config(**{
+ 'error_page.404': os.path.join(localDir, 'static/index.html'),
+ 'error_page.401': callable_error_page,
+ })
+ def custom(self, err='404'):
+ raise cherrypy.HTTPError(
+ int(err), 'No, <b>really</b>, not found!')
+
+ @cherrypy.config(**{
+ 'error_page.default': callable_error_page,
+ })
+ def custom_default(self):
+ return 1 + 'a' # raise an unexpected error
+
+ @cherrypy.config(**{'error_page.404': 'nonexistent.html'})
+ def noexist(self):
+ raise cherrypy.HTTPError(404, 'No, <b>really</b>, not found!')
+
+ def page_method(self):
+ raise ValueError()
+
+ def page_yield(self):
+ yield 'howdy'
+ raise ValueError()
+
+ @cherrypy.config(**{'response.stream': True})
+ def page_streamed(self):
+ yield 'word up'
+ raise ValueError()
+ yield 'very oops'
+
+ @cherrypy.config(**{'request.show_tracebacks': False})
+ def cause_err_in_finalize(self):
+ # Since status must start with an int, this should error.
+ cherrypy.response.status = 'ZOO OK'
+
+ @cherrypy.config(**{'request.throw_errors': True})
+ def rethrow(self):
+ """Test that an error raised here will be thrown out to
+ the server.
+ """
+ raise ValueError()
+
+ class Expect(Test):
+
+ def expectation_failed(self):
+ expect = cherrypy.request.headers.elements('Expect')
+ if expect and expect[0].value != '100-continue':
+ raise cherrypy.HTTPError(400)
+ raise cherrypy.HTTPError(417, 'Expectation Failed')
+
+ class Headers(Test):
+
+ def default(self, headername):
+ """Spit back out the value for the requested header."""
+ return cherrypy.request.headers[headername]
+
+ def doubledheaders(self):
+ # From https://github.com/cherrypy/cherrypy/issues/165:
+ # "header field names should not be case sensitive sayes the
+ # rfc. if i set a headerfield in complete lowercase i end up
+ # with two header fields, one in lowercase, the other in
+ # mixed-case."
+
+ # Set the most common headers
+ hMap = cherrypy.response.headers
+ hMap['content-type'] = 'text/html'
+ hMap['content-length'] = 18
+ hMap['server'] = 'CherryPy headertest'
+ hMap['location'] = ('%s://%s:%s/headers/'
+ % (cherrypy.request.local.ip,
+ cherrypy.request.local.port,
+ cherrypy.request.scheme))
+
+ # Set a rare header for fun
+ hMap['Expires'] = 'Thu, 01 Dec 2194 16:00:00 GMT'
+
+ return 'double header test'
+
+ def ifmatch(self):
+ val = cherrypy.request.headers['If-Match']
+ assert isinstance(val, str)
+ cherrypy.response.headers['ETag'] = val
+ return val
+
+ class HeaderElements(Test):
+
+ def get_elements(self, headername):
+ e = cherrypy.request.headers.elements(headername)
+ return '\n'.join([str(x) for x in e])
+
+ class Method(Test):
+
+ def index(self):
+ m = cherrypy.request.method
+ if m in defined_http_methods or m == 'CONNECT':
+ return m
+
+ if m == 'LINK':
+ raise cherrypy.HTTPError(405)
+ else:
+ raise cherrypy.HTTPError(501)
+
+ def parameterized(self, data):
+ return data
+
+ def request_body(self):
+ # This should be a file object (temp file),
+ # which CP will just pipe back out if we tell it to.
+ return cherrypy.request.body
+
+ def reachable(self):
+ return 'success'
+
+ class Divorce(Test):
+
+ """HTTP Method handlers shouldn't collide with normal method names.
+ For example, a GET-handler shouldn't collide with a method named
+ 'get'.
+
+ If you build HTTP method dispatching into CherryPy, rewrite this
+ class to use your new dispatch mechanism and make sure that:
+ "GET /divorce HTTP/1.1" maps to divorce.index() and
+ "GET /divorce/get?ID=13 HTTP/1.1" maps to divorce.get()
+ """
+
+ documents = {}
+
+ @cherrypy.expose
+ def index(self):
+ yield '<h1>Choose your document</h1>\n'
+ yield '<ul>\n'
+ for id, contents in self.documents.items():
+ yield (
+ " <li><a href='/divorce/get?ID=%s'>%s</a>:"
+ ' %s</li>\n' % (id, id, contents))
+ yield '</ul>'
+
+ @cherrypy.expose
+ def get(self, ID):
+ return ('Divorce document %s: %s' %
+ (ID, self.documents.get(ID, 'empty')))
+
+ class ThreadLocal(Test):
+
+ def index(self):
+ existing = repr(getattr(cherrypy.request, 'asdf', None))
+ cherrypy.request.asdf = 'rassfrassin'
+ return existing
+
+ appconf = {
+ '/method': {
+ 'request.methods_with_bodies': ('POST', 'PUT', 'PROPFIND',
+ 'PATCH')
+ },
+ }
+ cherrypy.tree.mount(root, config=appconf)
+
+ def test_scheme(self):
+ self.getPage('/scheme')
+ self.assertBody(self.scheme)
+
+ def test_per_request_uuid4(self):
+ self.getPage('/request_uuid4')
+ first_uuid4, _, second_uuid4 = self.body.decode().partition(' ')
+ assert (
+ uuid.UUID(first_uuid4, version=4)
+ == uuid.UUID(second_uuid4, version=4)
+ )
+
+ self.getPage('/request_uuid4')
+ third_uuid4, _, _ = self.body.decode().partition(' ')
+ assert (
+ uuid.UUID(first_uuid4, version=4)
+ != uuid.UUID(third_uuid4, version=4)
+ )
+
+ def testRelativeURIPathInfo(self):
+ self.getPage('/pathinfo/foo/bar')
+ self.assertBody('/pathinfo/foo/bar')
+
+ def testAbsoluteURIPathInfo(self):
+ # http://cherrypy.org/ticket/1061
+ self.getPage('http://localhost/pathinfo/foo/bar')
+ self.assertBody('/pathinfo/foo/bar')
+
+ def testParams(self):
+ self.getPage('/params/?thing=a')
+ self.assertBody(repr(ntou('a')))
+
+ self.getPage('/params/?thing=a&thing=b&thing=c')
+ self.assertBody(repr([ntou('a'), ntou('b'), ntou('c')]))
+
+ # Test friendly error message when given params are not accepted.
+ cherrypy.config.update({'request.show_mismatched_params': True})
+ self.getPage('/params/?notathing=meeting')
+ self.assertInBody('Missing parameters: thing')
+ self.getPage('/params/?thing=meeting¬athing=meeting')
+ self.assertInBody('Unexpected query string parameters: notathing')
+
+ # Test ability to turn off friendly error messages
+ cherrypy.config.update({'request.show_mismatched_params': False})
+ self.getPage('/params/?notathing=meeting')
+ self.assertInBody('Not Found')
+ self.getPage('/params/?thing=meeting¬athing=meeting')
+ self.assertInBody('Not Found')
+
+ # Test "% HEX HEX"-encoded URL, param keys, and values
+ self.getPage('/params/%d4%20%e3/cheese?Gruy%E8re=Bulgn%e9ville')
+ self.assertBody('args: %s kwargs: %s' %
+ (('\xd4 \xe3', 'cheese'),
+ [('Gruy\xe8re', ntou('Bulgn\xe9ville'))]))
+
+ # Make sure that encoded = and & get parsed correctly
+ self.getPage(
+ '/params/code?url=http%3A//cherrypy.org/index%3Fa%3D1%26b%3D2')
+ self.assertBody('args: %s kwargs: %s' %
+ (('code',),
+ [('url', ntou('http://cherrypy.org/index?a=1&b=2'))]))
+
+ # Test coordinates sent by <img ismap>
+ self.getPage('/params/ismap?223,114')
+ self.assertBody('Coordinates: 223, 114')
+
+ # Test "name[key]" dict-like params
+ self.getPage('/params/dictlike?a[1]=1&a[2]=2&b=foo&b[bar]=baz')
+ self.assertBody('args: %s kwargs: %s' %
+ (('dictlike',),
+ [('a[1]', ntou('1')), ('a[2]', ntou('2')),
+ ('b', ntou('foo')), ('b[bar]', ntou('baz'))]))
+
+ def testParamErrors(self):
+
+ # test that all of the handlers work when given
+ # the correct parameters in order to ensure that the
+ # errors below aren't coming from some other source.
+ for uri in (
+ '/paramerrors/one_positional?param1=foo',
+ '/paramerrors/one_positional_args?param1=foo',
+ '/paramerrors/one_positional_args/foo',
+ '/paramerrors/one_positional_args/foo/bar/baz',
+ '/paramerrors/one_positional_args_kwargs?'
+ 'param1=foo¶m2=bar',
+ '/paramerrors/one_positional_args_kwargs/foo?'
+ 'param2=bar¶m3=baz',
+ '/paramerrors/one_positional_args_kwargs/foo/bar/baz?'
+ 'param2=bar¶m3=baz',
+ '/paramerrors/one_positional_kwargs?'
+ 'param1=foo¶m2=bar¶m3=baz',
+ '/paramerrors/one_positional_kwargs/foo?'
+ 'param4=foo¶m2=bar¶m3=baz',
+ '/paramerrors/no_positional',
+ '/paramerrors/no_positional_args/foo',
+ '/paramerrors/no_positional_args/foo/bar/baz',
+ '/paramerrors/no_positional_args_kwargs?param1=foo¶m2=bar',
+ '/paramerrors/no_positional_args_kwargs/foo?param2=bar',
+ '/paramerrors/no_positional_args_kwargs/foo/bar/baz?'
+ 'param2=bar¶m3=baz',
+ '/paramerrors/no_positional_kwargs?param1=foo¶m2=bar',
+ '/paramerrors/callable_object',
+ ):
+ self.getPage(uri)
+ self.assertStatus(200)
+
+ error_msgs = [
+ 'Missing parameters',
+ 'Nothing matches the given URI',
+ 'Multiple values for parameters',
+ 'Unexpected query string parameters',
+ 'Unexpected body parameters',
+ 'Invalid path in Request-URI',
+ 'Illegal #fragment in Request-URI',
+ ]
+
+ # uri should be tested for valid absolute path, the status must be 400.
+ for uri, error_idx in (
+ ('invalid/path/without/leading/slash', 5),
+ ('/valid/path#invalid=fragment', 6),
+ ):
+ self.getPage(uri)
+ self.assertStatus(400)
+ self.assertInBody(error_msgs[error_idx])
+
+ # query string parameters are part of the URI, so if they are wrong
+ # for a particular handler, the status MUST be a 404.
+ for uri, msg in (
+ ('/paramerrors/one_positional', error_msgs[0]),
+ ('/paramerrors/one_positional?foo=foo', error_msgs[0]),
+ ('/paramerrors/one_positional/foo/bar/baz', error_msgs[1]),
+ ('/paramerrors/one_positional/foo?param1=foo', error_msgs[2]),
+ ('/paramerrors/one_positional/foo?param1=foo¶m2=foo',
+ error_msgs[2]),
+ ('/paramerrors/one_positional_args/foo?param1=foo¶m2=foo',
+ error_msgs[2]),
+ ('/paramerrors/one_positional_args/foo/bar/baz?param2=foo',
+ error_msgs[3]),
+ ('/paramerrors/one_positional_args_kwargs/foo/bar/baz?'
+ 'param1=bar¶m3=baz',
+ error_msgs[2]),
+ ('/paramerrors/one_positional_kwargs/foo?'
+ 'param1=foo¶m2=bar¶m3=baz',
+ error_msgs[2]),
+ ('/paramerrors/no_positional/boo', error_msgs[1]),
+ ('/paramerrors/no_positional?param1=foo', error_msgs[3]),
+ ('/paramerrors/no_positional_args/boo?param1=foo', error_msgs[3]),
+ ('/paramerrors/no_positional_kwargs/boo?param1=foo',
+ error_msgs[1]),
+ ('/paramerrors/callable_object?param1=foo', error_msgs[3]),
+ ('/paramerrors/callable_object/boo', error_msgs[1]),
+ ):
+ for show_mismatched_params in (True, False):
+ cherrypy.config.update(
+ {'request.show_mismatched_params': show_mismatched_params})
+ self.getPage(uri)
+ self.assertStatus(404)
+ if show_mismatched_params:
+ self.assertInBody(msg)
+ else:
+ self.assertInBody('Not Found')
+
+ # if body parameters are wrong, a 400 must be returned.
+ for uri, body, msg in (
+ ('/paramerrors/one_positional/foo',
+ 'param1=foo', error_msgs[2]),
+ ('/paramerrors/one_positional/foo',
+ 'param1=foo¶m2=foo', error_msgs[2]),
+ ('/paramerrors/one_positional_args/foo',
+ 'param1=foo¶m2=foo', error_msgs[2]),
+ ('/paramerrors/one_positional_args/foo/bar/baz',
+ 'param2=foo', error_msgs[4]),
+ ('/paramerrors/one_positional_args_kwargs/foo/bar/baz',
+ 'param1=bar¶m3=baz', error_msgs[2]),
+ ('/paramerrors/one_positional_kwargs/foo',
+ 'param1=foo¶m2=bar¶m3=baz', error_msgs[2]),
+ ('/paramerrors/no_positional', 'param1=foo', error_msgs[4]),
+ ('/paramerrors/no_positional_args/boo',
+ 'param1=foo', error_msgs[4]),
+ ('/paramerrors/callable_object', 'param1=foo', error_msgs[4]),
+ ):
+ for show_mismatched_params in (True, False):
+ cherrypy.config.update(
+ {'request.show_mismatched_params': show_mismatched_params})
+ self.getPage(uri, method='POST', body=body)
+ self.assertStatus(400)
+ if show_mismatched_params:
+ self.assertInBody(msg)
+ else:
+ self.assertInBody('400 Bad')
+
+ # even if body parameters are wrong, if we get the uri wrong, then
+ # it's a 404
+ for uri, body, msg in (
+ ('/paramerrors/one_positional?param2=foo',
+ 'param1=foo', error_msgs[3]),
+ ('/paramerrors/one_positional/foo/bar',
+ 'param2=foo', error_msgs[1]),
+ ('/paramerrors/one_positional_args/foo/bar?param2=foo',
+ 'param3=foo', error_msgs[3]),
+ ('/paramerrors/one_positional_kwargs/foo/bar',
+ 'param2=bar¶m3=baz', error_msgs[1]),
+ ('/paramerrors/no_positional?param1=foo',
+ 'param2=foo', error_msgs[3]),
+ ('/paramerrors/no_positional_args/boo?param2=foo',
+ 'param1=foo', error_msgs[3]),
+ ('/paramerrors/callable_object?param2=bar',
+ 'param1=foo', error_msgs[3]),
+ ):
+ for show_mismatched_params in (True, False):
+ cherrypy.config.update(
+ {'request.show_mismatched_params': show_mismatched_params})
+ self.getPage(uri, method='POST', body=body)
+ self.assertStatus(404)
+ if show_mismatched_params:
+ self.assertInBody(msg)
+ else:
+ self.assertInBody('Not Found')
+
+ # In the case that a handler raises a TypeError we should
+ # let that type error through.
+ for uri in (
+ '/paramerrors/raise_type_error',
+ '/paramerrors/raise_type_error_with_default_param?x=0',
+ '/paramerrors/raise_type_error_with_default_param?x=0&y=0',
+ '/paramerrors/raise_type_error_decorated',
+ ):
+ self.getPage(uri, method='GET')
+ self.assertStatus(500)
+ self.assertTrue('Client Error', self.body)
+
+ def testErrorHandling(self):
+ self.getPage('/error/missing')
+ self.assertStatus(404)
+ self.assertErrorPage(404, "The path '/error/missing' was not found.")
+
+ ignore = helper.webtest.ignored_exceptions
+ ignore.append(ValueError)
+ try:
+ valerr = '\n raise ValueError()\nValueError'
+ self.getPage('/error/page_method')
+ self.assertErrorPage(500, pattern=valerr)
+
+ self.getPage('/error/page_yield')
+ self.assertErrorPage(500, pattern=valerr)
+
+ if (cherrypy.server.protocol_version == 'HTTP/1.0' or
+ getattr(cherrypy.server, 'using_apache', False)):
+ self.getPage('/error/page_streamed')
+ # Because this error is raised after the response body has
+ # started, the status should not change to an error status.
+ self.assertStatus(200)
+ self.assertBody('word up')
+ else:
+ # Under HTTP/1.1, the chunked transfer-coding is used.
+ # The HTTP client will choke when the output is incomplete.
+ self.assertRaises((ValueError, IncompleteRead), self.getPage,
+ '/error/page_streamed')
+
+ # No traceback should be present
+ self.getPage('/error/cause_err_in_finalize')
+ msg = "Illegal response status from server ('ZOO' is non-numeric)."
+ self.assertErrorPage(500, msg, None)
+ finally:
+ ignore.pop()
+
+ # Test HTTPError with a reason-phrase in the status arg.
+ self.getPage('/error/reason_phrase')
+ self.assertStatus("410 Gone fishin'")
+
+ # Test custom error page for a specific error.
+ self.getPage('/error/custom')
+ self.assertStatus(404)
+ self.assertBody('Hello, world\r\n' + (' ' * 499))
+
+ # Test custom error page for a specific error.
+ self.getPage('/error/custom?err=401')
+ self.assertStatus(401)
+ self.assertBody(
+ 'Error 401 Unauthorized - '
+ "Well, I'm very sorry but you haven't paid!")
+
+ # Test default custom error page.
+ self.getPage('/error/custom_default')
+ self.assertStatus(500)
+ self.assertBody(
+ 'Error 500 Internal Server Error - '
+ "Well, I'm very sorry but you haven't paid!".ljust(513))
+
+ # Test error in custom error page (ticket #305).
+ # Note that the message is escaped for HTML (ticket #310).
+ self.getPage('/error/noexist')
+ self.assertStatus(404)
+ if sys.version_info >= (3, 3):
+ exc_name = 'FileNotFoundError'
+ else:
+ exc_name = 'IOError'
+ msg = ('No, <b>really</b>, not found!<br />'
+ 'In addition, the custom error page failed:\n<br />'
+ '%s: [Errno 2] '
+ "No such file or directory: 'nonexistent.html'") % (exc_name,)
+ self.assertInBody(msg)
+
+ if getattr(cherrypy.server, 'using_apache', False):
+ pass
+ else:
+ # Test throw_errors (ticket #186).
+ self.getPage('/error/rethrow')
+ self.assertInBody('raise ValueError()')
+
+ def testExpect(self):
+ e = ('Expect', '100-continue')
+ self.getPage('/headerelements/get_elements?headername=Expect', [e])
+ self.assertBody('100-continue')
+
+ self.getPage('/expect/expectation_failed', [e])
+ self.assertStatus(417)
+
+ def testHeaderElements(self):
+ # Accept-* header elements should be sorted, with most preferred first.
+ h = [('Accept', 'audio/*; q=0.2, audio/basic')]
+ self.getPage('/headerelements/get_elements?headername=Accept', h)
+ self.assertStatus(200)
+ self.assertBody('audio/basic\n'
+ 'audio/*;q=0.2')
+
+ h = [
+ ('Accept',
+ 'text/plain; q=0.5, text/html, text/x-dvi; q=0.8, text/x-c')
+ ]
+ self.getPage('/headerelements/get_elements?headername=Accept', h)
+ self.assertStatus(200)
+ self.assertBody('text/x-c\n'
+ 'text/html\n'
+ 'text/x-dvi;q=0.8\n'
+ 'text/plain;q=0.5')
+
+ # Test that more specific media ranges get priority.
+ h = [('Accept', 'text/*, text/html, text/html;level=1, */*')]
+ self.getPage('/headerelements/get_elements?headername=Accept', h)
+ self.assertStatus(200)
+ self.assertBody('text/html;level=1\n'
+ 'text/html\n'
+ 'text/*\n'
+ '*/*')
+
+ # Test Accept-Charset
+ h = [('Accept-Charset', 'iso-8859-5, unicode-1-1;q=0.8')]
+ self.getPage(
+ '/headerelements/get_elements?headername=Accept-Charset', h)
+ self.assertStatus('200 OK')
+ self.assertBody('iso-8859-5\n'
+ 'unicode-1-1;q=0.8')
+
+ # Test Accept-Encoding
+ h = [('Accept-Encoding', 'gzip;q=1.0, identity; q=0.5, *;q=0')]
+ self.getPage(
+ '/headerelements/get_elements?headername=Accept-Encoding', h)
+ self.assertStatus('200 OK')
+ self.assertBody('gzip;q=1.0\n'
+ 'identity;q=0.5\n'
+ '*;q=0')
+
+ # Test Accept-Language
+ h = [('Accept-Language', 'da, en-gb;q=0.8, en;q=0.7')]
+ self.getPage(
+ '/headerelements/get_elements?headername=Accept-Language', h)
+ self.assertStatus('200 OK')
+ self.assertBody('da\n'
+ 'en-gb;q=0.8\n'
+ 'en;q=0.7')
+
+ # Test malformed header parsing. See
+ # https://github.com/cherrypy/cherrypy/issues/763.
+ self.getPage('/headerelements/get_elements?headername=Content-Type',
+ # Note the illegal trailing ";"
+ headers=[('Content-Type', 'text/html; charset=utf-8;')])
+ self.assertStatus(200)
+ self.assertBody('text/html;charset=utf-8')
+
+ def test_repeated_headers(self):
+ # Test that two request headers are collapsed into one.
+ # See https://github.com/cherrypy/cherrypy/issues/542.
+ self.getPage('/headers/Accept-Charset',
+ headers=[('Accept-Charset', 'iso-8859-5'),
+ ('Accept-Charset', 'unicode-1-1;q=0.8')])
+ self.assertBody('iso-8859-5, unicode-1-1;q=0.8')
+
+ # Tests that each header only appears once, regardless of case.
+ self.getPage('/headers/doubledheaders')
+ self.assertBody('double header test')
+ hnames = [name.title() for name, val in self.headers]
+ for key in ['Content-Length', 'Content-Type', 'Date',
+ 'Expires', 'Location', 'Server']:
+ self.assertEqual(hnames.count(key), 1, self.headers)
+
+ def test_encoded_headers(self):
+ # First, make sure the innards work like expected.
+ self.assertEqual(
+ httputil.decode_TEXT(ntou('=?utf-8?q?f=C3=BCr?=')), ntou('f\xfcr'))
+
+ if cherrypy.server.protocol_version == 'HTTP/1.1':
+ # Test RFC-2047-encoded request and response header values
+ u = ntou('\u212bngstr\xf6m', 'escape')
+ c = ntou('=E2=84=ABngstr=C3=B6m')
+ self.getPage('/headers/ifmatch',
+ [('If-Match', ntou('=?utf-8?q?%s?=') % c)])
+ # The body should be utf-8 encoded.
+ self.assertBody(b'\xe2\x84\xabngstr\xc3\xb6m')
+ # But the Etag header should be RFC-2047 encoded (binary)
+ self.assertHeader('ETag', ntou('=?utf-8?b?4oSrbmdzdHLDtm0=?='))
+
+ # Test a *LONG* RFC-2047-encoded request and response header value
+ self.getPage('/headers/ifmatch',
+ [('If-Match', ntou('=?utf-8?q?%s?=') % (c * 10))])
+ self.assertBody(b'\xe2\x84\xabngstr\xc3\xb6m' * 10)
+ # Note: this is different output for Python3, but it decodes fine.
+ etag = self.assertHeader(
+ 'ETag',
+ '=?utf-8?b?4oSrbmdzdHLDtm3ihKtuZ3N0csO2beKEq25nc3Ryw7Zt'
+ '4oSrbmdzdHLDtm3ihKtuZ3N0csO2beKEq25nc3Ryw7Zt'
+ '4oSrbmdzdHLDtm3ihKtuZ3N0csO2beKEq25nc3Ryw7Zt'
+ '4oSrbmdzdHLDtm0=?=')
+ self.assertEqual(httputil.decode_TEXT(etag), u * 10)
+
+ def test_header_presence(self):
+ # If we don't pass a Content-Type header, it should not be present
+ # in cherrypy.request.headers
+ self.getPage('/headers/Content-Type',
+ headers=[])
+ self.assertStatus(500)
+
+ # If Content-Type is present in the request, it should be present in
+ # cherrypy.request.headers
+ self.getPage('/headers/Content-Type',
+ headers=[('Content-type', 'application/json')])
+ self.assertBody('application/json')
+
+ def test_basic_HTTPMethods(self):
+ helper.webtest.methods_with_bodies = ('POST', 'PUT', 'PROPFIND',
+ 'PATCH')
+
+ # Test that all defined HTTP methods work.
+ for m in defined_http_methods:
+ self.getPage('/method/', method=m)
+
+ # HEAD requests should not return any body.
+ if m == 'HEAD':
+ self.assertBody('')
+ elif m == 'TRACE':
+ # Some HTTP servers (like modpy) have their own TRACE support
+ self.assertEqual(self.body[:5], b'TRACE')
+ else:
+ self.assertBody(m)
+
+ # test of PATCH requests
+ # Request a PATCH method with a form-urlencoded body
+ self.getPage('/method/parameterized', method='PATCH',
+ body='data=on+top+of+other+things')
+ self.assertBody('on top of other things')
+
+ # Request a PATCH method with a file body
+ b = 'one thing on top of another'
+ h = [('Content-Type', 'text/plain'),
+ ('Content-Length', str(len(b)))]
+ self.getPage('/method/request_body', headers=h, method='PATCH', body=b)
+ self.assertStatus(200)
+ self.assertBody(b)
+
+ # Request a PATCH method with a file body but no Content-Type.
+ # See https://github.com/cherrypy/cherrypy/issues/790.
+ b = b'one thing on top of another'
+ self.persistent = True
+ try:
+ conn = self.HTTP_CONN
+ conn.putrequest('PATCH', '/method/request_body', skip_host=True)
+ conn.putheader('Host', self.HOST)
+ conn.putheader('Content-Length', str(len(b)))
+ conn.endheaders()
+ conn.send(b)
+ response = conn.response_class(conn.sock, method='PATCH')
+ response.begin()
+ self.assertEqual(response.status, 200)
+ self.body = response.read()
+ self.assertBody(b)
+ finally:
+ self.persistent = False
+
+ # Request a PATCH method with no body whatsoever (not an empty one).
+ # See https://github.com/cherrypy/cherrypy/issues/650.
+ # Provide a C-T or webtest will provide one (and a C-L) for us.
+ h = [('Content-Type', 'text/plain')]
+ self.getPage('/method/reachable', headers=h, method='PATCH')
+ self.assertStatus(411)
+
+ # HTTP PUT tests
+ # Request a PUT method with a form-urlencoded body
+ self.getPage('/method/parameterized', method='PUT',
+ body='data=on+top+of+other+things')
+ self.assertBody('on top of other things')
+
+ # Request a PUT method with a file body
+ b = 'one thing on top of another'
+ h = [('Content-Type', 'text/plain'),
+ ('Content-Length', str(len(b)))]
+ self.getPage('/method/request_body', headers=h, method='PUT', body=b)
+ self.assertStatus(200)
+ self.assertBody(b)
+
+ # Request a PUT method with a file body but no Content-Type.
+ # See https://github.com/cherrypy/cherrypy/issues/790.
+ b = b'one thing on top of another'
+ self.persistent = True
+ try:
+ conn = self.HTTP_CONN
+ conn.putrequest('PUT', '/method/request_body', skip_host=True)
+ conn.putheader('Host', self.HOST)
+ conn.putheader('Content-Length', str(len(b)))
+ conn.endheaders()
+ conn.send(b)
+ response = conn.response_class(conn.sock, method='PUT')
+ response.begin()
+ self.assertEqual(response.status, 200)
+ self.body = response.read()
+ self.assertBody(b)
+ finally:
+ self.persistent = False
+
+ # Request a PUT method with no body whatsoever (not an empty one).
+ # See https://github.com/cherrypy/cherrypy/issues/650.
+ # Provide a C-T or webtest will provide one (and a C-L) for us.
+ h = [('Content-Type', 'text/plain')]
+ self.getPage('/method/reachable', headers=h, method='PUT')
+ self.assertStatus(411)
+
+ # Request a custom method with a request body
+ b = ('<?xml version="1.0" encoding="utf-8" ?>\n\n'
+ '<propfind xmlns="DAV:"><prop><getlastmodified/>'
+ '</prop></propfind>')
+ h = [('Content-Type', 'text/xml'),
+ ('Content-Length', str(len(b)))]
+ self.getPage('/method/request_body', headers=h,
+ method='PROPFIND', body=b)
+ self.assertStatus(200)
+ self.assertBody(b)
+
+ # Request a disallowed method
+ self.getPage('/method/', method='LINK')
+ self.assertStatus(405)
+
+ # Request an unknown method
+ self.getPage('/method/', method='SEARCH')
+ self.assertStatus(501)
+
+ # For method dispatchers: make sure that an HTTP method doesn't
+ # collide with a virtual path atom. If you build HTTP-method
+ # dispatching into the core, rewrite these handlers to use
+ # your dispatch idioms.
+ self.getPage('/divorce/get?ID=13')
+ self.assertBody('Divorce document 13: empty')
+ self.assertStatus(200)
+ self.getPage('/divorce/', method='GET')
+ self.assertBody('<h1>Choose your document</h1>\n<ul>\n</ul>')
+ self.assertStatus(200)
+
+ def test_CONNECT_method(self):
+ self.persistent = True
+ try:
+ conn = self.HTTP_CONN
+ conn.request('CONNECT', 'created.example.com:3128')
+ response = conn.response_class(conn.sock, method='CONNECT')
+ response.begin()
+ self.assertEqual(response.status, 204)
+ finally:
+ self.persistent = False
+
+ self.persistent = True
+ try:
+ conn = self.HTTP_CONN
+ conn.request('CONNECT', 'body.example.com:3128')
+ response = conn.response_class(conn.sock, method='CONNECT')
+ response.begin()
+ self.assertEqual(response.status, 200)
+ self.body = response.read()
+ self.assertBody(b'CONNECTed to /body.example.com:3128')
+ finally:
+ self.persistent = False
+
+ def test_CONNECT_method_invalid_authority(self):
+ for request_target in ['example.com', 'http://example.com:33',
+ '/path/', 'path/', '/?q=f', '#f']:
+ self.persistent = True
+ try:
+ conn = self.HTTP_CONN
+ conn.request('CONNECT', request_target)
+ response = conn.response_class(conn.sock, method='CONNECT')
+ response.begin()
+ self.assertEqual(response.status, 400)
+ self.body = response.read()
+ self.assertBody(b'Invalid path in Request-URI: request-target '
+ b'must match authority-form.')
+ finally:
+ self.persistent = False
+
+ def testEmptyThreadlocals(self):
+ results = []
+ for x in range(20):
+ self.getPage('/threadlocal/')
+ results.append(self.body)
+ self.assertEqual(results, [b'None'] * 20)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_routes.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_routes.py
new file mode 100644
index 0000000000000000000000000000000000000000..cc7147654a08a348525f18c3e284466e33289989
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_routes.py
@@ -0,0 +1,80 @@
+"""Test Routes dispatcher."""
+import os
+import importlib
+
+import pytest
+
+import cherrypy
+from cherrypy.test import helper
+
+curdir = os.path.join(os.getcwd(), os.path.dirname(__file__))
+
+
+class RoutesDispatchTest(helper.CPWebCase):
+ """Routes dispatcher test suite."""
+
+ @staticmethod
+ def setup_server():
+ """Set up cherrypy test instance."""
+ try:
+ importlib.import_module('routes')
+ except ImportError:
+ pytest.skip('Install routes to test RoutesDispatcher code')
+
+ class Dummy:
+
+ def index(self):
+ return 'I said good day!'
+
+ class City:
+
+ def __init__(self, name):
+ self.name = name
+ self.population = 10000
+
+ @cherrypy.config(**{
+ 'tools.response_headers.on': True,
+ 'tools.response_headers.headers': [
+ ('Content-Language', 'en-GB'),
+ ],
+ })
+ def index(self, **kwargs):
+ return 'Welcome to %s, pop. %s' % (self.name, self.population)
+
+ def update(self, **kwargs):
+ self.population = kwargs['pop']
+ return 'OK'
+
+ d = cherrypy.dispatch.RoutesDispatcher()
+ d.connect(action='index', name='hounslow', route='/hounslow',
+ controller=City('Hounslow'))
+ d.connect(
+ name='surbiton', route='/surbiton', controller=City('Surbiton'),
+ action='index', conditions=dict(method=['GET']))
+ d.mapper.connect('/surbiton', controller='surbiton',
+ action='update', conditions=dict(method=['POST']))
+ d.connect('main', ':action', controller=Dummy())
+
+ conf = {'/': {'request.dispatch': d}}
+ cherrypy.tree.mount(root=None, config=conf)
+
+ def test_Routes_Dispatch(self):
+ """Check that routes package based URI dispatching works correctly."""
+ self.getPage('/hounslow')
+ self.assertStatus('200 OK')
+ self.assertBody('Welcome to Hounslow, pop. 10000')
+
+ self.getPage('/foo')
+ self.assertStatus('404 Not Found')
+
+ self.getPage('/surbiton')
+ self.assertStatus('200 OK')
+ self.assertBody('Welcome to Surbiton, pop. 10000')
+
+ self.getPage('/surbiton', method='POST', body='pop=1327')
+ self.assertStatus('200 OK')
+ self.assertBody('OK')
+ self.getPage('/surbiton')
+ self.assertStatus('200 OK')
+ self.assertHeader('Content-Language', 'en-GB')
+ self.assertBody('Welcome to Surbiton, pop. 1327')
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_session.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_session.py
new file mode 100644
index 0000000000000000000000000000000000000000..2d869e4bd13c191bd3776b9c94a9564e4c37cca8
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_session.py
@@ -0,0 +1,547 @@
+import os
+import platform
+import threading
+import time
+from http.client import HTTPConnection
+
+from distutils.spawn import find_executable
+import pytest
+from path import Path
+from more_itertools import consume
+import portend
+
+import cherrypy
+from cherrypy._cpcompat import HTTPSConnection
+from cherrypy.lib import sessions
+from cherrypy.lib import reprconf
+from cherrypy.lib.httputil import response_codes
+from cherrypy.test import helper
+from cherrypy import _json as json
+
+localDir = Path(__file__).dirname()
+
+
+def http_methods_allowed(methods=['GET', 'HEAD']):
+ method = cherrypy.request.method.upper()
+ if method not in methods:
+ cherrypy.response.headers['Allow'] = ', '.join(methods)
+ raise cherrypy.HTTPError(405)
+
+
+cherrypy.tools.allow = cherrypy.Tool('on_start_resource', http_methods_allowed)
+
+
+def setup_server():
+
+ @cherrypy.config(**{
+ 'tools.sessions.on': True,
+ 'tools.sessions.storage_class': sessions.RamSession,
+ 'tools.sessions.storage_path': localDir,
+ 'tools.sessions.timeout': (1.0 / 60),
+ 'tools.sessions.clean_freq': (1.0 / 60),
+ })
+ class Root:
+
+ @cherrypy.expose
+ def clear(self):
+ cherrypy.session.cache.clear()
+
+ @cherrypy.expose
+ @cherrypy.tools.json_out()
+ def data(self):
+ cherrypy.session['aha'] = 'foo'
+ return cherrypy.session._data
+
+ @cherrypy.expose
+ def testGen(self):
+ counter = cherrypy.session.get('counter', 0) + 1
+ cherrypy.session['counter'] = counter
+ yield str(counter)
+
+ @cherrypy.expose
+ def testStr(self):
+ counter = cherrypy.session.get('counter', 0) + 1
+ cherrypy.session['counter'] = counter
+ return str(counter)
+
+ @cherrypy.expose
+ @cherrypy.config(**{'tools.sessions.on': False})
+ def set_session_cls(self, new_cls_name):
+ new_cls = reprconf.attributes(new_cls_name)
+ cfg = {'tools.sessions.storage_class': new_cls}
+ self.__class__._cp_config.update(cfg)
+ if hasattr(cherrypy, 'session'):
+ del cherrypy.session
+ if new_cls.clean_thread:
+ new_cls.clean_thread.stop()
+ new_cls.clean_thread.unsubscribe()
+ del new_cls.clean_thread
+
+ @cherrypy.expose
+ def index(self):
+ sess = cherrypy.session
+ c = sess.get('counter', 0) + 1
+ time.sleep(0.01)
+ sess['counter'] = c
+ return str(c)
+
+ @cherrypy.expose
+ def keyin(self, key):
+ return str(key in cherrypy.session)
+
+ @cherrypy.expose
+ def delete(self):
+ cherrypy.session.delete()
+ sessions.expire()
+ return 'done'
+
+ @cherrypy.expose
+ def delkey(self, key):
+ del cherrypy.session[key]
+ return 'OK'
+
+ @cherrypy.expose
+ def redir_target(self):
+ return self._cp_config['tools.sessions.storage_class'].__name__
+
+ @cherrypy.expose
+ def iredir(self):
+ raise cherrypy.InternalRedirect('/redir_target')
+
+ @cherrypy.expose
+ @cherrypy.config(**{
+ 'tools.allow.on': True,
+ 'tools.allow.methods': ['GET'],
+ })
+ def restricted(self):
+ return cherrypy.request.method
+
+ @cherrypy.expose
+ def regen(self):
+ cherrypy.tools.sessions.regenerate()
+ return 'logged in'
+
+ @cherrypy.expose
+ def length(self):
+ return str(len(cherrypy.session))
+
+ @cherrypy.expose
+ @cherrypy.config(**{
+ 'tools.sessions.path': '/session_cookie',
+ 'tools.sessions.name': 'temp',
+ 'tools.sessions.persistent': False,
+ })
+ def session_cookie(self):
+ # Must load() to start the clean thread.
+ cherrypy.session.load()
+ return cherrypy.session.id
+
+ cherrypy.tree.mount(Root())
+
+
+class SessionTest(helper.CPWebCase):
+ setup_server = staticmethod(setup_server)
+
+ @classmethod
+ def teardown_class(cls):
+ """Clean up sessions."""
+ super(cls, cls).teardown_class()
+ consume(
+ file.remove_p()
+ for file in localDir.listdir()
+ if file.basename().startswith(
+ sessions.FileSession.SESSION_PREFIX
+ )
+ )
+
+ def test_0_Session(self):
+ self.getPage('/set_session_cls/cherrypy.lib.sessions.RamSession')
+ self.getPage('/clear')
+
+ # Test that a normal request gets the same id in the cookies.
+ # Note: this wouldn't work if /data didn't load the session.
+ self.getPage('/data')
+ assert self.body == b'{"aha": "foo"}'
+ c = self.cookies[0]
+ self.getPage('/data', self.cookies)
+ self.cookies[0] == c
+
+ self.getPage('/testStr')
+ assert self.body == b'1'
+ cookie_parts = dict([p.strip().split('=')
+ for p in self.cookies[0][1].split(';')])
+ # Assert there is an 'expires' param
+ expected_cookie_keys = {'session_id', 'expires', 'Path', 'Max-Age'}
+ assert set(cookie_parts.keys()) == expected_cookie_keys
+ self.getPage('/testGen', self.cookies)
+ assert self.body == b'2'
+ self.getPage('/testStr', self.cookies)
+ assert self.body == b'3'
+ self.getPage('/data', self.cookies)
+ expected_data = {'counter': 3, 'aha': 'foo'}
+ assert json.decode(self.body.decode('utf-8')) == expected_data
+ self.getPage('/length', self.cookies)
+ assert self.body == b'2'
+ self.getPage('/delkey?key=counter', self.cookies)
+ assert self.status_code == 200
+
+ self.getPage('/set_session_cls/cherrypy.lib.sessions.FileSession')
+ self.getPage('/testStr')
+ assert self.body == b'1'
+ self.getPage('/testGen', self.cookies)
+ assert self.body == b'2'
+ self.getPage('/testStr', self.cookies)
+ assert self.body == b'3'
+ self.getPage('/delkey?key=counter', self.cookies)
+ assert self.status_code == 200
+
+ # Wait for the session.timeout (1 second)
+ time.sleep(2)
+ self.getPage('/')
+ assert self.body == b'1'
+ self.getPage('/length', self.cookies)
+ assert self.body == b'1'
+
+ # Test session __contains__
+ self.getPage('/keyin?key=counter', self.cookies)
+ assert self.body == b'True'
+ cookieset1 = self.cookies
+
+ # Make a new session and test __len__ again
+ self.getPage('/')
+ self.getPage('/length', self.cookies)
+ assert self.body == b'2'
+
+ # Test session delete
+ self.getPage('/delete', self.cookies)
+ assert self.body == b'done'
+ self.getPage('/delete', cookieset1)
+ assert self.body == b'done'
+
+ def f():
+ return [
+ x
+ for x in os.listdir(localDir)
+ if x.startswith('session-') and not x.endswith('.lock')
+ ]
+ assert f() == []
+
+ # Wait for the cleanup thread to delete remaining session files
+ self.getPage('/')
+ assert f() != []
+ time.sleep(2)
+ assert f() == []
+
+ def test_1_Ram_Concurrency(self):
+ self.getPage('/set_session_cls/cherrypy.lib.sessions.RamSession')
+ self._test_Concurrency()
+
+ def test_2_File_Concurrency(self):
+ self.getPage('/set_session_cls/cherrypy.lib.sessions.FileSession')
+ self._test_Concurrency()
+
+ def _test_Concurrency(self):
+ client_thread_count = 5
+ request_count = 30
+
+ # Get initial cookie
+ self.getPage('/')
+ assert self.body == b'1'
+ cookies = self.cookies
+
+ data_dict = {}
+ errors = []
+
+ def request(index):
+ if self.scheme == 'https':
+ c = HTTPSConnection('%s:%s' % (self.interface(), self.PORT))
+ else:
+ c = HTTPConnection('%s:%s' % (self.interface(), self.PORT))
+ for i in range(request_count):
+ c.putrequest('GET', '/')
+ for k, v in cookies:
+ c.putheader(k, v)
+ c.endheaders()
+ response = c.getresponse()
+ body = response.read()
+ if response.status != 200 or not body.isdigit():
+ errors.append((response.status, body))
+ else:
+ data_dict[index] = max(data_dict[index], int(body))
+ # Uncomment the following line to prove threads overlap.
+ # sys.stdout.write("%d " % index)
+
+ # Start <request_count> requests from each of
+ # <client_thread_count> concurrent clients
+ ts = []
+ for c in range(client_thread_count):
+ data_dict[c] = 0
+ t = threading.Thread(target=request, args=(c,))
+ ts.append(t)
+ t.start()
+
+ for t in ts:
+ t.join()
+
+ hitcount = max(data_dict.values())
+ expected = 1 + (client_thread_count * request_count)
+
+ for e in errors:
+ print(e)
+ assert len(errors) == 0
+ assert hitcount == expected
+
+ def test_3_Redirect(self):
+ # Start a new session
+ self.getPage('/testStr')
+ self.getPage('/iredir', self.cookies)
+ assert self.body == b'FileSession'
+
+ def test_4_File_deletion(self):
+ # Start a new session
+ self.getPage('/testStr')
+ # Delete the session file manually and retry.
+ id = self.cookies[0][1].split(';', 1)[0].split('=', 1)[1]
+ path = os.path.join(localDir, 'session-' + id)
+ os.unlink(path)
+ self.getPage('/testStr', self.cookies)
+
+ def test_5_Error_paths(self):
+ self.getPage('/unknown/page')
+ self.assertErrorPage(404, "The path '/unknown/page' was not found.")
+
+ # Note: this path is *not* the same as above. The above
+ # takes a normal route through the session code; this one
+ # skips the session code's before_handler and only calls
+ # before_finalize (save) and on_end (close). So the session
+ # code has to survive calling save/close without init.
+ self.getPage('/restricted', self.cookies, method='POST')
+ self.assertErrorPage(405, response_codes[405][1])
+
+ def test_6_regenerate(self):
+ self.getPage('/testStr')
+ # grab the cookie ID
+ id1 = self.cookies[0][1].split(';', 1)[0].split('=', 1)[1]
+ self.getPage('/regen')
+ assert self.body == b'logged in'
+ id2 = self.cookies[0][1].split(';', 1)[0].split('=', 1)[1]
+ assert id1 != id2
+
+ self.getPage('/testStr')
+ # grab the cookie ID
+ id1 = self.cookies[0][1].split(';', 1)[0].split('=', 1)[1]
+ self.getPage('/testStr',
+ headers=[
+ ('Cookie',
+ 'session_id=maliciousid; '
+ 'expires=Sat, 27 Oct 2017 04:18:28 GMT; Path=/;')])
+ id2 = self.cookies[0][1].split(';', 1)[0].split('=', 1)[1]
+ assert id1 != id2
+ assert id2 != 'maliciousid'
+
+ def test_7_session_cookies(self):
+ self.getPage('/set_session_cls/cherrypy.lib.sessions.RamSession')
+ self.getPage('/clear')
+ self.getPage('/session_cookie')
+ # grab the cookie ID
+ cookie_parts = dict([p.strip().split('=')
+ for p in self.cookies[0][1].split(';')])
+ # Assert there is no 'expires' param
+ assert set(cookie_parts.keys()) == {'temp', 'Path'}
+ id1 = cookie_parts['temp']
+ assert list(sessions.RamSession.cache) == [id1]
+
+ # Send another request in the same "browser session".
+ self.getPage('/session_cookie', self.cookies)
+ cookie_parts = dict([p.strip().split('=')
+ for p in self.cookies[0][1].split(';')])
+ # Assert there is no 'expires' param
+ assert set(cookie_parts.keys()) == {'temp', 'Path'}
+ assert self.body.decode('utf-8') == id1
+ assert list(sessions.RamSession.cache) == [id1]
+
+ # Simulate a browser close by just not sending the cookies
+ self.getPage('/session_cookie')
+ # grab the cookie ID
+ cookie_parts = dict([p.strip().split('=')
+ for p in self.cookies[0][1].split(';')])
+ # Assert there is no 'expires' param
+ assert set(cookie_parts.keys()) == {'temp', 'Path'}
+ # Assert a new id has been generated...
+ id2 = cookie_parts['temp']
+ assert id1 != id2
+ assert set(sessions.RamSession.cache.keys()) == {id1, id2}
+
+ # Wait for the session.timeout on both sessions
+ time.sleep(2.5)
+ cache = list(sessions.RamSession.cache)
+ if cache:
+ if cache == [id2]:
+ self.fail('The second session did not time out.')
+ else:
+ self.fail('Unknown session id in cache: %r', cache)
+
+ def test_8_Ram_Cleanup(self):
+ def lock():
+ s1 = sessions.RamSession()
+ s1.acquire_lock()
+ time.sleep(1)
+ s1.release_lock()
+
+ t = threading.Thread(target=lock)
+ t.start()
+ start = time.time()
+ while not sessions.RamSession.locks and time.time() - start < 5:
+ time.sleep(0.01)
+ assert len(sessions.RamSession.locks) == 1, 'Lock not acquired'
+ s2 = sessions.RamSession()
+ s2.clean_up()
+ msg = 'Clean up should not remove active lock'
+ assert len(sessions.RamSession.locks) == 1, msg
+ t.join()
+
+
+def is_memcached_present():
+ executable = find_executable('memcached')
+ return bool(executable)
+
+
+@pytest.fixture(scope='session')
+def memcached_server_present():
+ is_memcached_present() or pytest.skip('memcached not available')
+
+
+@pytest.fixture()
+def memcached_client_present():
+ pytest.importorskip('memcache')
+
+
+@pytest.fixture(scope='session')
+def memcached_instance(request, watcher_getter, memcached_server_present):
+ """
+ Start up an instance of memcached.
+ """
+
+ port = portend.find_available_local_port()
+
+ def is_occupied():
+ try:
+ portend.Checker().assert_free('localhost', port)
+ except Exception:
+ return True
+ return False
+
+ proc = watcher_getter(
+ name='memcached',
+ arguments=['-p', str(port)],
+ checker=is_occupied,
+ request=request,
+ )
+ return locals()
+
+
+@pytest.fixture
+def memcached_configured(
+ memcached_instance, monkeypatch,
+ memcached_client_present,
+):
+ server = 'localhost:{port}'.format_map(memcached_instance)
+ monkeypatch.setattr(
+ sessions.MemcachedSession,
+ 'servers',
+ [server],
+ )
+
+
+@pytest.mark.skipif(
+ platform.system() == 'Windows',
+ reason='pytest-services helper does not work under Windows',
+)
+@pytest.mark.usefixtures('memcached_configured')
+class MemcachedSessionTest(helper.CPWebCase):
+ setup_server = staticmethod(setup_server)
+
+ def test_0_Session(self):
+ self.getPage(
+ '/set_session_cls/cherrypy.lib.sessions.MemcachedSession'
+ )
+
+ self.getPage('/testStr')
+ assert self.body == b'1'
+ self.getPage('/testGen', self.cookies)
+ assert self.body == b'2'
+ self.getPage('/testStr', self.cookies)
+ assert self.body == b'3'
+ self.getPage('/length', self.cookies)
+ self.assertErrorPage(500)
+ assert b'NotImplementedError' in self.body
+ self.getPage('/delkey?key=counter', self.cookies)
+ assert self.status_code == 200
+
+ # Wait for the session.timeout (1 second)
+ time.sleep(1.25)
+ self.getPage('/')
+ assert self.body == b'1'
+
+ # Test session __contains__
+ self.getPage('/keyin?key=counter', self.cookies)
+ assert self.body == b'True'
+
+ # Test session delete
+ self.getPage('/delete', self.cookies)
+ assert self.body == b'done'
+
+ def test_1_Concurrency(self):
+ client_thread_count = 5
+ request_count = 30
+
+ # Get initial cookie
+ self.getPage('/')
+ assert self.body == b'1'
+ cookies = self.cookies
+
+ data_dict = {}
+
+ def request(index):
+ for i in range(request_count):
+ self.getPage('/', cookies)
+ # Uncomment the following line to prove threads overlap.
+ # sys.stdout.write("%d " % index)
+ if not self.body.isdigit():
+ self.fail(self.body)
+ data_dict[index] = int(self.body)
+
+ # Start <request_count> concurrent requests from
+ # each of <client_thread_count> clients
+ ts = []
+ for c in range(client_thread_count):
+ data_dict[c] = 0
+ t = threading.Thread(target=request, args=(c,))
+ ts.append(t)
+ t.start()
+
+ for t in ts:
+ t.join()
+
+ hitcount = max(data_dict.values())
+ expected = 1 + (client_thread_count * request_count)
+ assert hitcount == expected
+
+ def test_3_Redirect(self):
+ # Start a new session
+ self.getPage('/testStr')
+ self.getPage('/iredir', self.cookies)
+ assert self.body == b'MemcachedSession'
+
+ def test_5_Error_paths(self):
+ self.getPage('/unknown/page')
+ self.assertErrorPage(
+ 404, "The path '/unknown/page' was not found.")
+
+ # Note: this path is *not* the same as above. The above
+ # takes a normal route through the session code; this one
+ # skips the session code's before_handler and only calls
+ # before_finalize (save) and on_end (close). So the session
+ # code has to survive calling save/close without init.
+ self.getPage('/restricted', self.cookies, method='POST')
+ self.assertErrorPage(405, response_codes[405][1])
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_sessionauthenticate.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_sessionauthenticate.py
new file mode 100644
index 0000000000000000000000000000000000000000..63053fcb710671c2e7e0c31b2a949b86a249705a
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_sessionauthenticate.py
@@ -0,0 +1,61 @@
+import cherrypy
+from cherrypy.test import helper
+
+
+class SessionAuthenticateTest(helper.CPWebCase):
+
+ @staticmethod
+ def setup_server():
+
+ def check(username, password):
+ # Dummy check_username_and_password function
+ if username != 'test' or password != 'password':
+ return 'Wrong login/password'
+
+ def augment_params():
+ # A simple tool to add some things to request.params
+ # This is to check to make sure that session_auth can handle
+ # request params (ticket #780)
+ cherrypy.request.params['test'] = 'test'
+
+ cherrypy.tools.augment_params = cherrypy.Tool(
+ 'before_handler', augment_params, None, priority=30)
+
+ class Test:
+
+ _cp_config = {
+ 'tools.sessions.on': True,
+ 'tools.session_auth.on': True,
+ 'tools.session_auth.check_username_and_password': check,
+ 'tools.augment_params.on': True,
+ }
+
+ @cherrypy.expose
+ def index(self, **kwargs):
+ return 'Hi %s, you are logged in' % cherrypy.request.login
+
+ cherrypy.tree.mount(Test())
+
+ def testSessionAuthenticate(self):
+ # request a page and check for login form
+ self.getPage('/')
+ self.assertInBody('<form method="post" action="do_login">')
+
+ # setup credentials
+ login_body = 'username=test&password=password&from_page=/'
+
+ # attempt a login
+ self.getPage('/do_login', method='POST', body=login_body)
+ self.assertStatus((302, 303))
+
+ # get the page now that we are logged in
+ self.getPage('/', self.cookies)
+ self.assertBody('Hi test, you are logged in')
+
+ # do a logout
+ self.getPage('/do_logout', self.cookies, method='POST')
+ self.assertStatus((302, 303))
+
+ # verify we are logged out
+ self.getPage('/', self.cookies)
+ self.assertInBody('<form method="post" action="do_login">')
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_states.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_states.py
new file mode 100644
index 0000000000000000000000000000000000000000..28dd651008811a36378cd614a1857c0c554da0b3
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_states.py
@@ -0,0 +1,471 @@
+import os
+import signal
+import time
+from http.client import BadStatusLine
+
+import pytest
+import portend
+
+import cherrypy
+import cherrypy.process.servers
+from cherrypy.test import helper
+
+
+engine = cherrypy.engine
+thisdir = os.path.join(os.getcwd(), os.path.dirname(__file__))
+
+
+class Dependency:
+
+ def __init__(self, bus):
+ self.bus = bus
+ self.running = False
+ self.startcount = 0
+ self.gracecount = 0
+ self.threads = {}
+
+ def subscribe(self):
+ self.bus.subscribe('start', self.start)
+ self.bus.subscribe('stop', self.stop)
+ self.bus.subscribe('graceful', self.graceful)
+ self.bus.subscribe('start_thread', self.startthread)
+ self.bus.subscribe('stop_thread', self.stopthread)
+
+ def start(self):
+ self.running = True
+ self.startcount += 1
+
+ def stop(self):
+ self.running = False
+
+ def graceful(self):
+ self.gracecount += 1
+
+ def startthread(self, thread_id):
+ self.threads[thread_id] = None
+
+ def stopthread(self, thread_id):
+ del self.threads[thread_id]
+
+
+db_connection = Dependency(engine)
+
+
+def setup_server():
+ class Root:
+
+ @cherrypy.expose
+ def index(self):
+ return 'Hello World'
+
+ @cherrypy.expose
+ def ctrlc(self):
+ raise KeyboardInterrupt()
+
+ @cherrypy.expose
+ def graceful(self):
+ engine.graceful()
+ return 'app was (gracefully) restarted succesfully'
+
+ cherrypy.tree.mount(Root())
+ cherrypy.config.update({
+ 'environment': 'test_suite',
+ })
+
+ db_connection.subscribe()
+
+# ------------ Enough helpers. Time for real live test cases. ------------ #
+
+
+class ServerStateTests(helper.CPWebCase):
+ setup_server = staticmethod(setup_server)
+
+ def setUp(self):
+ cherrypy.server.socket_timeout = 0.1
+ self.do_gc_test = False
+
+ def test_0_NormalStateFlow(self):
+ engine.stop()
+ # Our db_connection should not be running
+ self.assertEqual(db_connection.running, False)
+ self.assertEqual(db_connection.startcount, 1)
+ self.assertEqual(len(db_connection.threads), 0)
+
+ # Test server start
+ engine.start()
+ self.assertEqual(engine.state, engine.states.STARTED)
+
+ host = cherrypy.server.socket_host
+ port = cherrypy.server.socket_port
+ portend.occupied(host, port, timeout=0.1)
+
+ # The db_connection should be running now
+ self.assertEqual(db_connection.running, True)
+ self.assertEqual(db_connection.startcount, 2)
+ self.assertEqual(len(db_connection.threads), 0)
+
+ self.getPage('/')
+ self.assertBody('Hello World')
+ self.assertEqual(len(db_connection.threads), 1)
+
+ # Test engine stop. This will also stop the HTTP server.
+ engine.stop()
+ self.assertEqual(engine.state, engine.states.STOPPED)
+
+ # Verify that our custom stop function was called
+ self.assertEqual(db_connection.running, False)
+ self.assertEqual(len(db_connection.threads), 0)
+
+ # Block the main thread now and verify that exit() works.
+ def exittest():
+ self.getPage('/')
+ self.assertBody('Hello World')
+ engine.exit()
+ cherrypy.server.start()
+ engine.start_with_callback(exittest)
+ engine.block()
+ self.assertEqual(engine.state, engine.states.EXITING)
+
+ def test_1_Restart(self):
+ cherrypy.server.start()
+ engine.start()
+
+ # The db_connection should be running now
+ self.assertEqual(db_connection.running, True)
+ grace = db_connection.gracecount
+
+ self.getPage('/')
+ self.assertBody('Hello World')
+ self.assertEqual(len(db_connection.threads), 1)
+
+ # Test server restart from this thread
+ engine.graceful()
+ self.assertEqual(engine.state, engine.states.STARTED)
+ self.getPage('/')
+ self.assertBody('Hello World')
+ self.assertEqual(db_connection.running, True)
+ self.assertEqual(db_connection.gracecount, grace + 1)
+ self.assertEqual(len(db_connection.threads), 1)
+
+ # Test server restart from inside a page handler
+ self.getPage('/graceful')
+ self.assertEqual(engine.state, engine.states.STARTED)
+ self.assertBody('app was (gracefully) restarted succesfully')
+ self.assertEqual(db_connection.running, True)
+ self.assertEqual(db_connection.gracecount, grace + 2)
+ # Since we are requesting synchronously, is only one thread used?
+ # Note that the "/graceful" request has been flushed.
+ self.assertEqual(len(db_connection.threads), 0)
+
+ engine.stop()
+ self.assertEqual(engine.state, engine.states.STOPPED)
+ self.assertEqual(db_connection.running, False)
+ self.assertEqual(len(db_connection.threads), 0)
+
+    def test_2_KeyboardInterrupt(self):
+        # Raise a keyboard interrupt in the HTTP server's main thread.
+        # We must start the server in this, the main thread
+        engine.start()
+        cherrypy.server.start()
+
+        self.persistent = True
+        try:
+            # Make the first request and assert there's no "Connection: close".
+            self.getPage('/')
+            self.assertStatus('200 OK')
+            self.assertBody('Hello World')
+            self.assertNoHeader('Connection')
+
+            cherrypy.server.httpserver.interrupt = KeyboardInterrupt
+            engine.block()
+
+            self.assertEqual(db_connection.running, False)
+            self.assertEqual(len(db_connection.threads), 0)
+            self.assertEqual(engine.state, engine.states.EXITING)
+        finally:
+            self.persistent = False
+
+        # Raise a keyboard interrupt in a page handler; on multithreaded
+        # servers, this should occur in one of the worker threads.
+        # This should raise a BadStatusLine error, since the worker
+        # thread will just die without writing a response.
+        engine.start()
+        cherrypy.server.start()
+        # From python3.5 a new exception is returned when the connection
+        # ends abruptly:
+        #   http.client.RemoteDisconnected
+        # RemoteDisconnected is a subclass of:
+        #   (ConnectionResetError, http.client.BadStatusLine)
+        # and ConnectionResetError is an indirect subclass of:
+        #   OSError
+        # From python 3.3 and up socket.error is an alias to OSError
+        # following PEP-3151, therefore http.client.RemoteDisconnected
+        # is considered a socket.error.
+        #
+        # raise_subcls specifies the classes that are not going
+        # to be considered as a socket.error for the retries.
+        # Given that RemoteDisconnected is part of BadStatusLine
+        # we can use the same call for all py3 versions without
+        # side effects. python < 3.5 will raise directly BadStatusLine
+        # which is not a subclass for socket.error/OSError.
+        try:
+            self.getPage('/ctrlc', raise_subcls=BadStatusLine)
+        except BadStatusLine:
+            pass
+        else:
+            print(self.body)
+            self.fail('AssertionError: BadStatusLine not raised')
+
+        engine.block()
+        self.assertEqual(db_connection.running, False)
+        self.assertEqual(len(db_connection.threads), 0)
+
+    @pytest.mark.xfail(
+        'sys.platform == "Darwin" '
+        'and sys.version_info > (3, 7) '
+        'and os.environ["TRAVIS"]',
+        reason='https://github.com/cherrypy/cherrypy/issues/1693',
+    )
+    def test_4_Autoreload(self):
+        # If test_3 has not been executed, the server won't be stopped,
+        # so we'll have to do it.
+        if engine.state != engine.states.EXITING:
+            engine.exit()
+
+        # Start the demo script in a new process
+        p = helper.CPProcess(ssl=(self.scheme.lower() == 'https'))
+        p.write_conf(extra='test_case_name: "test_4_Autoreload"')
+        p.start(imports='cherrypy.test._test_states_demo')
+        try:
+            self.getPage('/start')
+            start = float(self.body)
+
+            # Give the autoreloader time to cache the file's mtime.
+            time.sleep(2)
+
+            # Touch the file
+            os.utime(os.path.join(thisdir, '_test_states_demo.py'), None)
+
+            # Give the autoreloader time to re-exec the process
+            time.sleep(2)
+            host = cherrypy.server.socket_host
+            port = cherrypy.server.socket_port
+            portend.occupied(host, port, timeout=5)
+
+            self.getPage('/start')
+            if not (float(self.body) > start):
+                raise AssertionError('start time %s not greater than %s' %
+                                     (float(self.body), start))
+        finally:
+            # Shut down the spawned process
+            self.getPage('/exit')
+        p.join()
+
+    def test_5_Start_Error(self):
+        # If test_3 has not been executed, the server won't be stopped,
+        # so we'll have to do it.
+        if engine.state != engine.states.EXITING:
+            engine.exit()
+
+        # If a process raises an error during start, it should stop the
+        # engine and exit with a non-zero exit code.
+        p = helper.CPProcess(ssl=(self.scheme.lower() == 'https'),
+                             wait=True)
+        p.write_conf(
+            extra="""starterror: True
+test_case_name: "test_5_Start_Error"
+"""
+        )
+        p.start(imports='cherrypy.test._test_states_demo')
+        if p.exit_code == 0:
+            self.fail('Process failed to return nonzero exit code.')
+
+
+class PluginTests(helper.CPWebCase):
+
+    def test_daemonize(self):
+        if os.name not in ['posix']:
+            return self.skip('skipped (not on posix) ')
+        self.HOST = '127.0.0.1'
+        self.PORT = 8081
+        # Spawn the process and wait; when this returns, the original process
+        # is finished. If it daemonized properly, we should still be able
+        # to access pages.
+        p = helper.CPProcess(ssl=(self.scheme.lower() == 'https'),
+                             wait=True, daemonize=True,
+                             socket_host='127.0.0.1',
+                             socket_port=8081)
+        p.write_conf(
+            extra='test_case_name: "test_daemonize"')
+        p.start(imports='cherrypy.test._test_states_demo')
+        try:
+            # Just get the pid of the daemonized process.
+            self.getPage('/pid')
+            self.assertStatus(200)
+            page_pid = int(self.body)
+            self.assertEqual(page_pid, p.get_pid())
+        finally:
+            # Shut down the spawned process
+            self.getPage('/exit')
+        p.join()
+
+        # Wait until here to test the exit code because we want to ensure
+        # that we wait for the daemon to finish running before we fail.
+        if p.exit_code != 0:
+            self.fail('Daemonized parent process failed to exit cleanly.')
+
+
+class SignalHandlingTests(helper.CPWebCase):
+
+    def test_SIGHUP_tty(self):
+        # When not daemonized, SIGHUP should shut down the server.
+        try:
+            from signal import SIGHUP
+        except ImportError:
+            return self.skip('skipped (no SIGHUP) ')
+
+        # Spawn the process.
+        p = helper.CPProcess(ssl=(self.scheme.lower() == 'https'))
+        p.write_conf(
+            extra='test_case_name: "test_SIGHUP_tty"')
+        p.start(imports='cherrypy.test._test_states_demo')
+        # Send a SIGHUP
+        os.kill(p.get_pid(), SIGHUP)
+        # This might hang if things aren't working right, but meh.
+        p.join()
+
+    def test_SIGHUP_daemonized(self):
+        # When daemonized, SIGHUP should restart the server.
+        try:
+            from signal import SIGHUP
+        except ImportError:
+            return self.skip('skipped (no SIGHUP) ')
+
+        if os.name not in ['posix']:
+            return self.skip('skipped (not on posix) ')
+
+        # Spawn the process and wait; when this returns, the original process
+        # is finished. If it daemonized properly, we should still be able
+        # to access pages.
+        p = helper.CPProcess(ssl=(self.scheme.lower() == 'https'),
+                             wait=True, daemonize=True)
+        p.write_conf(
+            extra='test_case_name: "test_SIGHUP_daemonized"')
+        p.start(imports='cherrypy.test._test_states_demo')
+
+        pid = p.get_pid()
+        try:
+            # Send a SIGHUP
+            os.kill(pid, SIGHUP)
+            # Give the server some time to restart
+            time.sleep(2)
+            self.getPage('/pid')
+            self.assertStatus(200)
+            new_pid = int(self.body)
+            self.assertNotEqual(new_pid, pid)
+        finally:
+            # Shut down the spawned process
+            self.getPage('/exit')
+        p.join()
+
+    def _require_signal_and_kill(self, signal_name):
+        if not hasattr(signal, signal_name):
+            self.skip('skipped (no %(signal_name)s)' % vars())
+
+        if not hasattr(os, 'kill'):
+            self.skip('skipped (no os.kill)')
+
+    def test_SIGTERM(self):
+        'SIGTERM should shut down the server whether daemonized or not.'
+        self._require_signal_and_kill('SIGTERM')
+
+        # Spawn a normal, undaemonized process.
+        p = helper.CPProcess(ssl=(self.scheme.lower() == 'https'))
+        p.write_conf(
+            extra='test_case_name: "test_SIGTERM"')
+        p.start(imports='cherrypy.test._test_states_demo')
+        # Send a SIGTERM
+        os.kill(p.get_pid(), signal.SIGTERM)
+        # This might hang if things aren't working right, but meh.
+        p.join()
+
+        if os.name in ['posix']:
+            # Spawn a daemonized process and test again.
+            p = helper.CPProcess(ssl=(self.scheme.lower() == 'https'),
+                                 wait=True, daemonize=True)
+            p.write_conf(
+                extra='test_case_name: "test_SIGTERM_2"')
+            p.start(imports='cherrypy.test._test_states_demo')
+            # Send a SIGTERM
+            os.kill(p.get_pid(), signal.SIGTERM)
+            # This might hang if things aren't working right, but meh.
+            p.join()
+
+    def test_signal_handler_unsubscribe(self):
+        self._require_signal_and_kill('SIGTERM')
+
+        # Although Windows has `os.kill` and SIGTERM is defined, the
+        # platform does not implement signals and sending SIGTERM
+        # will result in a forced termination of the process.
+        # Therefore, this test is not suitable for Windows.
+        if os.name == 'nt':
+            self.skip('SIGTERM not available')
+
+        # Spawn a normal, undaemonized process.
+        p = helper.CPProcess(ssl=(self.scheme.lower() == 'https'))
+        p.write_conf(
+            extra="""unsubsig: True
+test_case_name: "test_signal_handler_unsubscribe"
+""")
+        p.start(imports='cherrypy.test._test_states_demo')
+        # Ask the process to quit
+        os.kill(p.get_pid(), signal.SIGTERM)
+        # This might hang if things aren't working right, but meh.
+        p.join()
+
+        # Assert the old handler ran.
+        log_lines = list(open(p.error_log, 'rb'))
+        assert any(
+            line.endswith(b'I am an old SIGTERM handler.\n')
+            for line in log_lines
+        )
+
+
+def test_safe_wait_INADDR_ANY():  # pylint: disable=invalid-name
+    """
+    Wait on INADDR_ANY should not raise IOError
+
+    In cases where the loopback interface does not exist, CherryPy cannot
+    effectively determine if a port binding to INADDR_ANY was effected.
+    In this situation, CherryPy should assume that it failed to detect
+    the binding (not that the binding failed) and only warn that it could
+    not verify it.
+    """
+    # At such a time that CherryPy can reliably determine one or more
+    # viable IP addresses of the host, this test may be removed.
+
+    # Simulate the behavior we observe when no loopback interface is
+    # present by finding a port that's not occupied, then waiting on it.
+
+    free_port = portend.find_available_local_port()
+
+    servers = cherrypy.process.servers
+
+    inaddr_any = '0.0.0.0'
+
+    # Wait on the free port that's unbound
+    with pytest.warns(
+            UserWarning,
+            match='Unable to verify that the server is bound on ',
+    ) as warnings:
+        # pylint: disable=protected-access
+        with servers._safe_wait(inaddr_any, free_port):
+            portend.occupied(inaddr_any, free_port, timeout=1)
+    assert len(warnings) == 1
+
+    # The wait should still raise an IO error if INADDR_ANY was
+    # not supplied.
+    with pytest.raises(IOError):
+        # pylint: disable=protected-access
+        with servers._safe_wait('127.0.0.1', free_port):
+            portend.occupied('127.0.0.1', free_port, timeout=1)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_static.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_static.py
new file mode 100644
index 0000000000000000000000000000000000000000..bfe2f40fe3867031ddedcf9740bdf4e90c1e1dad
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_static.py
@@ -0,0 +1,464 @@
+# -*- coding: utf-8 -*-
+import io
+import os
+import sys
+import re
+import platform
+import tempfile
+import urllib.parse
+import unittest.mock
+from http.client import HTTPConnection
+
+import pytest
+import py.path
+import path
+
+import cherrypy
+from cherrypy.lib import static
+from cherrypy._cpcompat import HTTPSConnection, ntou, tonative
+from cherrypy.test import helper
+
+
+@pytest.fixture
+def unicode_filesystem(tmpdir):
+    _check_unicode_filesystem(tmpdir)
+
+
+def _check_unicode_filesystem(tmpdir):
+    filename = tmpdir / ntou('☃', 'utf-8')
+    tmpl = 'File system encoding ({encoding}) cannot support unicode filenames'
+    msg = tmpl.format(encoding=sys.getfilesystemencoding())
+    try:
+        io.open(str(filename), 'w').close()
+    except UnicodeEncodeError:
+        pytest.skip(msg)
+
+
+def ensure_unicode_filesystem():
+    """
+    TODO: replace with the pytest fixture above once webtest.TestCase
+    no longer implies unittest.
+    """
+    tmpdir = py.path.local(tempfile.mkdtemp())
+    try:
+        _check_unicode_filesystem(tmpdir)
+    finally:
+        tmpdir.remove()
+
+
+curdir = path.Path(__file__).dirname()
+has_space_filepath = curdir / 'static' / 'has space.html'
+bigfile_filepath = curdir / 'static' / 'bigfile.log'
+
+# The file size needs to be big enough that half of it
+# won't be socket-buffered (or server-buffered) all in one go. See
+# test_file_stream.
+MB = 2 ** 20
+BIGFILE_SIZE = 32 * MB
+
+
+class StaticTest(helper.CPWebCase):
+    files_to_remove = []
+
+    @staticmethod
+    def setup_server():
+        if not os.path.exists(has_space_filepath):
+            with open(has_space_filepath, 'wb') as f:
+                f.write(b'Hello, world\r\n')
+        needs_bigfile = (
+            not os.path.exists(bigfile_filepath) or
+            os.path.getsize(bigfile_filepath) != BIGFILE_SIZE
+        )
+        if needs_bigfile:
+            with open(bigfile_filepath, 'wb') as f:
+                f.write(b'x' * BIGFILE_SIZE)
+
+        class Root:
+
+            @cherrypy.expose
+            @cherrypy.config(**{'response.stream': True})
+            def bigfile(self):
+                self.f = static.serve_file(bigfile_filepath)
+                return self.f
+
+            @cherrypy.expose
+            def tell(self):
+                if self.f.input.closed:
+                    return ''
+                return repr(self.f.input.tell()).rstrip('L')
+
+            @cherrypy.expose
+            def fileobj(self):
+                f = open(os.path.join(curdir, 'style.css'), 'rb')
+                return static.serve_fileobj(f, content_type='text/css')
+
+            @cherrypy.expose
+            def bytesio(self):
+                f = io.BytesIO(b'Fee\nfie\nfo\nfum')
+                return static.serve_fileobj(f, content_type='text/plain')
+
+            @cherrypy.expose
+            def serve_file_utf8_filename(self):
+                return static.serve_file(
+                    __file__,
+                    disposition='attachment',
+                    name='has_utf-8_character_☃.html')
+
+            @cherrypy.expose
+            def serve_fileobj_utf8_filename(self):
+                return static.serve_fileobj(
+                    io.BytesIO('☃\nfie\nfo\nfum'.encode('utf-8')),
+                    disposition='attachment',
+                    name='has_utf-8_character_☃.html')
+
+        class Static:
+
+            @cherrypy.expose
+            def index(self):
+                return 'You want the Baron? You can have the Baron!'
+
+            @cherrypy.expose
+            def dynamic(self):
+                return 'This is a DYNAMIC page'
+
+        root = Root()
+        root.static = Static()
+
+        rootconf = {
+            '/static': {
+                'tools.staticdir.on': True,
+                'tools.staticdir.dir': 'static',
+                'tools.staticdir.root': curdir,
+            },
+            '/static-long': {
+                'tools.staticdir.on': True,
+                'tools.staticdir.dir': r'\\?\%s' % curdir,
+            },
+            '/style.css': {
+                'tools.staticfile.on': True,
+                'tools.staticfile.filename': os.path.join(curdir, 'style.css'),
+            },
+            '/docroot': {
+                'tools.staticdir.on': True,
+                'tools.staticdir.root': curdir,
+                'tools.staticdir.dir': 'static',
+                'tools.staticdir.index': 'index.html',
+            },
+            '/error': {
+                'tools.staticdir.on': True,
+                'request.show_tracebacks': True,
+            },
+            '/404test': {
+                'tools.staticdir.on': True,
+                'tools.staticdir.root': curdir,
+                'tools.staticdir.dir': 'static',
+                'error_page.404': error_page_404,
+            }
+        }
+        rootApp = cherrypy.Application(root)
+        rootApp.merge(rootconf)
+
+        test_app_conf = {
+            '/test': {
+                'tools.staticdir.index': 'index.html',
+                'tools.staticdir.on': True,
+                'tools.staticdir.root': curdir,
+                'tools.staticdir.dir': 'static',
+            },
+        }
+        testApp = cherrypy.Application(Static())
+        testApp.merge(test_app_conf)
+
+        vhost = cherrypy._cpwsgi.VirtualHost(rootApp, {'virt.net': testApp})
+        cherrypy.tree.graft(vhost)
+
+    @classmethod
+    def teardown_class(cls):
+        super(cls, cls).teardown_class()
+        files_to_remove = has_space_filepath, bigfile_filepath
+        files_to_remove += tuple(cls.files_to_remove)
+        for file in files_to_remove:
+            file.remove_p()
+
+    def test_static(self):
+        self.getPage('/static/index.html')
+        self.assertStatus('200 OK')
+        self.assertHeader('Content-Type', 'text/html')
+        self.assertBody('Hello, world\r\n')
+
+        # Using a staticdir.root value in a subdir...
+        self.getPage('/docroot/index.html')
+        self.assertStatus('200 OK')
+        self.assertHeader('Content-Type', 'text/html')
+        self.assertBody('Hello, world\r\n')
+
+        # Check a filename with spaces in it
+        self.getPage('/static/has%20space.html')
+        self.assertStatus('200 OK')
+        self.assertHeader('Content-Type', 'text/html')
+        self.assertBody('Hello, world\r\n')
+
+        self.getPage('/style.css')
+        self.assertStatus('200 OK')
+        self.assertHeader('Content-Type', 'text/css')
+        # Note: The body should be exactly 'Dummy stylesheet\n', but
+        # unfortunately some tools such as WinZip sometimes turn \n
+        # into \r\n on Windows when extracting the CherryPy tarball so
+        # we just check the content
+        self.assertMatchesBody('^Dummy stylesheet')
+
+        # Check a filename with utf-8 characters in it
+        ascii_fn = 'has_utf-8_character_.html'
+        url_quote_fn = 'has_utf-8_character_%E2%98%83.html'  # %E2%98%83 == ☃
+        expected_content_disposition = (
+            'attachment; filename="{!s}"; filename*=UTF-8\'\'{!s}'.
+            format(ascii_fn, url_quote_fn)
+        )
+
+        self.getPage('/serve_file_utf8_filename')
+        self.assertStatus('200 OK')
+        self.assertHeader('Content-Disposition', expected_content_disposition)
+
+        self.getPage('/serve_fileobj_utf8_filename')
+        self.assertStatus('200 OK')
+        self.assertHeader('Content-Disposition', expected_content_disposition)
+
+    @pytest.mark.skipif(platform.system() != 'Windows', reason='Windows only')
+    def test_static_longpath(self):
+        """Test serving of a file in subdir of a Windows long-path
+        staticdir."""
+        self.getPage('/static-long/static/index.html')
+        self.assertStatus('200 OK')
+        self.assertHeader('Content-Type', 'text/html')
+        self.assertBody('Hello, world\r\n')
+
+    def test_fallthrough(self):
+        # Test that NotFound will then try dynamic handlers (see [878]).
+        self.getPage('/static/dynamic')
+        self.assertBody('This is a DYNAMIC page')
+
+        # Check a directory via fall-through to dynamic handler.
+        self.getPage('/static/')
+        self.assertStatus('200 OK')
+        self.assertHeader('Content-Type', 'text/html;charset=utf-8')
+        self.assertBody('You want the Baron? You can have the Baron!')
+
+    def test_index(self):
+        # Check a directory via "staticdir.index".
+        self.getPage('/docroot/')
+        self.assertStatus('200 OK')
+        self.assertHeader('Content-Type', 'text/html')
+        self.assertBody('Hello, world\r\n')
+        # The same page should be returned even if redirected.
+        self.getPage('/docroot')
+        self.assertStatus(301)
+        self.assertHeader('Location', '%s/docroot/' % self.base())
+        self.assertMatchesBody(
+            "This resource .* <a href=(['\"])%s/docroot/\\1>"
+            '%s/docroot/</a>.'
+            % (self.base(), self.base())
+        )
+
+    def test_config_errors(self):
+        # Check that we get an error if no .file or .dir
+        self.getPage('/error/thing.html')
+        self.assertErrorPage(500)
+        if sys.version_info >= (3, 3):
+            errmsg = (
+                r'TypeError: staticdir\(\) missing 2 '
+                'required positional arguments'
+            )
+        else:
+            errmsg = (
+                r'TypeError: staticdir\(\) takes at least 2 '
+                r'(positional )?arguments \(0 given\)'
+            )
+        self.assertMatchesBody(errmsg.encode('ascii'))
+
+    def test_security(self):
+        # Test up-level security
+        self.getPage('/static/../../test/style.css')
+        self.assertStatus((400, 403))
+
+    def test_modif(self):
+        # Test modified-since on a reasonably-large file
+        self.getPage('/static/dirback.jpg')
+        self.assertStatus('200 OK')
+        lastmod = ''
+        for k, v in self.headers:
+            if k == 'Last-Modified':
+                lastmod = v
+        ims = ('If-Modified-Since', lastmod)
+        self.getPage('/static/dirback.jpg', headers=[ims])
+        self.assertStatus(304)
+        self.assertNoHeader('Content-Type')
+        self.assertNoHeader('Content-Length')
+        self.assertNoHeader('Content-Disposition')
+        self.assertBody('')
+
+    def test_755_vhost(self):
+        self.getPage('/test/', [('Host', 'virt.net')])
+        self.assertStatus(200)
+        self.getPage('/test', [('Host', 'virt.net')])
+        self.assertStatus(301)
+        self.assertHeader('Location', self.scheme + '://virt.net/test/')
+
+    def test_serve_fileobj(self):
+        self.getPage('/fileobj')
+        self.assertStatus('200 OK')
+        self.assertHeader('Content-Type', 'text/css;charset=utf-8')
+        self.assertMatchesBody('^Dummy stylesheet')
+
+    def test_serve_bytesio(self):
+        self.getPage('/bytesio')
+        self.assertStatus('200 OK')
+        self.assertHeader('Content-Type', 'text/plain;charset=utf-8')
+        self.assertHeader('Content-Length', 14)
+        self.assertMatchesBody('Fee\nfie\nfo\nfum')
+
+    @pytest.mark.xfail(reason='#1475')
+    def test_file_stream(self):
+        if cherrypy.server.protocol_version != 'HTTP/1.1':
+            return self.skip()
+
+        self.PROTOCOL = 'HTTP/1.1'
+
+        # Make an initial request
+        self.persistent = True
+        conn = self.HTTP_CONN
+        conn.putrequest('GET', '/bigfile', skip_host=True)
+        conn.putheader('Host', self.HOST)
+        conn.endheaders()
+        response = conn.response_class(conn.sock, method='GET')
+        response.begin()
+        self.assertEqual(response.status, 200)
+
+        body = b''
+        remaining = BIGFILE_SIZE
+        while remaining > 0:
+            data = response.fp.read(65536)
+            if not data:
+                break
+            body += data
+            remaining -= len(data)
+
+            if self.scheme == 'https':
+                newconn = HTTPSConnection
+            else:
+                newconn = HTTPConnection
+            s, h, b = helper.webtest.openURL(
+                b'/tell', headers=[], host=self.HOST, port=self.PORT,
+                http_conn=newconn)
+            if not b:
+                # The file was closed on the server.
+                tell_position = BIGFILE_SIZE
+            else:
+                tell_position = int(b)
+
+            read_so_far = len(body)
+
+            # It is difficult for us to force the server to only read
+            # the bytes that we ask for - there are going to be buffers
+            # in between.
+            #
+            # CherryPy will attempt to write as much data as it can to
+            # the socket, and we don't have a way to determine what that
+            # size will be. So we make the following assumption - by
+            # the time we have read in the entire file on the server,
+            # we will have at least received half of it. If this is not
+            # the case, then this is an indicator that either:
+            #   - machines that are running this test are using buffer
+            #     sizes greater than half of BIGFILE_SIZE; or
+            #   - streaming is broken.
+            #
+            # At the time of writing, we seem to have encountered
+            # buffer sizes bigger than 512K, so we've increased
+            # BIGFILE_SIZE to 4MB and in 2016 to 20MB and then 32MB.
+            # This test is going to keep failing according to the
+            # improvements in hardware and OS buffers.
+            if tell_position >= BIGFILE_SIZE:
+                if read_so_far < (BIGFILE_SIZE / 2):
+                    self.fail(
+                        'The file should have advanced to position %r, but '
+                        'has already advanced to the end of the file. It '
+                        'may not be streamed as intended, or at the wrong '
+                        'chunk size (64k)' % read_so_far)
+            elif tell_position < read_so_far:
+                self.fail(
+                    'The file should have advanced to position %r, but has '
+                    'only advanced to position %r. It may not be streamed '
+                    'as intended, or at the wrong chunk size (64k)' %
+                    (read_so_far, tell_position))
+
+        if body != b'x' * BIGFILE_SIZE:
+            self.fail("Body != 'x' * %d. Got %r instead (%d bytes)." %
+                      (BIGFILE_SIZE, body[:50], len(body)))
+        conn.close()
+
+    def test_file_stream_deadlock(self):
+        if cherrypy.server.protocol_version != 'HTTP/1.1':
+            return self.skip()
+
+        self.PROTOCOL = 'HTTP/1.1'
+
+        # Make an initial request but abort early.
+        self.persistent = True
+        conn = self.HTTP_CONN
+        conn.putrequest('GET', '/bigfile', skip_host=True)
+        conn.putheader('Host', self.HOST)
+        conn.endheaders()
+        response = conn.response_class(conn.sock, method='GET')
+        response.begin()
+        self.assertEqual(response.status, 200)
+        body = response.fp.read(65536)
+        if body != b'x' * len(body):
+            self.fail("Body != 'x' * %d. Got %r instead (%d bytes)." %
+                      (65536, body[:50], len(body)))
+        response.close()
+        conn.close()
+
+        # Make a second request, which should fetch the whole file.
+        self.persistent = False
+        self.getPage('/bigfile')
+        if self.body != b'x' * BIGFILE_SIZE:
+            self.fail("Body != 'x' * %d. Got %r instead (%d bytes)." %
+                      (BIGFILE_SIZE, self.body[:50], len(body)))
+
+    def test_error_page_with_serve_file(self):
+        self.getPage('/404test/yunyeen')
+        self.assertStatus(404)
+        self.assertInBody("I couldn't find that thing")
+
+    @unittest.mock.patch(
+        'http.client._contains_disallowed_url_pchar_re',
+        re.compile(r'[\n]'),
+        create=True,
+    )
+    def test_null_bytes(self):
+        self.getPage('/static/\x00')
+        self.assertStatus('404 Not Found')
+
+    @classmethod
+    def unicode_file(cls):
+        filename = ntou('Слава Україні.html', 'utf-8')
+        filepath = curdir / 'static' / filename
+        with filepath.open('w', encoding='utf-8')as strm:
+            strm.write(ntou('Героям Слава!', 'utf-8'))
+        cls.files_to_remove.append(filepath)
+
+    def test_unicode(self):
+        ensure_unicode_filesystem()
+        self.unicode_file()
+        url = ntou('/static/Слава Україні.html', 'utf-8')
+        # quote function requires str
+        url = tonative(url, 'utf-8')
+        url = urllib.parse.quote(url)
+        self.getPage(url)
+
+        expected = ntou('Героям Слава!', 'utf-8')
+        self.assertInBody(expected)
+
+
+def error_page_404(status, message, traceback, version):
+    """Custom 404 handler: serve static/404.html (see the '/404test' config)."""
+    # NOTE(review): local 'path' shadows the module-level 'path' import here.
+    path = os.path.join(curdir, 'static', '404.html')
+    return static.serve_file(path, content_type='text/html')
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_tools.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..40de2e5213f3551448c2931bf205bd3abce59f33
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_tools.py
@@ -0,0 +1,475 @@
+"""Test the various means of instantiating and invoking tools."""
+
+import gzip
+import io
+import sys
+import time
+import types
+import unittest
+import operator
+from http.client import IncompleteRead
+
+import cherrypy
+from cherrypy import tools
+from cherrypy._cpcompat import ntou
+from cherrypy.test import helper, _test_decorators
+
+
+*PY_VER_MINOR, _ = PY_VER_PATCH = sys.version_info[:3]
+# Refs:
+# bugs.python.org/issue39389
+# docs.python.org/3.7/whatsnew/changelog.html#python-3-7-7-release-candidate-1
+# docs.python.org/3.8/whatsnew/changelog.html#python-3-8-2-release-candidate-1
+HAS_GZIP_COMPRESSION_HEADER_FIXED = PY_VER_PATCH >= (3, 8, 2) or (
+ PY_VER_MINOR == (3, 7) and PY_VER_PATCH >= (3, 7, 7)
+)
+
+
+timeout = 0.2
+europoundUnicode = ntou('\x80\xa3')
+
+
+# Client-side code #
+
+
+class ToolTests(helper.CPWebCase):
+
+    @staticmethod
+    def setup_server():
+
+        # Put check_access in a custom toolbox with its own namespace
+        myauthtools = cherrypy._cptools.Toolbox('myauth')
+
+        def check_access(default=False):
+            if not getattr(cherrypy.request, 'userid', default):
+                raise cherrypy.HTTPError(401)
+        myauthtools.check_access = cherrypy.Tool(
+            'before_request_body', check_access)
+
+        def numerify():
+            def number_it(body):
+                for chunk in body:
+                    for k, v in cherrypy.request.numerify_map:
+                        chunk = chunk.replace(k, v)
+                    yield chunk
+            cherrypy.response.body = number_it(cherrypy.response.body)
+
+        class NumTool(cherrypy.Tool):
+
+            def _setup(self):
+                def makemap():
+                    m = self._merged_args().get('map', {})
+                    cherrypy.request.numerify_map = list(m.items())
+                cherrypy.request.hooks.attach('on_start_resource', makemap)
+
+                def critical():
+                    cherrypy.request.error_response = cherrypy.HTTPError(
+                        502).set_response
+                critical.failsafe = True
+
+                cherrypy.request.hooks.attach('on_start_resource', critical)
+                cherrypy.request.hooks.attach(self._point, self.callable)
+
+        tools.numerify = NumTool('before_finalize', numerify)
+
+        # It's not mandatory to inherit from cherrypy.Tool.
+        class NadsatTool:
+
+            def __init__(self):
+                self.ended = {}
+                self._name = 'nadsat'
+
+            def nadsat(self):
+                def nadsat_it_up(body):
+                    for chunk in body:
+                        chunk = chunk.replace(b'good', b'horrorshow')
+                        chunk = chunk.replace(b'piece', b'lomtick')
+                        yield chunk
+                cherrypy.response.body = nadsat_it_up(cherrypy.response.body)
+            nadsat.priority = 0
+
+            def cleanup(self):
+                # This runs after the request has been completely written out.
+                cherrypy.response.body = [b'razdrez']
+                id = cherrypy.request.params.get('id')
+                if id:
+                    self.ended[id] = True
+            cleanup.failsafe = True
+
+            def _setup(self):
+                cherrypy.request.hooks.attach('before_finalize', self.nadsat)
+                cherrypy.request.hooks.attach('on_end_request', self.cleanup)
+        tools.nadsat = NadsatTool()
+
+        def pipe_body():
+            cherrypy.request.process_request_body = False
+            clen = int(cherrypy.request.headers['Content-Length'])
+            cherrypy.request.body = cherrypy.request.rfile.read(clen)
+
+        # Assert that we can use a callable object instead of a function.
+        class Rotator(object):
+
+            def __call__(self, scale):
+                r = cherrypy.response
+                r.collapse_body()
+                r.body = [bytes([(x + scale) % 256 for x in r.body[0]])]
+        cherrypy.tools.rotator = cherrypy.Tool('before_finalize', Rotator())
+
+        def stream_handler(next_handler, *args, **kwargs):
+            actual = cherrypy.request.config.get('tools.streamer.arg')
+            assert actual == 'arg value'
+            cherrypy.response.output = o = io.BytesIO()
+            try:
+                next_handler(*args, **kwargs)
+                # Ignore the response and return our accumulated output
+                # instead.
+                return o.getvalue()
+            finally:
+                o.close()
+        cherrypy.tools.streamer = cherrypy._cptools.HandlerWrapperTool(
+            stream_handler)
+
+        class Root:
+
+            @cherrypy.expose
+            def index(self):
+                return 'Howdy earth!'
+
+            @cherrypy.expose
+            @cherrypy.config(**{
+                'tools.streamer.on': True,
+                'tools.streamer.arg': 'arg value',
+            })
+            def tarfile(self):
+                actual = cherrypy.request.config.get('tools.streamer.arg')
+                assert actual == 'arg value'
+                cherrypy.response.output.write(b'I am ')
+                cherrypy.response.output.write(b'a tarfile')
+
+            @cherrypy.expose
+            def euro(self):
+                hooks = list(cherrypy.request.hooks['before_finalize'])
+                hooks.sort()
+                cbnames = [x.callback.__name__ for x in hooks]
+                assert cbnames == ['gzip'], cbnames
+                priorities = [x.priority for x in hooks]
+                assert priorities == [80], priorities
+                yield ntou('Hello,')
+                yield ntou('world')
+                yield europoundUnicode
+
+            # Bare hooks
+            @cherrypy.expose
+            @cherrypy.config(**{'hooks.before_request_body': pipe_body})
+            def pipe(self):
+                return cherrypy.request.body
+
+            # Multiple decorators; include kwargs just for fun.
+            # Note that rotator must run before gzip.
+            @cherrypy.expose
+            def decorated_euro(self, *vpath):
+                yield ntou('Hello,')
+                yield ntou('world')
+                yield europoundUnicode
+            decorated_euro = tools.gzip(compress_level=6)(decorated_euro)
+            decorated_euro = tools.rotator(scale=3)(decorated_euro)
+
+        root = Root()
+
+        class TestType(type):
+            """Metaclass which automatically exposes all functions in each
+            subclass, and adds an instance of the subclass as an attribute
+            of root.
+            """
+            def __init__(cls, name, bases, dct):
+                type.__init__(cls, name, bases, dct)
+                for value in dct.values():
+                    if isinstance(value, types.FunctionType):
+                        cherrypy.expose(value)
+                setattr(root, name.lower(), cls())
+        Test = TestType('Test', (object,), {})
+
+        # METHOD ONE:
+        # Declare Tools in _cp_config
+        @cherrypy.config(**{'tools.nadsat.on': True})
+        class Demo(Test):
+
+            def index(self, id=None):
+                return 'A good piece of cherry pie'
+
+            def ended(self, id):
+                return repr(tools.nadsat.ended[id])
+
+            def err(self, id=None):
+                raise ValueError()
+
+            def errinstream(self, id=None):
+                yield 'nonconfidential'
+                raise ValueError()
+                yield 'confidential'
+
+            # METHOD TWO: decorator using Tool()
+            # Applied by direct call rather than @-decorator syntax; the
+            # equivalent decorator form would be:
+            # @tools.check_access()
+            def restricted(self):
+                return 'Welcome!'
+            restricted = myauthtools.check_access()(restricted)
+            userid = restricted
+
+            def err_in_onstart(self):
+                return 'success!'
+
+            @cherrypy.config(**{'response.stream': True})
+            def stream(self, id=None):
+                for x in range(100000000):
+                    yield str(x)
+
+        conf = {
+            # METHOD THREE:
+            # Declare Tools in detached config
+            '/demo': {
+                'tools.numerify.on': True,
+                'tools.numerify.map': {b'pie': b'3.14159'},
+            },
+            '/demo/restricted': {
+                'request.show_tracebacks': False,
+            },
+            '/demo/userid': {
+                'request.show_tracebacks': False,
+                'myauth.check_access.default': True,
+            },
+            '/demo/errinstream': {
+                'response.stream': True,
+            },
+            '/demo/err_in_onstart': {
+                # Because this isn't a dict, on_start_resource will error.
+                'tools.numerify.map': 'pie->3.14159'
+            },
+            # Combined tools
+            '/euro': {
+                'tools.gzip.on': True,
+                'tools.encode.on': True,
+            },
+            # Priority specified in config
+            '/decorated_euro/subpath': {
+                'tools.gzip.priority': 10,
+            },
+            # Handler wrappers
+            '/tarfile': {'tools.streamer.on': True}
+        }
+        app = cherrypy.tree.mount(root, config=conf)
+        app.request_class.namespaces['myauth'] = myauthtools
+
+        root.tooldecs = _test_decorators.ToolExamples()
+
+    def testHookErrors(self):
+        self.getPage('/demo/?id=1')
+        # If body is "razdrez", then on_end_request is being called too early.
+        self.assertBody('A horrorshow lomtick of cherry 3.14159')
+        # If this fails, then on_end_request isn't being called at all.
+        time.sleep(0.1)
+        self.getPage('/demo/ended/1')
+        self.assertBody('True')
+
+        valerr = '\n    raise ValueError()\nValueError'
+        self.getPage('/demo/err?id=3')
+        # If body is "razdrez", then on_end_request is being called too early.
+        self.assertErrorPage(502, pattern=valerr)
+        # If this fails, then on_end_request isn't being called at all.
+        time.sleep(0.1)
+        self.getPage('/demo/ended/3')
+        self.assertBody('True')
+
+        # errinstream: if body is "razdrez", on_end_request ran too early.
+        if (cherrypy.server.protocol_version == 'HTTP/1.0' or
+                getattr(cherrypy.server, 'using_apache', False)):
+            self.getPage('/demo/errinstream?id=5')
+            # Because this error is raised after the response body has
+            # started, the status should not change to an error status.
+            self.assertStatus('200 OK')
+            self.assertBody('nonconfidential')
+        else:
+            # Because this error is raised after the response body has
+            # started, and because it's chunked output, an error is raised by
+            # the HTTP client when it encounters incomplete output.
+            self.assertRaises((ValueError, IncompleteRead), self.getPage,
+                              '/demo/errinstream?id=5')
+        # If this fails, then on_end_request isn't being called at all.
+        time.sleep(0.1)
+        self.getPage('/demo/ended/5')
+        self.assertBody('True')
+
+        # Test the "__call__" technique (compile-time decorator).
+        self.getPage('/demo/restricted')
+        self.assertErrorPage(401)
+
+        # Test compile-time decorator with kwargs from config.
+        self.getPage('/demo/userid')
+        self.assertBody('Welcome!')
+
+    def testEndRequestOnDrop(self):
+        old_timeout = None
+        try:
+            httpserver = cherrypy.server.httpserver
+            old_timeout = httpserver.timeout
+        except (AttributeError, IndexError):
+            return self.skip()
+
+        try:
+            httpserver.timeout = timeout
+
+            # Verify on_end_request fires even when the client drops.
+            self.persistent = True
+            try:
+                conn = self.HTTP_CONN
+                conn.putrequest('GET', '/demo/stream?id=9', skip_host=True)
+                conn.putheader('Host', self.HOST)
+                conn.endheaders()
+                # Skip the rest of the request and close the conn. This will
+                # cause the server's active socket to error, which *should*
+                # result in the request being aborted, and request.close being
+                # called all the way up the stack (including WSGI middleware),
+                # eventually calling our on_end_request hook.
+            finally:
+                self.persistent = False
+            time.sleep(timeout * 2)
+            # Test that the on_end_request hook was called.
+            self.getPage('/demo/ended/9')
+            self.assertBody('True')
+        finally:
+            if old_timeout is not None:
+                httpserver.timeout = old_timeout
+
+ def testGuaranteedHooks(self):
+ # The 'critical' on_start_resource hook is 'failsafe' (guaranteed
+ # to run even if there are failures in other on_start methods).
+ # This is NOT true of the other hooks.
+ # Here, we have set up a failure in NumerifyTool.numerify_map,
+ # but our 'critical' hook should run and set the error to 502.
+ self.getPage('/demo/err_in_onstart')
+ self.assertErrorPage(502)
+ tmpl = "AttributeError: 'str' object has no attribute '{attr}'"
+ expected_msg = tmpl.format(attr='items')
+ self.assertInBody(expected_msg)
+
+ def testCombinedTools(self):
+ expectedResult = (ntou('Hello,world') +
+ europoundUnicode).encode('utf-8')
+ zbuf = io.BytesIO()
+ zfile = gzip.GzipFile(mode='wb', fileobj=zbuf, compresslevel=9)
+ zfile.write(expectedResult)
+ zfile.close()
+
+ self.getPage('/euro',
+ headers=[
+ ('Accept-Encoding', 'gzip'),
+ ('Accept-Charset', 'ISO-8859-1,utf-8;q=0.7,*;q=0.7')])
+ self.assertInBody(zbuf.getvalue()[:3])
+
+ if not HAS_GZIP_COMPRESSION_HEADER_FIXED:
+ # NOTE: CherryPy adopts a fix from the CPython bug 39389
+ # NOTE: introducing a variable compression XFL flag that
+ # NOTE: was hardcoded to "best compression" before. And so
+ # NOTE: we can only test it on CPython versions that also
+ # NOTE: implement this fix.
+ return
+ zbuf = io.BytesIO()
+ zfile = gzip.GzipFile(mode='wb', fileobj=zbuf, compresslevel=6)
+ zfile.write(expectedResult)
+ zfile.close()
+
+ self.getPage('/decorated_euro', headers=[('Accept-Encoding', 'gzip')])
+ self.assertInBody(zbuf.getvalue()[:3])
+
+ # This returns a different value because gzip's priority was
+ # lowered in conf, allowing the rotator to run after gzip.
+ # Of course, we don't want breakage in production apps,
+ # but it proves the priority was changed.
+ self.getPage('/decorated_euro/subpath',
+ headers=[('Accept-Encoding', 'gzip')])
+ self.assertInBody(bytes([(x + 3) % 256 for x in zbuf.getvalue()]))
+
+ def testBareHooks(self):
+ content = 'bit of a pain in me gulliver'
+ self.getPage('/pipe',
+ headers=[('Content-Length', str(len(content))),
+ ('Content-Type', 'text/plain')],
+ method='POST', body=content)
+ self.assertBody(content)
+
+ def testHandlerWrapperTool(self):
+ self.getPage('/tarfile')
+ self.assertBody('I am a tarfile')
+
+ def testToolWithConfig(self):
+ if not sys.version_info >= (2, 5):
+ return self.skip('skipped (Python 2.5+ only)')
+
+ self.getPage('/tooldecs/blah')
+ self.assertHeader('Content-Type', 'application/data')
+
+ def testWarnToolOn(self):
+ # get
+ try:
+ cherrypy.tools.numerify.on
+ except AttributeError:
+ pass
+ else:
+ raise AssertionError('Tool.on did not error as it should have.')
+
+ # set
+ try:
+ cherrypy.tools.numerify.on = True
+ except AttributeError:
+ pass
+ else:
+ raise AssertionError('Tool.on did not error as it should have.')
+
+ def testDecorator(self):
+ @cherrypy.tools.register('on_start_resource')
+ def example():
+ pass
+ self.assertTrue(isinstance(cherrypy.tools.example, cherrypy.Tool))
+ self.assertEqual(cherrypy.tools.example._point, 'on_start_resource')
+
+ @cherrypy.tools.register( # noqa: F811
+ 'before_finalize', name='renamed', priority=60,
+ )
+ def example(): # noqa: F811
+ pass
+ self.assertTrue(isinstance(cherrypy.tools.renamed, cherrypy.Tool))
+ self.assertEqual(cherrypy.tools.renamed._point, 'before_finalize')
+ self.assertEqual(cherrypy.tools.renamed._name, 'renamed')
+ self.assertEqual(cherrypy.tools.renamed._priority, 60)
+
+
+class SessionAuthTest(unittest.TestCase):
+
+ def test_login_screen_returns_bytes(self):
+ """
+ login_screen must return bytes even if unicode parameters are passed.
+ Issue 1132 revealed that login_screen would return unicode if the
+ username and password were unicode.
+ """
+ sa = cherrypy.lib.cptools.SessionAuth()
+ res = sa.login_screen(None, username=str('nobody'),
+ password=str('anypass'))
+ self.assertTrue(isinstance(res, bytes))
+
+
+class TestHooks:
+ def test_priorities(self):
+ """
+ Hooks should sort by priority order.
+ """
+ Hook = cherrypy._cprequest.Hook
+ hooks = [
+ Hook(None, priority=48),
+ Hook(None),
+ Hook(None, priority=49),
+ ]
+ hooks.sort()
+ by_priority = operator.attrgetter('priority')
+ priorities = list(map(by_priority, hooks))
+ assert priorities == [48, 49, 50]
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_tutorials.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_tutorials.py
new file mode 100644
index 0000000000000000000000000000000000000000..39ca4d6f2393c90a4ed318b9faa0d99bd219183a
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_tutorials.py
@@ -0,0 +1,204 @@
+import sys
+import importlib
+
+import cherrypy
+from cherrypy.test import helper
+
+
+class TutorialTest(helper.CPWebCase):
+
+ @classmethod
+ def setup_server(cls):
+ """
+ Mount something so the engine starts.
+ """
+ class Dummy:
+ pass
+ cherrypy.tree.mount(Dummy())
+
+ @staticmethod
+ def load_module(name):
+ """
+ Import or reload tutorial module as needed.
+ """
+ target = 'cherrypy.tutorial.' + name
+ if target in sys.modules:
+ module = importlib.reload(sys.modules[target])
+ else:
+ module = importlib.import_module(target)
+ return module
+
+ @classmethod
+ def setup_tutorial(cls, name, root_name, config={}):
+ cherrypy.config.reset()
+ module = cls.load_module(name)
+ root = getattr(module, root_name)
+ conf = getattr(module, 'tutconf')
+ class_types = type,
+ if isinstance(root, class_types):
+ root = root()
+ cherrypy.tree.mount(root, config=conf)
+ cherrypy.config.update(config)
+
+ def test01HelloWorld(self):
+ self.setup_tutorial('tut01_helloworld', 'HelloWorld')
+ self.getPage('/')
+ self.assertBody('Hello world!')
+
+ def test02ExposeMethods(self):
+ self.setup_tutorial('tut02_expose_methods', 'HelloWorld')
+ self.getPage('/show_msg')
+ self.assertBody('Hello world!')
+
+ def test03GetAndPost(self):
+ self.setup_tutorial('tut03_get_and_post', 'WelcomePage')
+
+ # Try different GET queries
+ self.getPage('/greetUser?name=Bob')
+ self.assertBody("Hey Bob, what's up?")
+
+ self.getPage('/greetUser')
+ self.assertBody('Please enter your name <a href="./">here</a>.')
+
+ self.getPage('/greetUser?name=')
+ self.assertBody('No, really, enter your name <a href="./">here</a>.')
+
+ # Try the same with POST
+ self.getPage('/greetUser', method='POST', body='name=Bob')
+ self.assertBody("Hey Bob, what's up?")
+
+ self.getPage('/greetUser', method='POST', body='name=')
+ self.assertBody('No, really, enter your name <a href="./">here</a>.')
+
+ def test04ComplexSite(self):
+ self.setup_tutorial('tut04_complex_site', 'root')
+
+ msg = '''
+ <p>Here are some extra useful links:</p>
+
+ <ul>
+ <li><a href="http://del.icio.us">del.icio.us</a></li>
+ <li><a href="http://www.cherrypy.org">CherryPy</a></li>
+ </ul>
+
+ <p>[<a href="../">Return to links page</a>]</p>'''
+ self.getPage('/links/extra/')
+ self.assertBody(msg)
+
+ def test05DerivedObjects(self):
+ self.setup_tutorial('tut05_derived_objects', 'HomePage')
+ msg = '''
+ <html>
+ <head>
+ <title>Another Page</title>
+ <head>
+ <body>
+ <h2>Another Page</h2>
+
+ <p>
+ And this is the amazing second page!
+ </p>
+
+ </body>
+ </html>
+ '''
+ # the tutorial has some annoying spaces in otherwise blank lines
+ msg = msg.replace('</h2>\n\n', '</h2>\n \n')
+ msg = msg.replace('</p>\n\n', '</p>\n \n')
+ self.getPage('/another/')
+ self.assertBody(msg)
+
+ def test06DefaultMethod(self):
+ self.setup_tutorial('tut06_default_method', 'UsersPage')
+ self.getPage('/hendrik')
+ self.assertBody('Hendrik Mans, CherryPy co-developer & crazy German '
+ '(<a href="./">back</a>)')
+
+ def test07Sessions(self):
+ self.setup_tutorial('tut07_sessions', 'HitCounter')
+
+ self.getPage('/')
+ self.assertBody(
+ "\n During your current session, you've viewed this"
+ '\n page 1 times! Your life is a patio of fun!'
+ '\n ')
+
+ self.getPage('/', self.cookies)
+ self.assertBody(
+ "\n During your current session, you've viewed this"
+ '\n page 2 times! Your life is a patio of fun!'
+ '\n ')
+
+ def test08GeneratorsAndYield(self):
+ self.setup_tutorial('tut08_generators_and_yield', 'GeneratorDemo')
+ self.getPage('/')
+ self.assertBody('<html><body><h2>Generators rule!</h2>'
+ '<h3>List of users:</h3>'
+ 'Remi<br/>Carlos<br/>Hendrik<br/>Lorenzo Lamas<br/>'
+ '</body></html>')
+
+ def test09Files(self):
+ self.setup_tutorial('tut09_files', 'FileDemo')
+
+ # Test upload
+ filesize = 5
+ h = [('Content-type', 'multipart/form-data; boundary=x'),
+ ('Content-Length', str(105 + filesize))]
+ b = ('--x\n'
+ 'Content-Disposition: form-data; name="myFile"; '
+ 'filename="hello.txt"\r\n'
+ 'Content-Type: text/plain\r\n'
+ '\r\n')
+ b += 'a' * filesize + '\n' + '--x--\n'
+ self.getPage('/upload', h, 'POST', b)
+ self.assertBody('''<html>
+ <body>
+ myFile length: %d<br />
+ myFile filename: hello.txt<br />
+ myFile mime-type: text/plain
+ </body>
+ </html>''' % filesize)
+
+ # Test download
+ self.getPage('/download')
+ self.assertStatus('200 OK')
+ self.assertHeader('Content-Type', 'application/x-download')
+ self.assertHeader('Content-Disposition',
+ # Make sure the filename is quoted.
+ 'attachment; filename="pdf_file.pdf"')
+ self.assertEqual(len(self.body), 85698)
+
+ def test10HTTPErrors(self):
+ self.setup_tutorial('tut10_http_errors', 'HTTPErrorDemo')
+
+ @cherrypy.expose
+ def traceback_setting():
+ return repr(cherrypy.request.show_tracebacks)
+ cherrypy.tree.mount(traceback_setting, '/traceback_setting')
+
+ self.getPage('/')
+ self.assertInBody("""<a href="toggleTracebacks">""")
+ self.assertInBody("""<a href="/doesNotExist">""")
+ self.assertInBody("""<a href="/error?code=403">""")
+ self.assertInBody("""<a href="/error?code=500">""")
+ self.assertInBody("""<a href="/messageArg">""")
+
+ self.getPage('/traceback_setting')
+ setting = self.body
+ self.getPage('/toggleTracebacks')
+ self.assertStatus((302, 303))
+ self.getPage('/traceback_setting')
+ self.assertBody(str(not eval(setting)))
+
+ self.getPage('/error?code=500')
+ self.assertStatus(500)
+ self.assertInBody('The server encountered an unexpected condition '
+ 'which prevented it from fulfilling the request.')
+
+ self.getPage('/error?code=403')
+ self.assertStatus(403)
+ self.assertInBody("<h2>You can't do that!</h2>")
+
+ self.getPage('/messageArg')
+ self.assertStatus(500)
+ self.assertInBody("If you construct an HTTPError with a 'message'")
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_virtualhost.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_virtualhost.py
new file mode 100644
index 0000000000000000000000000000000000000000..de88f9272aa6b519392c6bc2d4e5243bac543c36
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_virtualhost.py
@@ -0,0 +1,113 @@
+import os
+
+import cherrypy
+from cherrypy.test import helper
+
+curdir = os.path.join(os.getcwd(), os.path.dirname(__file__))
+
+
+class VirtualHostTest(helper.CPWebCase):
+
+ @staticmethod
+ def setup_server():
+ class Root:
+
+ @cherrypy.expose
+ def index(self):
+ return 'Hello, world'
+
+ @cherrypy.expose
+ def dom4(self):
+ return 'Under construction'
+
+ @cherrypy.expose
+ def method(self, value):
+ return 'You sent %s' % value
+
+ class VHost:
+
+ def __init__(self, sitename):
+ self.sitename = sitename
+
+ @cherrypy.expose
+ def index(self):
+ return 'Welcome to %s' % self.sitename
+
+ @cherrypy.expose
+ def vmethod(self, value):
+ return 'You sent %s' % value
+
+ @cherrypy.expose
+ def url(self):
+ return cherrypy.url('nextpage')
+
+ # Test static as a handler (section must NOT include vhost prefix)
+ static = cherrypy.tools.staticdir.handler(
+ section='/static', dir=curdir)
+
+ root = Root()
+ root.mydom2 = VHost('Domain 2')
+ root.mydom3 = VHost('Domain 3')
+ hostmap = {'www.mydom2.com': '/mydom2',
+ 'www.mydom3.com': '/mydom3',
+ 'www.mydom4.com': '/dom4',
+ }
+ cherrypy.tree.mount(root, config={
+ '/': {
+ 'request.dispatch': cherrypy.dispatch.VirtualHost(**hostmap)
+ },
+ # Test static in config (section must include vhost prefix)
+ '/mydom2/static2': {
+ 'tools.staticdir.on': True,
+ 'tools.staticdir.root': curdir,
+ 'tools.staticdir.dir': 'static',
+ 'tools.staticdir.index': 'index.html',
+ },
+ })
+
+ def testVirtualHost(self):
+ self.getPage('/', [('Host', 'www.mydom1.com')])
+ self.assertBody('Hello, world')
+ self.getPage('/mydom2/', [('Host', 'www.mydom1.com')])
+ self.assertBody('Welcome to Domain 2')
+
+ self.getPage('/', [('Host', 'www.mydom2.com')])
+ self.assertBody('Welcome to Domain 2')
+ self.getPage('/', [('Host', 'www.mydom3.com')])
+ self.assertBody('Welcome to Domain 3')
+ self.getPage('/', [('Host', 'www.mydom4.com')])
+ self.assertBody('Under construction')
+
+ # Test GET, POST, and positional params
+ self.getPage('/method?value=root')
+ self.assertBody('You sent root')
+ self.getPage('/vmethod?value=dom2+GET', [('Host', 'www.mydom2.com')])
+ self.assertBody('You sent dom2 GET')
+ self.getPage('/vmethod', [('Host', 'www.mydom3.com')], method='POST',
+ body='value=dom3+POST')
+ self.assertBody('You sent dom3 POST')
+ self.getPage('/vmethod/pos', [('Host', 'www.mydom3.com')])
+ self.assertBody('You sent pos')
+
+ # Test that cherrypy.url uses the browser url, not the virtual url
+ self.getPage('/url', [('Host', 'www.mydom2.com')])
+ self.assertBody('%s://www.mydom2.com/nextpage' % self.scheme)
+
+ def test_VHost_plus_Static(self):
+ # Test static as a handler
+ self.getPage('/static/style.css', [('Host', 'www.mydom2.com')])
+ self.assertStatus('200 OK')
+ self.assertHeader('Content-Type', 'text/css;charset=utf-8')
+
+ # Test static in config
+ self.getPage('/static2/dirback.jpg', [('Host', 'www.mydom2.com')])
+ self.assertStatus('200 OK')
+ self.assertHeaderIn('Content-Type', ['image/jpeg', 'image/pjpeg'])
+
+ # Test static config with "index" arg
+ self.getPage('/static2/', [('Host', 'www.mydom2.com')])
+ self.assertStatus('200 OK')
+ self.assertBody('Hello, world\r\n')
+ # Since tools.trailing_slash is on by default, this should redirect
+ self.getPage('/static2', [('Host', 'www.mydom2.com')])
+ self.assertStatus(301)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_wsgi_ns.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_wsgi_ns.py
new file mode 100644
index 0000000000000000000000000000000000000000..3545724c54d340d414f310cd593f3ec10ca2c16c
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_wsgi_ns.py
@@ -0,0 +1,93 @@
+import cherrypy
+from cherrypy.test import helper
+
+
+class WSGI_Namespace_Test(helper.CPWebCase):
+
+ @staticmethod
+ def setup_server():
+
+ class WSGIResponse(object):
+
+ def __init__(self, appresults):
+ self.appresults = appresults
+ self.iter = iter(appresults)
+
+ def __iter__(self):
+ return self
+
+ def next(self):
+ return self.iter.next()
+
+ def __next__(self):
+ return next(self.iter)
+
+ def close(self):
+ if hasattr(self.appresults, 'close'):
+ self.appresults.close()
+
+ class ChangeCase(object):
+
+ def __init__(self, app, to=None):
+ self.app = app
+ self.to = to
+
+ def __call__(self, environ, start_response):
+ res = self.app(environ, start_response)
+
+ class CaseResults(WSGIResponse):
+
+ def next(this):
+ return getattr(this.iter.next(), self.to)()
+
+ def __next__(this):
+ return getattr(next(this.iter), self.to)()
+ return CaseResults(res)
+
+ class Replacer(object):
+
+ def __init__(self, app, map={}):
+ self.app = app
+ self.map = map
+
+ def __call__(self, environ, start_response):
+ res = self.app(environ, start_response)
+
+ class ReplaceResults(WSGIResponse):
+
+ def next(this):
+ line = this.iter.next()
+ for k, v in self.map.iteritems():
+ line = line.replace(k, v)
+ return line
+
+ def __next__(this):
+ line = next(this.iter)
+ for k, v in self.map.items():
+ line = line.replace(k, v)
+ return line
+ return ReplaceResults(res)
+
+ class Root(object):
+
+ @cherrypy.expose
+ def index(self):
+ return 'HellO WoRlD!'
+
+ root_conf = {'wsgi.pipeline': [('replace', Replacer)],
+ 'wsgi.replace.map': {b'L': b'X',
+ b'l': b'r'},
+ }
+
+ app = cherrypy.Application(Root())
+ app.wsgiapp.pipeline.append(('changecase', ChangeCase))
+ app.wsgiapp.config['changecase'] = {'to': 'upper'}
+ cherrypy.tree.mount(app, config={'/': root_conf})
+
+ def test_pipeline(self):
+ if not cherrypy.server.httpserver:
+ return self.skip()
+
+ self.getPage('/')
+ # If body is "HEXXO WORXD!", the middleware was applied out of order.
+ self.assertBody('HERRO WORRD!')
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_wsgi_unix_socket.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_wsgi_unix_socket.py
new file mode 100644
index 0000000000000000000000000000000000000000..df0ab5f8d7033666dc9499a9274f25915f7e3650
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_wsgi_unix_socket.py
@@ -0,0 +1,92 @@
+import os
+import socket
+import atexit
+import tempfile
+from http.client import HTTPConnection
+
+import pytest
+
+import cherrypy
+from cherrypy.test import helper
+
+
+def usocket_path():
+ fd, path = tempfile.mkstemp('cp_test.sock')
+ os.close(fd)
+ os.remove(path)
+ return path
+
+
+USOCKET_PATH = usocket_path()
+
+
+class USocketHTTPConnection(HTTPConnection):
+ """
+ HTTPConnection over a unix socket.
+ """
+
+ def __init__(self, path):
+ HTTPConnection.__init__(self, 'localhost')
+ self.path = path
+
+ def __call__(self, *args, **kwargs):
+ """
+ Catch-all method just to present itself as a constructor for the
+ HTTPConnection.
+ """
+ return self
+
+ def connect(self):
+ """
+ Override the connect method and assign a unix socket as a transport.
+ """
+ sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+ sock.connect(self.path)
+ self.sock = sock
+ atexit.register(lambda: os.remove(self.path))
+
+
+@pytest.mark.skipif("sys.platform == 'win32'")
+class WSGI_UnixSocket_Test(helper.CPWebCase):
+ """
+ Test basic behavior on a cherrypy wsgi server listening
+ on a unix socket.
+
+ It exercises the config option `server.socket_file`.
+ """
+ HTTP_CONN = USocketHTTPConnection(USOCKET_PATH)
+
+ @staticmethod
+ def setup_server():
+ class Root(object):
+
+ @cherrypy.expose
+ def index(self):
+ return 'Test OK'
+
+ @cherrypy.expose
+ def error(self):
+ raise Exception('Invalid page')
+
+ config = {
+ 'server.socket_file': USOCKET_PATH
+ }
+ cherrypy.config.update(config)
+ cherrypy.tree.mount(Root())
+
+ def tearDown(self):
+ cherrypy.config.update({'server.socket_file': None})
+
+ def test_simple_request(self):
+ self.getPage('/')
+ self.assertStatus('200 OK')
+ self.assertInBody('Test OK')
+
+ def test_not_found(self):
+ self.getPage('/invalid_path')
+ self.assertStatus('404 Not Found')
+
+ def test_internal_error(self):
+ self.getPage('/error')
+ self.assertStatus('500 Internal Server Error')
+ self.assertInBody('Invalid page')
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_wsgi_vhost.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_wsgi_vhost.py
new file mode 100644
index 0000000000000000000000000000000000000000..2b6e5ba90944ab91cd7ff3e52ee91fdd38b62d38
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_wsgi_vhost.py
@@ -0,0 +1,35 @@
+import cherrypy
+from cherrypy.test import helper
+
+
+class WSGI_VirtualHost_Test(helper.CPWebCase):
+
+ @staticmethod
+ def setup_server():
+
+ class ClassOfRoot(object):
+
+ def __init__(self, name):
+ self.name = name
+
+ @cherrypy.expose
+ def index(self):
+ return 'Welcome to the %s website!' % self.name
+
+ default = cherrypy.Application(None)
+
+ domains = {}
+ for year in range(1997, 2008):
+ app = cherrypy.Application(ClassOfRoot('Class of %s' % year))
+ domains['www.classof%s.example' % year] = app
+
+ cherrypy.tree.graft(cherrypy._cpwsgi.VirtualHost(default, domains))
+
+ def test_welcome(self):
+ if not cherrypy.server.using_wsgi:
+ return self.skip('skipped (not using WSGI)... ')
+
+ for year in range(1997, 2008):
+ self.getPage(
+ '/', headers=[('Host', 'www.classof%s.example' % year)])
+ self.assertBody('Welcome to the Class of %s website!' % year)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_wsgiapps.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_wsgiapps.py
new file mode 100644
index 0000000000000000000000000000000000000000..1b3bf28faeb68d7abf04290ebebed281edc20751
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_wsgiapps.py
@@ -0,0 +1,120 @@
+import sys
+
+import cherrypy
+from cherrypy._cpcompat import ntob
+from cherrypy.test import helper
+
+
+class WSGIGraftTests(helper.CPWebCase):
+
+ @staticmethod
+ def setup_server():
+
+ def test_app(environ, start_response):
+ status = '200 OK'
+ response_headers = [('Content-type', 'text/plain')]
+ start_response(status, response_headers)
+ output = ['Hello, world!\n',
+ 'This is a wsgi app running within CherryPy!\n\n']
+ keys = list(environ.keys())
+ keys.sort()
+ for k in keys:
+ output.append('%s: %s\n' % (k, environ[k]))
+ return [ntob(x, 'utf-8') for x in output]
+
+ def test_empty_string_app(environ, start_response):
+ status = '200 OK'
+ response_headers = [('Content-type', 'text/plain')]
+ start_response(status, response_headers)
+ return [
+ b'Hello', b'', b' ', b'', b'world',
+ ]
+
+ class WSGIResponse(object):
+
+ def __init__(self, appresults):
+ self.appresults = appresults
+ self.iter = iter(appresults)
+
+ def __iter__(self):
+ return self
+
+ if sys.version_info >= (3, 0):
+ def __next__(self):
+ return next(self.iter)
+ else:
+ def next(self):
+ return self.iter.next()
+
+ def close(self):
+ if hasattr(self.appresults, 'close'):
+ self.appresults.close()
+
+ class ReversingMiddleware(object):
+
+ def __init__(self, app):
+ self.app = app
+
+ def __call__(self, environ, start_response):
+ results = app(environ, start_response)
+
+ class Reverser(WSGIResponse):
+
+ if sys.version_info >= (3, 0):
+ def __next__(this):
+ line = list(next(this.iter))
+ line.reverse()
+ return bytes(line)
+ else:
+ def next(this):
+ line = list(this.iter.next())
+ line.reverse()
+ return ''.join(line)
+
+ return Reverser(results)
+
+ class Root:
+
+ @cherrypy.expose
+ def index(self):
+ return ntob("I'm a regular CherryPy page handler!")
+
+ cherrypy.tree.mount(Root())
+
+ cherrypy.tree.graft(test_app, '/hosted/app1')
+ cherrypy.tree.graft(test_empty_string_app, '/hosted/app3')
+
+ # Set script_name explicitly to None to signal CP that it should
+ # be pulled from the WSGI environ each time.
+ app = cherrypy.Application(Root(), script_name=None)
+ cherrypy.tree.graft(ReversingMiddleware(app), '/hosted/app2')
+
+ wsgi_output = '''Hello, world!
+This is a wsgi app running within CherryPy!'''
+
+ def test_01_standard_app(self):
+ self.getPage('/')
+ self.assertBody("I'm a regular CherryPy page handler!")
+
+ def test_04_pure_wsgi(self):
+ if not cherrypy.server.using_wsgi:
+ return self.skip('skipped (not using WSGI)... ')
+ self.getPage('/hosted/app1')
+ self.assertHeader('Content-Type', 'text/plain')
+ self.assertInBody(self.wsgi_output)
+
+ def test_05_wrapped_cp_app(self):
+ if not cherrypy.server.using_wsgi:
+ return self.skip('skipped (not using WSGI)... ')
+ self.getPage('/hosted/app2/')
+ body = list("I'm a regular CherryPy page handler!")
+ body.reverse()
+ body = ''.join(body)
+ self.assertInBody(body)
+
+ def test_06_empty_string_app(self):
+ if not cherrypy.server.using_wsgi:
+ return self.skip('skipped (not using WSGI)... ')
+ self.getPage('/hosted/app3')
+ self.assertHeader('Content-Type', 'text/plain')
+ self.assertInBody('Hello world')
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_xmlrpc.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_xmlrpc.py
new file mode 100644
index 0000000000000000000000000000000000000000..61fde8bb2d303ea72bb2dd9b21a2a0ccefb8184a
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/test_xmlrpc.py
@@ -0,0 +1,149 @@
+import sys
+import socket
+
+from xmlrpc.client import (
+ DateTime, Fault,
+ ServerProxy, SafeTransport
+)
+
+import cherrypy
+from cherrypy import _cptools
+from cherrypy.test import helper
+
+HTTPSTransport = SafeTransport
+
+# Python 3.0's SafeTransport still mistakenly checks for socket.ssl
+if not hasattr(socket, 'ssl'):
+ socket.ssl = True
+
+
+def setup_server():
+
+ class Root:
+
+ @cherrypy.expose
+ def index(self):
+ return "I'm a standard index!"
+
+ class XmlRpc(_cptools.XMLRPCController):
+
+ @cherrypy.expose
+ def foo(self):
+ return 'Hello world!'
+
+ @cherrypy.expose
+ def return_single_item_list(self):
+ return [42]
+
+ @cherrypy.expose
+ def return_string(self):
+ return 'here is a string'
+
+ @cherrypy.expose
+ def return_tuple(self):
+ return ('here', 'is', 1, 'tuple')
+
+ @cherrypy.expose
+ def return_dict(self):
+ return dict(a=1, b=2, c=3)
+
+ @cherrypy.expose
+ def return_composite(self):
+ return dict(a=1, z=26), 'hi', ['welcome', 'friend']
+
+ @cherrypy.expose
+ def return_int(self):
+ return 42
+
+ @cherrypy.expose
+ def return_float(self):
+ return 3.14
+
+ @cherrypy.expose
+ def return_datetime(self):
+ return DateTime((2003, 10, 7, 8, 1, 0, 1, 280, -1))
+
+ @cherrypy.expose
+ def return_boolean(self):
+ return True
+
+ @cherrypy.expose
+ def test_argument_passing(self, num):
+ return num * 2
+
+ @cherrypy.expose
+ def test_returning_Fault(self):
+ return Fault(1, 'custom Fault response')
+
+ root = Root()
+ root.xmlrpc = XmlRpc()
+ cherrypy.tree.mount(root, config={'/': {
+ 'request.dispatch': cherrypy.dispatch.XMLRPCDispatcher(),
+ 'tools.xmlrpc.allow_none': 0,
+ }})
+
+
+class XmlRpcTest(helper.CPWebCase):
+ setup_server = staticmethod(setup_server)
+
+ def testXmlRpc(self):
+
+ scheme = self.scheme
+ if scheme == 'https':
+ url = 'https://%s:%s/xmlrpc/' % (self.interface(), self.PORT)
+ proxy = ServerProxy(url, transport=HTTPSTransport())
+ else:
+ url = 'http://%s:%s/xmlrpc/' % (self.interface(), self.PORT)
+ proxy = ServerProxy(url)
+
+ # begin the tests ...
+ self.getPage('/xmlrpc/foo')
+ self.assertBody('Hello world!')
+
+ self.assertEqual(proxy.return_single_item_list(), [42])
+ self.assertNotEqual(proxy.return_single_item_list(), 'one bazillion')
+ self.assertEqual(proxy.return_string(), 'here is a string')
+ self.assertEqual(proxy.return_tuple(),
+ list(('here', 'is', 1, 'tuple')))
+ self.assertEqual(proxy.return_dict(), {'a': 1, 'c': 3, 'b': 2})
+ self.assertEqual(proxy.return_composite(),
+ [{'a': 1, 'z': 26}, 'hi', ['welcome', 'friend']])
+ self.assertEqual(proxy.return_int(), 42)
+ self.assertEqual(proxy.return_float(), 3.14)
+ self.assertEqual(proxy.return_datetime(),
+ DateTime((2003, 10, 7, 8, 1, 0, 1, 280, -1)))
+ self.assertEqual(proxy.return_boolean(), True)
+ self.assertEqual(proxy.test_argument_passing(22), 22 * 2)
+
+ # Test an error in the page handler (should raise an xmlrpclib.Fault)
+ try:
+ proxy.test_argument_passing({})
+ except Exception:
+ x = sys.exc_info()[1]
+ self.assertEqual(x.__class__, Fault)
+ self.assertEqual(x.faultString, ('unsupported operand type(s) '
+ "for *: 'dict' and 'int'"))
+ else:
+ self.fail('Expected xmlrpclib.Fault')
+
+ # https://github.com/cherrypy/cherrypy/issues/533
+ # if a method is not found, an xmlrpclib.Fault should be raised
+ try:
+ proxy.non_method()
+ except Exception:
+ x = sys.exc_info()[1]
+ self.assertEqual(x.__class__, Fault)
+ self.assertEqual(x.faultString,
+ 'method "non_method" is not supported')
+ else:
+ self.fail('Expected xmlrpclib.Fault')
+
+ # Test returning a Fault from the page handler.
+ try:
+ proxy.test_returning_Fault()
+ except Exception:
+ x = sys.exc_info()[1]
+ self.assertEqual(x.__class__, Fault)
+ self.assertEqual(x.faultString, ('custom Fault response'))
+ else:
+ self.fail('Expected xmlrpclib.Fault')
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/test/webtest.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/webtest.py
new file mode 100644
index 0000000000000000000000000000000000000000..9fb6ce62b768b0e782294027d9a312349da910bf
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/test/webtest.py
@@ -0,0 +1,11 @@
+# for compatibility, expose cheroot webtest here
+import warnings
+
+from cheroot.test.webtest import ( # noqa
+ interface,
+ WebCase, cleanHeaders, shb, openURL,
+ ServerError, server_error,
+)
+
+
+warnings.warn('Use cheroot.test.webtest', DeprecationWarning)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/README.rst b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/README.rst
new file mode 100644
index 0000000000000000000000000000000000000000..c47e7d3259905dbc4cb7e572f61d662f97a331cc
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/README.rst
@@ -0,0 +1,16 @@
+CherryPy Tutorials
+------------------
+
+This is a series of tutorials explaining how to develop dynamic web
+applications using CherryPy. A couple of notes:
+
+
+- Each of these tutorials builds on the ones before it. If you're
+ new to CherryPy, we recommend you start with 01_helloworld.py and
+ work your way upwards. :)
+
+- In most of these tutorials, you will notice that all output is done
+ by returning normal Python strings, often using simple Python
+ variable substitution. In most real-world applications, you will
+ probably want to use a separate template package (like Cheetah,
+ CherryTemplate or XML/XSL).
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__init__.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..08c142c5f5755781f8790b7fbbf4274bd93b3702
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__init__.py
@@ -0,0 +1,3 @@
+
+# This is used in test_config to test unrepr of "from A import B"
+thing2 = object()
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..155fb2809b8082a206e47602f9121a97dd5e7c74
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__pycache__/tut01_helloworld.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__pycache__/tut01_helloworld.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4a7c2fa903bd3ac52a3afe3b7818138dc88923cc
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__pycache__/tut01_helloworld.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__pycache__/tut02_expose_methods.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__pycache__/tut02_expose_methods.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7a25b3243966c69f143f5d6da140078b7af0fa96
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__pycache__/tut02_expose_methods.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__pycache__/tut03_get_and_post.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__pycache__/tut03_get_and_post.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..fa3b79e51b66b7bc3733be97116296dc638779fc
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__pycache__/tut03_get_and_post.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__pycache__/tut04_complex_site.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__pycache__/tut04_complex_site.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..31a7db1aa57b677136f28bd70d946883a62448d0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__pycache__/tut04_complex_site.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__pycache__/tut05_derived_objects.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__pycache__/tut05_derived_objects.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c42c7cf58c3427c467af827321f93cf26816d79a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__pycache__/tut05_derived_objects.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__pycache__/tut06_default_method.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__pycache__/tut06_default_method.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bdc83574596bfb1993690c17d503079c63a9ced9
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__pycache__/tut06_default_method.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__pycache__/tut07_sessions.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__pycache__/tut07_sessions.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bccbf5d9eb13a4e29da073e2de230504035ba641
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__pycache__/tut07_sessions.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__pycache__/tut08_generators_and_yield.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__pycache__/tut08_generators_and_yield.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..21e22607d48f4b5f5976070332afeeb8d195734f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__pycache__/tut08_generators_and_yield.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__pycache__/tut09_files.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__pycache__/tut09_files.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..70c774efa097ee956c65d37ae532f76863bb112e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__pycache__/tut09_files.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__pycache__/tut10_http_errors.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__pycache__/tut10_http_errors.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..852ee2c84dd7b9b39e31dbc48d462d0332e9725e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/__pycache__/tut10_http_errors.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/custom_error.html b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/custom_error.html
new file mode 100644
index 0000000000000000000000000000000000000000..d0f30c8abe44a4bf00b214d8ebf37df3862be1cd
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/custom_error.html
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<html>
+<head>
+ <title>403 Unauthorized</title>
+</head>
+ <body>
+ <h2>You can't do that!</h2>
+ <p>%(message)s</p>
+ <p>This is a custom error page that is read from a file.<p>
+ <pre>%(traceback)s</pre>
+ </body>
+</html>
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/pdf_file.pdf b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/pdf_file.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..38b4f15eabdd65d4a674cb32034361245aa7b97e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/pdf_file.pdf differ
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/tut01_helloworld.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/tut01_helloworld.py
new file mode 100644
index 0000000000000000000000000000000000000000..e86793c8d6514698f0fe822e88e617cc80f66e96
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/tut01_helloworld.py
@@ -0,0 +1,34 @@
+"""
+Tutorial - Hello World
+
+The most basic (working) CherryPy application possible.
+"""
+
+import os.path
+
+# Import CherryPy global namespace
+import cherrypy
+
+
+class HelloWorld:
+
+ """ Sample request handler class. """
+
+ # Expose the index method through the web. CherryPy will never
+ # publish methods that don't have the exposed attribute set to True.
+ @cherrypy.expose
+ def index(self):
+ # CherryPy will call this method for the root URI ("/") and send
+ # its return value to the client. Because this is tutorial
+ # lesson number 01, we'll just send something really simple.
+ # How about...
+ return 'Hello world!'
+
+
+tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf')
+
+if __name__ == '__main__':
+ # CherryPy always starts with app.root when trying to map request URIs
+ # to objects, so we need to mount a request handler root. A request
+ # to '/' will be mapped to HelloWorld().index().
+ cherrypy.quickstart(HelloWorld(), config=tutconf)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/tut02_expose_methods.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/tut02_expose_methods.py
new file mode 100644
index 0000000000000000000000000000000000000000..8afbf7d8b054e6466f8f2adc145f3b191d893910
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/tut02_expose_methods.py
@@ -0,0 +1,32 @@
+"""
+Tutorial - Multiple methods
+
+This tutorial shows you how to link to other methods of your request
+handler.
+"""
+
+import os.path
+
+import cherrypy
+
+
+class HelloWorld:
+
+ @cherrypy.expose
+ def index(self):
+ # Let's link to another method here.
+ return 'We have an <a href="show_msg">important message</a> for you!'
+
+ @cherrypy.expose
+ def show_msg(self):
+ # Here's the important message!
+ return 'Hello world!'
+
+
+tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf')
+
+if __name__ == '__main__':
+ # CherryPy always starts with app.root when trying to map request URIs
+ # to objects, so we need to mount a request handler root. A request
+ # to '/' will be mapped to HelloWorld().index().
+ cherrypy.quickstart(HelloWorld(), config=tutconf)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/tut03_get_and_post.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/tut03_get_and_post.py
new file mode 100644
index 0000000000000000000000000000000000000000..0b3d46131f944a6b2f5d4ed543d05f2aba4c4090
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/tut03_get_and_post.py
@@ -0,0 +1,51 @@
+"""
+Tutorial - Passing variables
+
+This tutorial shows you how to pass GET/POST variables to methods.
+"""
+
+import os.path
+
+import cherrypy
+
+
+class WelcomePage:
+
+ @cherrypy.expose
+ def index(self):
+ # Ask for the user's name.
+ return '''
+ <form action="greetUser" method="GET">
+ What is your name?
+ <input type="text" name="name" />
+ <input type="submit" />
+ </form>'''
+
+ @cherrypy.expose
+ def greetUser(self, name=None):
+ # CherryPy passes all GET and POST variables as method parameters.
+ # It doesn't make a difference where the variables come from, how
+ # large their contents are, and so on.
+ #
+ # You can define default parameter values as usual. In this
+ # example, the "name" parameter defaults to None so we can check
+ # if a name was actually specified.
+
+ if name:
+ # Greet the user!
+ return "Hey %s, what's up?" % name
+ else:
+ if name is None:
+ # No name was specified
+ return 'Please enter your name <a href="./">here</a>.'
+ else:
+ return 'No, really, enter your name <a href="./">here</a>.'
+
+
+tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf')
+
+if __name__ == '__main__':
+ # CherryPy always starts with app.root when trying to map request URIs
+ # to objects, so we need to mount a request handler root. A request
+ # to '/' will be mapped to HelloWorld().index().
+ cherrypy.quickstart(WelcomePage(), config=tutconf)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/tut04_complex_site.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/tut04_complex_site.py
new file mode 100644
index 0000000000000000000000000000000000000000..3caa1775d4d927db1710b63af08344c2f4290e15
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/tut04_complex_site.py
@@ -0,0 +1,103 @@
+"""
+Tutorial - Multiple objects
+
+This tutorial shows you how to create a site structure through multiple
+possibly nested request handler objects.
+"""
+
+import os.path
+
+import cherrypy
+
+
+class HomePage:
+
+ @cherrypy.expose
+ def index(self):
+ return '''
+ <p>Hi, this is the home page! Check out the other
+ fun stuff on this site:</p>
+
+ <ul>
+ <li><a href="/joke/">A silly joke</a></li>
+ <li><a href="/links/">Useful links</a></li>
+ </ul>'''
+
+
+class JokePage:
+
+ @cherrypy.expose
+ def index(self):
+ return '''
+ <p>"In Python, how do you create a string of random
+ characters?" -- "Read a Perl file!"</p>
+ <p>[<a href="../">Return</a>]</p>'''
+
+
+class LinksPage:
+
+ def __init__(self):
+ # Request handler objects can create their own nested request
+ # handler objects. Simply create them inside their __init__
+ # methods!
+ self.extra = ExtraLinksPage()
+
+ @cherrypy.expose
+ def index(self):
+ # Note the way we link to the extra links page (and back).
+ # As you can see, this object doesn't really care about its
+ # absolute position in the site tree, since we use relative
+ # links exclusively.
+ return '''
+ <p>Here are some useful links:</p>
+
+ <ul>
+ <li>
+ <a href="http://www.cherrypy.org">The CherryPy Homepage</a>
+ </li>
+ <li>
+ <a href="http://www.python.org">The Python Homepage</a>
+ </li>
+ </ul>
+
+ <p>You can check out some extra useful
+ links <a href="./extra/">here</a>.</p>
+
+ <p>[<a href="../">Return</a>]</p>
+ '''
+
+
+class ExtraLinksPage:
+
+ @cherrypy.expose
+ def index(self):
+ # Note the relative link back to the Links page!
+ return '''
+ <p>Here are some extra useful links:</p>
+
+ <ul>
+ <li><a href="http://del.icio.us">del.icio.us</a></li>
+ <li><a href="http://www.cherrypy.org">CherryPy</a></li>
+ </ul>
+
+ <p>[<a href="../">Return to links page</a>]</p>'''
+
+
+# Of course we can also mount request handler objects right here!
+root = HomePage()
+root.joke = JokePage()
+root.links = LinksPage()
+
+# Remember, we don't need to mount ExtraLinksPage here, because
+# LinksPage does that itself on initialization. In fact, there is
+# no reason why you shouldn't let your root object take care of
+# creating all contained request handler objects.
+
+
+tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf')
+
+if __name__ == '__main__':
+ # CherryPy always starts with app.root when trying to map request URIs
+ # to objects, so we need to mount a request handler root. A request
+ # to '/' will be mapped to HelloWorld().index().
+ cherrypy.quickstart(root, config=tutconf)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/tut05_derived_objects.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/tut05_derived_objects.py
new file mode 100644
index 0000000000000000000000000000000000000000..f626e03f00b81379c8a80c263666b570cd45b5a4
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/tut05_derived_objects.py
@@ -0,0 +1,80 @@
+"""
+Tutorial - Object inheritance
+
+You are free to derive your request handler classes from any base
+class you wish. In most real-world applications, you will probably
+want to create a central base class used for all your pages, which takes
+care of things like printing a common page header and footer.
+"""
+
+import os.path
+
+import cherrypy
+
+
+class Page:
+ # Store the page title in a class attribute
+ title = 'Untitled Page'
+
+ def header(self):
+ return '''
+ <html>
+ <head>
+ <title>%s</title>
+ <head>
+ <body>
+ <h2>%s</h2>
+ ''' % (self.title, self.title)
+
+ def footer(self):
+ return '''
+ </body>
+ </html>
+ '''
+
+ # Note that header and footer don't get their exposed attributes
+ # set to True. This isn't necessary since the user isn't supposed
+ # to call header or footer directly; instead, we'll call them from
+ # within the actually exposed handler methods defined in this
+ # class' subclasses.
+
+
+class HomePage(Page):
+ # Different title for this page
+ title = 'Tutorial 5'
+
+ def __init__(self):
+ # create a subpage
+ self.another = AnotherPage()
+
+ @cherrypy.expose
+ def index(self):
+ # Note that we call the header and footer methods inherited
+ # from the Page class!
+ return self.header() + '''
+ <p>
+ Isn't this exciting? There's
+ <a href="./another/">another page</a>, too!
+ </p>
+ ''' + self.footer()
+
+
+class AnotherPage(Page):
+ title = 'Another Page'
+
+ @cherrypy.expose
+ def index(self):
+ return self.header() + '''
+ <p>
+ And this is the amazing second page!
+ </p>
+ ''' + self.footer()
+
+
+tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf')
+
+if __name__ == '__main__':
+ # CherryPy always starts with app.root when trying to map request URIs
+ # to objects, so we need to mount a request handler root. A request
+ # to '/' will be mapped to HelloWorld().index().
+ cherrypy.quickstart(HomePage(), config=tutconf)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/tut06_default_method.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/tut06_default_method.py
new file mode 100644
index 0000000000000000000000000000000000000000..0ce4cabe83e8f029d88a40a3d2669c04602bea55
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/tut06_default_method.py
@@ -0,0 +1,61 @@
+"""
+Tutorial - The default method
+
+Request handler objects can implement a method called "default" that
+is called when no other suitable method/object could be found.
+Essentially, if CherryPy2 can't find a matching request handler object
+for the given request URI, it will use the default method of the object
+located deepest on the URI path.
+
+Using this mechanism you can easily simulate virtual URI structures
+by parsing the extra URI string, which you can access through
+cherrypy.request.virtualPath.
+
+The application in this tutorial simulates an URI structure looking
+like /users/<username>. Since the <username> bit will not be found (as
+there are no matching methods), it is handled by the default method.
+"""
+
+import os.path
+
+import cherrypy
+
+
+class UsersPage:
+
+ @cherrypy.expose
+ def index(self):
+ # Since this is just a stupid little example, we'll simply
+ # display a list of links to random, made-up users. In a real
+ # application, this could be generated from a database result set.
+ return '''
+ <a href="./remi">Remi Delon</a><br/>
+ <a href="./hendrik">Hendrik Mans</a><br/>
+ <a href="./lorenzo">Lorenzo Lamas</a><br/>
+ '''
+
+ @cherrypy.expose
+ def default(self, user):
+ # Here we react depending on the virtualPath -- the part of the
+ # path that could not be mapped to an object method. In a real
+ # application, we would probably do some database lookups here
+ # instead of the silly if/elif/else construct.
+ if user == 'remi':
+ out = 'Remi Delon, CherryPy lead developer'
+ elif user == 'hendrik':
+ out = 'Hendrik Mans, CherryPy co-developer & crazy German'
+ elif user == 'lorenzo':
+ out = 'Lorenzo Lamas, famous actor and singer!'
+ else:
+ out = 'Unknown user. :-('
+
+ return '%s (<a href="./">back</a>)' % out
+
+
+tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf')
+
+if __name__ == '__main__':
+ # CherryPy always starts with app.root when trying to map request URIs
+ # to objects, so we need to mount a request handler root. A request
+ # to '/' will be mapped to HelloWorld().index().
+ cherrypy.quickstart(UsersPage(), config=tutconf)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/tut07_sessions.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/tut07_sessions.py
new file mode 100644
index 0000000000000000000000000000000000000000..204322b58a79d93be8a5100eefb3e4f6e73a798f
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/tut07_sessions.py
@@ -0,0 +1,41 @@
+"""
+Tutorial - Sessions
+
+Storing session data in CherryPy applications is very easy: cherrypy
+provides a dictionary called "session" that represents the session
+data for the current user. If you use RAM based sessions, you can store
+any kind of object into that dictionary; otherwise, you are limited to
+objects that can be pickled.
+"""
+
+import os.path
+
+import cherrypy
+
+
+class HitCounter:
+
+ _cp_config = {'tools.sessions.on': True}
+
+ @cherrypy.expose
+ def index(self):
+ # Increase the silly hit counter
+ count = cherrypy.session.get('count', 0) + 1
+
+ # Store the new value in the session dictionary
+ cherrypy.session['count'] = count
+
+ # And display a silly hit count message!
+ return '''
+ During your current session, you've viewed this
+ page %s times! Your life is a patio of fun!
+ ''' % count
+
+
+tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf')
+
+if __name__ == '__main__':
+ # CherryPy always starts with app.root when trying to map request URIs
+ # to objects, so we need to mount a request handler root. A request
+ # to '/' will be mapped to HelloWorld().index().
+ cherrypy.quickstart(HitCounter(), config=tutconf)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/tut08_generators_and_yield.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/tut08_generators_and_yield.py
new file mode 100644
index 0000000000000000000000000000000000000000..18f42f9340b5d1e4f660861025e6d2902755e8e2
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/tut08_generators_and_yield.py
@@ -0,0 +1,44 @@
+"""
+Bonus Tutorial: Using generators to return result bodies
+
+Instead of returning a complete result string, you can use the yield
+statement to return one result part after another. This may be convenient
+in situations where using a template package like CherryPy or Cheetah
+would be overkill, and messy string concatenation too uncool. ;-)
+"""
+
+import os.path
+
+import cherrypy
+
+
+class GeneratorDemo:
+
+ def header(self):
+ return '<html><body><h2>Generators rule!</h2>'
+
+ def footer(self):
+ return '</body></html>'
+
+ @cherrypy.expose
+ def index(self):
+ # Let's make up a list of users for presentation purposes
+ users = ['Remi', 'Carlos', 'Hendrik', 'Lorenzo Lamas']
+
+ # Every yield line adds one part to the total result body.
+ yield self.header()
+ yield '<h3>List of users:</h3>'
+
+ for user in users:
+ yield '%s<br/>' % user
+
+ yield self.footer()
+
+
+tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf')
+
+if __name__ == '__main__':
+ # CherryPy always starts with app.root when trying to map request URIs
+ # to objects, so we need to mount a request handler root. A request
+ # to '/' will be mapped to HelloWorld().index().
+ cherrypy.quickstart(GeneratorDemo(), config=tutconf)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/tut09_files.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/tut09_files.py
new file mode 100644
index 0000000000000000000000000000000000000000..48585cbe1c7add02c925c20c0a6184efadf54074
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/tut09_files.py
@@ -0,0 +1,105 @@
+"""
+
+Tutorial: File upload and download
+
+Uploads
+-------
+
+When a client uploads a file to a CherryPy application, it's placed
+on disk immediately. CherryPy will pass it to your exposed method
+as an argument (see "myFile" below); that arg will have a "file"
+attribute, which is a handle to the temporary uploaded file.
+If you wish to permanently save the file, you need to read()
+from myFile.file and write() somewhere else.
+
+Note the use of 'enctype="multipart/form-data"' and 'input type="file"'
+in the HTML which the client uses to upload the file.
+
+
+Downloads
+---------
+
+If you wish to send a file to the client, you have two options:
+First, you can simply return a file-like object from your page handler.
+CherryPy will read the file and serve it as the content (HTTP body)
+of the response. However, that doesn't tell the client that
+the response is a file to be saved, rather than displayed.
+Use cherrypy.lib.static.serve_file for that; it takes four
+arguments:
+
+serve_file(path, content_type=None, disposition=None, name=None)
+
+Set "name" to the filename that you expect clients to use when they save
+your file. Note that the "name" argument is ignored if you don't also
+provide a "disposition" (usually "attachement"). You can manually set
+"content_type", but be aware that if you also use the encoding tool, it
+may choke if the file extension is not recognized as belonging to a known
+Content-Type. Setting the content_type to "application/x-download" works
+in most cases, and should prompt the user with an Open/Save dialog in
+popular browsers.
+
+"""
+
+import os
+import os.path
+
+import cherrypy
+from cherrypy.lib import static
+
+localDir = os.path.dirname(__file__)
+absDir = os.path.join(os.getcwd(), localDir)
+
+
+class FileDemo(object):
+
+ @cherrypy.expose
+ def index(self):
+ return """
+ <html><body>
+ <h2>Upload a file</h2>
+ <form action="upload" method="post" enctype="multipart/form-data">
+ filename: <input type="file" name="myFile" /><br />
+ <input type="submit" />
+ </form>
+ <h2>Download a file</h2>
+ <a href='download'>This one</a>
+ </body></html>
+ """
+
+ @cherrypy.expose
+ def upload(self, myFile):
+ out = """<html>
+ <body>
+ myFile length: %s<br />
+ myFile filename: %s<br />
+ myFile mime-type: %s
+ </body>
+ </html>"""
+
+ # Although this just counts the file length, it demonstrates
+ # how to read large files in chunks instead of all at once.
+ # CherryPy reads the uploaded file into a temporary file;
+ # myFile.file.read reads from that.
+ size = 0
+ while True:
+ data = myFile.file.read(8192)
+ if not data:
+ break
+ size += len(data)
+
+ return out % (size, myFile.filename, myFile.content_type)
+
+ @cherrypy.expose
+ def download(self):
+ path = os.path.join(absDir, 'pdf_file.pdf')
+ return static.serve_file(path, 'application/x-download',
+ 'attachment', os.path.basename(path))
+
+
+tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf')
+
+if __name__ == '__main__':
+ # CherryPy always starts with app.root when trying to map request URIs
+ # to objects, so we need to mount a request handler root. A request
+ # to '/' will be mapped to HelloWorld().index().
+ cherrypy.quickstart(FileDemo(), config=tutconf)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/tut10_http_errors.py b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/tut10_http_errors.py
new file mode 100644
index 0000000000000000000000000000000000000000..18f02fd0e9a572c54f207324002852e72b3126c7
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/tut10_http_errors.py
@@ -0,0 +1,84 @@
+"""
+
+Tutorial: HTTP errors
+
+HTTPError is used to return an error response to the client.
+CherryPy has lots of options regarding how such errors are
+logged, displayed, and formatted.
+
+"""
+
+import os
+import os.path
+
+import cherrypy
+
+localDir = os.path.dirname(__file__)
+curpath = os.path.normpath(os.path.join(os.getcwd(), localDir))
+
+
+class HTTPErrorDemo(object):
+
+ # Set a custom response for 403 errors.
+ _cp_config = {'error_page.403':
+ os.path.join(curpath, 'custom_error.html')}
+
+ @cherrypy.expose
+ def index(self):
+ # display some links that will result in errors
+ tracebacks = cherrypy.request.show_tracebacks
+ if tracebacks:
+ trace = 'off'
+ else:
+ trace = 'on'
+
+ return """
+ <html><body>
+ <p>Toggle tracebacks <a href="toggleTracebacks">%s</a></p>
+ <p><a href="/doesNotExist">Click me; I'm a broken link!</a></p>
+ <p>
+ <a href="/error?code=403">
+ Use a custom error page from a file.
+ </a>
+ </p>
+ <p>These errors are explicitly raised by the application:</p>
+ <ul>
+ <li><a href="/error?code=400">400</a></li>
+ <li><a href="/error?code=401">401</a></li>
+ <li><a href="/error?code=402">402</a></li>
+ <li><a href="/error?code=500">500</a></li>
+ </ul>
+ <p><a href="/messageArg">You can also set the response body
+ when you raise an error.</a></p>
+ </body></html>
+ """ % trace
+
+ @cherrypy.expose
+ def toggleTracebacks(self):
+ # simple function to toggle tracebacks on and off
+ tracebacks = cherrypy.request.show_tracebacks
+ cherrypy.config.update({'request.show_tracebacks': not tracebacks})
+
+ # redirect back to the index
+ raise cherrypy.HTTPRedirect('/')
+
+ @cherrypy.expose
+ def error(self, code):
+ # raise an error based on the get query
+ raise cherrypy.HTTPError(status=code)
+
+ @cherrypy.expose
+ def messageArg(self):
+ message = ("If you construct an HTTPError with a 'message' "
+ 'argument, it wil be placed on the error page '
+ '(underneath the status line by default).')
+ raise cherrypy.HTTPError(500, message=message)
+
+
+tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf')
+
+if __name__ == '__main__':
+ # CherryPy always starts with app.root when trying to map request URIs
+ # to objects, so we need to mount a request handler root. A request
+ # to '/' will be mapped to HelloWorld().index().
+ cherrypy.quickstart(HTTPErrorDemo(), config=tutconf)
diff --git a/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/tutorial.conf b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/tutorial.conf
new file mode 100644
index 0000000000000000000000000000000000000000..6537fd30c0ef53719ac6b96fb23837c12870ab0e
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/cherrypy/tutorial/tutorial.conf
@@ -0,0 +1,4 @@
+[global]
+server.socket_host = "127.0.0.1"
+server.socket_port = 8080
+server.thread_pool = 10
diff --git a/monEnvTP/lib/python3.8/site-packages/easy_install.py b/monEnvTP/lib/python3.8/site-packages/easy_install.py
new file mode 100644
index 0000000000000000000000000000000000000000..d87e984034b6e6e9eb456ebcb2b3f420c07a48bc
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/easy_install.py
@@ -0,0 +1,5 @@
+"""Run the EasyInstall command"""
+
+if __name__ == '__main__':
+ from setuptools.command.easy_install import main
+ main()
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/__init__.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..07a4d94fd2df317e4332fa46f7782615bda047a0
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__init__.py
@@ -0,0 +1,33 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2007 Google Inc. All Rights Reserved.
+
+__version__ = '3.19.3'
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5ac04be22fa64061177274a9bc407005bc504491
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/any_pb2.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/any_pb2.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..03733ae1536c83887f2041dbb9b10706341bea01
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/any_pb2.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/api_pb2.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/api_pb2.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..aba08f3c559ca2388047229166c489347ad30f04
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/api_pb2.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/descriptor.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/descriptor.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2802238770ccbb03870e174e850ae8d8917caeb8
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/descriptor.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/descriptor_database.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/descriptor_database.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0d30835ad63c09456deec14fc8fcf7cebb741e8a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/descriptor_database.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/descriptor_pb2.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/descriptor_pb2.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7edb34656516d67d36615827b3d2aa26c78edb35
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/descriptor_pb2.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/descriptor_pool.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/descriptor_pool.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..54f1e3ce04888d82f4df824312ca1380a4dfd5b3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/descriptor_pool.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/duration_pb2.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/duration_pb2.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..566d878535842a9bb53ea802139d81fa88c2517c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/duration_pb2.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/empty_pb2.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/empty_pb2.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..dd791796fd8466283321d102cfa5daa1f4a34997
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/empty_pb2.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/field_mask_pb2.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/field_mask_pb2.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c1c2db8282c882cbe3181df0654a1caf407dfa8d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/field_mask_pb2.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/json_format.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/json_format.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5ff8ef5fc5d167fb0d86b9dd74d75e0e25453593
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/json_format.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/message.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/message.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..973b30283cfec59d1c458930dc3396ac1ceb7558
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/message.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/message_factory.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/message_factory.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d15ef7cdb97f8eb3387c2c36d009f052009d0004
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/message_factory.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/proto_builder.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/proto_builder.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8455bfd93e9a5da6797822b23eaa5a31b6f916d1
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/proto_builder.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/reflection.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/reflection.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e0c270d9f7dd1b2792d0d0da213e8ff6554ff521
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/reflection.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/service.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/service.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..fc18eed28c835bd8edda84f1ea116f1984432d0c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/service.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/service_reflection.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/service_reflection.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2917be6b4ad276ce8b3654f4cacfab015bace2da
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/service_reflection.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/source_context_pb2.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/source_context_pb2.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..898ffdeefa056fc8a336e31a149e00e66fb5d47d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/source_context_pb2.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/struct_pb2.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/struct_pb2.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0b9b101e2bba61492da8356be59670293621210d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/struct_pb2.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/symbol_database.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/symbol_database.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d534e989b2a75251b21344b10fb95627898a3632
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/symbol_database.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/text_encoding.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/text_encoding.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..524217024fc3a6c029e5794e6c7e3a86473f0563
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/text_encoding.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/text_format.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/text_format.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2d5c11743a90fd1ed4cce39bf719ef50fe8fcbad
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/text_format.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/timestamp_pb2.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/timestamp_pb2.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3dff9ab6f2fac4c4f884920f015b30ec569c7f74
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/timestamp_pb2.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/type_pb2.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/type_pb2.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7f1ccc62e764423226b92d3cde5cd20229cf713c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/type_pb2.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/wrappers_pb2.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/wrappers_pb2.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b9610a86df9e715b0e445cc9a99c00e6108e288f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/__pycache__/wrappers_pb2.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/any_pb2.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/any_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..6f1adbc27b0b84e5eafbf44ee0a714ba82d3233b
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/any_pb2.py
@@ -0,0 +1,35 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/protobuf/any.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/any.proto\x12\x0fgoogle.protobuf\"&\n\x03\x41ny\x12\x10\n\x08type_url\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c\x42v\n\x13\x63om.google.protobufB\x08\x41nyProtoP\x01Z,google.golang.org/protobuf/types/known/anypb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
+
+
+
+_ANY = DESCRIPTOR.message_types_by_name['Any']
+Any = _reflection.GeneratedProtocolMessageType('Any', (_message.Message,), {
+ 'DESCRIPTOR' : _ANY,
+ '__module__' : 'google.protobuf.any_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.Any)
+ })
+_sym_db.RegisterMessage(Any)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\010AnyProtoP\001Z,google.golang.org/protobuf/types/known/anypb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
+ _ANY._serialized_start=46
+ _ANY._serialized_end=84
+# @@protoc_insertion_point(module_scope)
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/api_pb2.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/api_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..703618161bf147ebb7b7e434fcddf6c41918d032
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/api_pb2.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/protobuf/api.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2
+from google.protobuf import type_pb2 as google_dot_protobuf_dot_type__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/api.proto\x12\x0fgoogle.protobuf\x1a$google/protobuf/source_context.proto\x1a\x1agoogle/protobuf/type.proto\"\x81\x02\n\x03\x41pi\x12\x0c\n\x04name\x18\x01 \x01(\t\x12(\n\x07methods\x18\x02 \x03(\x0b\x32\x17.google.protobuf.Method\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x0f\n\x07version\x18\x04 \x01(\t\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12&\n\x06mixins\x18\x06 \x03(\x0b\x32\x16.google.protobuf.Mixin\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x01\n\x06Method\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x18\n\x10request_type_url\x18\x02 \x01(\t\x12\x19\n\x11request_streaming\x18\x03 \x01(\x08\x12\x19\n\x11response_type_url\x18\x04 \x01(\t\x12\x1a\n\x12response_streaming\x18\x05 \x01(\x08\x12(\n\x07options\x18\x06 \x03(\x0b\x32\x17.google.protobuf.Option\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"#\n\x05Mixin\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04root\x18\x02 \x01(\tBv\n\x13\x63om.google.protobufB\x08\x41piProtoP\x01Z,google.golang.org/protobuf/types/known/apipb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
+
+
+
+_API = DESCRIPTOR.message_types_by_name['Api']
+_METHOD = DESCRIPTOR.message_types_by_name['Method']
+_MIXIN = DESCRIPTOR.message_types_by_name['Mixin']
+Api = _reflection.GeneratedProtocolMessageType('Api', (_message.Message,), {
+ 'DESCRIPTOR' : _API,
+ '__module__' : 'google.protobuf.api_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.Api)
+ })
+_sym_db.RegisterMessage(Api)
+
+Method = _reflection.GeneratedProtocolMessageType('Method', (_message.Message,), {
+ 'DESCRIPTOR' : _METHOD,
+ '__module__' : 'google.protobuf.api_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.Method)
+ })
+_sym_db.RegisterMessage(Method)
+
+Mixin = _reflection.GeneratedProtocolMessageType('Mixin', (_message.Message,), {
+ 'DESCRIPTOR' : _MIXIN,
+ '__module__' : 'google.protobuf.api_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.Mixin)
+ })
+_sym_db.RegisterMessage(Mixin)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\010ApiProtoP\001Z,google.golang.org/protobuf/types/known/apipb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
+ _API._serialized_start=113
+ _API._serialized_end=370
+ _METHOD._serialized_start=373
+ _METHOD._serialized_end=586
+ _MIXIN._serialized_start=588
+ _MIXIN._serialized_end=623
+# @@protoc_insertion_point(module_scope)
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/compiler/__init__.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/compiler/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/compiler/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/compiler/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..71953144d3bbc5302d5953e146704053bc7687dd
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/compiler/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/compiler/__pycache__/plugin_pb2.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/compiler/__pycache__/plugin_pb2.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a29b17ccc4cc61098e6da4a3ae899263e5a6a81e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/compiler/__pycache__/plugin_pb2.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/compiler/plugin_pb2.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/compiler/plugin_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..a6daa2c938946cdc227e5d2070df58b1a346658f
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/compiler/plugin_pb2.py
@@ -0,0 +1,70 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/protobuf/compiler/plugin.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n%google/protobuf/compiler/plugin.proto\x12\x18google.protobuf.compiler\x1a google/protobuf/descriptor.proto\"F\n\x07Version\x12\r\n\x05major\x18\x01 \x01(\x05\x12\r\n\x05minor\x18\x02 \x01(\x05\x12\r\n\x05patch\x18\x03 \x01(\x05\x12\x0e\n\x06suffix\x18\x04 \x01(\t\"\xba\x01\n\x14\x43odeGeneratorRequest\x12\x18\n\x10\x66ile_to_generate\x18\x01 \x03(\t\x12\x11\n\tparameter\x18\x02 \x01(\t\x12\x38\n\nproto_file\x18\x0f \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\x12;\n\x10\x63ompiler_version\x18\x03 \x01(\x0b\x32!.google.protobuf.compiler.Version\"\xc1\x02\n\x15\x43odeGeneratorResponse\x12\r\n\x05\x65rror\x18\x01 \x01(\t\x12\x1a\n\x12supported_features\x18\x02 \x01(\x04\x12\x42\n\x04\x66ile\x18\x0f \x03(\x0b\x32\x34.google.protobuf.compiler.CodeGeneratorResponse.File\x1a\x7f\n\x04\x46ile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x17\n\x0finsertion_point\x18\x02 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x0f \x01(\t\x12?\n\x13generated_code_info\x18\x10 \x01(\x0b\x32\".google.protobuf.GeneratedCodeInfo\"8\n\x07\x46\x65\x61ture\x12\x10\n\x0c\x46\x45\x41TURE_NONE\x10\x00\x12\x1b\n\x17\x46\x45\x41TURE_PROTO3_OPTIONAL\x10\x01\x42W\n\x1c\x63om.google.protobuf.compilerB\x0cPluginProtosZ)google.golang.org/protobuf/types/pluginpb')
+
+
+
+_VERSION = DESCRIPTOR.message_types_by_name['Version']
+_CODEGENERATORREQUEST = DESCRIPTOR.message_types_by_name['CodeGeneratorRequest']
+_CODEGENERATORRESPONSE = DESCRIPTOR.message_types_by_name['CodeGeneratorResponse']
+_CODEGENERATORRESPONSE_FILE = _CODEGENERATORRESPONSE.nested_types_by_name['File']
+_CODEGENERATORRESPONSE_FEATURE = _CODEGENERATORRESPONSE.enum_types_by_name['Feature']
+Version = _reflection.GeneratedProtocolMessageType('Version', (_message.Message,), {
+ 'DESCRIPTOR' : _VERSION,
+ '__module__' : 'google.protobuf.compiler.plugin_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.compiler.Version)
+ })
+_sym_db.RegisterMessage(Version)
+
+CodeGeneratorRequest = _reflection.GeneratedProtocolMessageType('CodeGeneratorRequest', (_message.Message,), {
+ 'DESCRIPTOR' : _CODEGENERATORREQUEST,
+ '__module__' : 'google.protobuf.compiler.plugin_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.compiler.CodeGeneratorRequest)
+ })
+_sym_db.RegisterMessage(CodeGeneratorRequest)
+
+CodeGeneratorResponse = _reflection.GeneratedProtocolMessageType('CodeGeneratorResponse', (_message.Message,), {
+
+ 'File' : _reflection.GeneratedProtocolMessageType('File', (_message.Message,), {
+ 'DESCRIPTOR' : _CODEGENERATORRESPONSE_FILE,
+ '__module__' : 'google.protobuf.compiler.plugin_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.compiler.CodeGeneratorResponse.File)
+ })
+ ,
+ 'DESCRIPTOR' : _CODEGENERATORRESPONSE,
+ '__module__' : 'google.protobuf.compiler.plugin_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.compiler.CodeGeneratorResponse)
+ })
+_sym_db.RegisterMessage(CodeGeneratorResponse)
+_sym_db.RegisterMessage(CodeGeneratorResponse.File)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b'\n\034com.google.protobuf.compilerB\014PluginProtosZ)google.golang.org/protobuf/types/pluginpb'
+ _VERSION._serialized_start=101
+ _VERSION._serialized_end=171
+ _CODEGENERATORREQUEST._serialized_start=174
+ _CODEGENERATORREQUEST._serialized_end=360
+ _CODEGENERATORRESPONSE._serialized_start=363
+ _CODEGENERATORRESPONSE._serialized_end=684
+ _CODEGENERATORRESPONSE_FILE._serialized_start=499
+ _CODEGENERATORRESPONSE_FILE._serialized_end=626
+ _CODEGENERATORRESPONSE_FEATURE._serialized_start=628
+ _CODEGENERATORRESPONSE_FEATURE._serialized_end=684
+# @@protoc_insertion_point(module_scope)
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/descriptor.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/descriptor.py
new file mode 100644
index 0000000000000000000000000000000000000000..61c242f9dff0729876972c770f4ea19d003a7e80
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/descriptor.py
@@ -0,0 +1,1182 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Descriptors essentially contain exactly the information found in a .proto
+file, in types that make this information accessible in Python.
+"""
+
+__author__ = 'robinson@google.com (Will Robinson)'
+
+import threading
+import warnings
+
+from google.protobuf.internal import api_implementation
+
+_USE_C_DESCRIPTORS = False
+if api_implementation.Type() == 'cpp':
+ # Used by MakeDescriptor in cpp mode
+ import binascii
+ import os
+ from google.protobuf.pyext import _message
+ _USE_C_DESCRIPTORS = True
+
+
+class Error(Exception):
+ """Base error for this module."""
+
+
+class TypeTransformationError(Error):
+ """Error transforming between python proto type and corresponding C++ type."""
+
+
+if _USE_C_DESCRIPTORS:
+ # This metaclass allows to override the behavior of code like
+ # isinstance(my_descriptor, FieldDescriptor)
+ # and make it return True when the descriptor is an instance of the extension
+ # type written in C++.
+ class DescriptorMetaclass(type):
+ def __instancecheck__(cls, obj):
+ if super(DescriptorMetaclass, cls).__instancecheck__(obj):
+ return True
+ if isinstance(obj, cls._C_DESCRIPTOR_CLASS):
+ return True
+ return False
+else:
+ # The standard metaclass; nothing changes.
+ DescriptorMetaclass = type
+
+
+class _Lock(object):
+ """Wrapper class of threading.Lock(), which is allowed by 'with'."""
+
+ def __new__(cls):
+ self = object.__new__(cls)
+ self._lock = threading.Lock() # pylint: disable=protected-access
+ return self
+
+ def __enter__(self):
+ self._lock.acquire()
+
+ def __exit__(self, exc_type, exc_value, exc_tb):
+ self._lock.release()
+
+
+_lock = threading.Lock()
+
+
+def _Deprecated(name):
+ if _Deprecated.count > 0:
+ _Deprecated.count -= 1
+ warnings.warn(
+ 'Call to deprecated create function %s(). Note: Create unlinked '
+ 'descriptors is going to go away. Please use get/find descriptors from '
+ 'generated code or query the descriptor_pool.'
+ % name,
+ category=DeprecationWarning, stacklevel=3)
+
+
+# Deprecated warnings will print 100 times at most which should be enough for
+# users to notice and do not cause timeout.
+_Deprecated.count = 100
+
+
+_internal_create_key = object()
+
+
+class DescriptorBase(metaclass=DescriptorMetaclass):
+
+ """Descriptors base class.
+
+ This class is the base of all descriptor classes. It provides common options
+ related functionality.
+
+ Attributes:
+ has_options: True if the descriptor has non-default options. Usually it
+ is not necessary to read this -- just call GetOptions() which will
+ happily return the default instance. However, it's sometimes useful
+ for efficiency, and also useful inside the protobuf implementation to
+ avoid some bootstrapping issues.
+ """
+
+ if _USE_C_DESCRIPTORS:
+ # The class, or tuple of classes, that are considered as "virtual
+ # subclasses" of this descriptor class.
+ _C_DESCRIPTOR_CLASS = ()
+
+ def __init__(self, options, serialized_options, options_class_name):
+ """Initialize the descriptor given its options message and the name of the
+ class of the options message. The name of the class is required in case
+ the options message is None and has to be created.
+ """
+ self._options = options
+ self._options_class_name = options_class_name
+ self._serialized_options = serialized_options
+
+ # Does this descriptor have non-default options?
+ self.has_options = (options is not None) or (serialized_options is not None)
+
+ def _SetOptions(self, options, options_class_name):
+ """Sets the descriptor's options
+
+ This function is used in generated proto2 files to update descriptor
+ options. It must not be used outside proto2.
+ """
+ self._options = options
+ self._options_class_name = options_class_name
+
+ # Does this descriptor have non-default options?
+ self.has_options = options is not None
+
+ def GetOptions(self):
+ """Retrieves descriptor options.
+
+ This method returns the options set or creates the default options for the
+ descriptor.
+ """
+ if self._options:
+ return self._options
+
+ from google.protobuf import descriptor_pb2
+ try:
+ options_class = getattr(descriptor_pb2,
+ self._options_class_name)
+ except AttributeError:
+ raise RuntimeError('Unknown options class name %s!' %
+ (self._options_class_name))
+
+ with _lock:
+ if self._serialized_options is None:
+ self._options = options_class()
+ else:
+ self._options = _ParseOptions(options_class(),
+ self._serialized_options)
+
+ return self._options
+
+
+class _NestedDescriptorBase(DescriptorBase):
+ """Common class for descriptors that can be nested."""
+
+ def __init__(self, options, options_class_name, name, full_name,
+ file, containing_type, serialized_start=None,
+ serialized_end=None, serialized_options=None):
+ """Constructor.
+
+ Args:
+ options: Protocol message options or None
+ to use default message options.
+ options_class_name (str): The class name of the above options.
+ name (str): Name of this protocol message type.
+ full_name (str): Fully-qualified name of this protocol message type,
+ which will include protocol "package" name and the name of any
+ enclosing types.
+ file (FileDescriptor): Reference to file info.
+ containing_type: if provided, this is a nested descriptor, with this
+ descriptor as parent, otherwise None.
+ serialized_start: The start index (inclusive) in block in the
+ file.serialized_pb that describes this descriptor.
+ serialized_end: The end index (exclusive) in block in the
+ file.serialized_pb that describes this descriptor.
+ serialized_options: Protocol message serialized options or None.
+ """
+ super(_NestedDescriptorBase, self).__init__(
+ options, serialized_options, options_class_name)
+
+ self.name = name
+ # TODO(falk): Add function to calculate full_name instead of having it in
+ # memory?
+ self.full_name = full_name
+ self.file = file
+ self.containing_type = containing_type
+
+ self._serialized_start = serialized_start
+ self._serialized_end = serialized_end
+
+ def CopyToProto(self, proto):
+ """Copies this to the matching proto in descriptor_pb2.
+
+ Args:
+ proto: An empty proto instance from descriptor_pb2.
+
+ Raises:
+      Error: If self couldn't be serialized, due to too few constructor
+ arguments.
+ """
+ if (self.file is not None and
+ self._serialized_start is not None and
+ self._serialized_end is not None):
+ proto.ParseFromString(self.file.serialized_pb[
+ self._serialized_start:self._serialized_end])
+ else:
+ raise Error('Descriptor does not contain serialization.')
+
+
+class Descriptor(_NestedDescriptorBase):
+
+ """Descriptor for a protocol message type.
+
+ Attributes:
+ name (str): Name of this protocol message type.
+ full_name (str): Fully-qualified name of this protocol message type,
+ which will include protocol "package" name and the name of any
+ enclosing types.
+ containing_type (Descriptor): Reference to the descriptor of the type
+ containing us, or None if this is top-level.
+ fields (list[FieldDescriptor]): Field descriptors for all fields in
+ this type.
+ fields_by_number (dict(int, FieldDescriptor)): Same
+ :class:`FieldDescriptor` objects as in :attr:`fields`, but indexed
+ by "number" attribute in each FieldDescriptor.
+ fields_by_name (dict(str, FieldDescriptor)): Same
+ :class:`FieldDescriptor` objects as in :attr:`fields`, but indexed by
+ "name" attribute in each :class:`FieldDescriptor`.
+ nested_types (list[Descriptor]): Descriptor references
+ for all protocol message types nested within this one.
+ nested_types_by_name (dict(str, Descriptor)): Same Descriptor
+ objects as in :attr:`nested_types`, but indexed by "name" attribute
+ in each Descriptor.
+ enum_types (list[EnumDescriptor]): :class:`EnumDescriptor` references
+ for all enums contained within this type.
+ enum_types_by_name (dict(str, EnumDescriptor)): Same
+ :class:`EnumDescriptor` objects as in :attr:`enum_types`, but
+ indexed by "name" attribute in each EnumDescriptor.
+ enum_values_by_name (dict(str, EnumValueDescriptor)): Dict mapping
+ from enum value name to :class:`EnumValueDescriptor` for that value.
+ extensions (list[FieldDescriptor]): All extensions defined directly
+ within this message type (NOT within a nested type).
+ extensions_by_name (dict(str, FieldDescriptor)): Same FieldDescriptor
+ objects as :attr:`extensions`, but indexed by "name" attribute of each
+ FieldDescriptor.
+ is_extendable (bool): Does this type define any extension ranges?
+ oneofs (list[OneofDescriptor]): The list of descriptors for oneof fields
+ in this message.
+ oneofs_by_name (dict(str, OneofDescriptor)): Same objects as in
+ :attr:`oneofs`, but indexed by "name" attribute.
+ file (FileDescriptor): Reference to file descriptor.
+
+ """
+
+ if _USE_C_DESCRIPTORS:
+ _C_DESCRIPTOR_CLASS = _message.Descriptor
+
+ def __new__(
+ cls,
+ name=None,
+ full_name=None,
+ filename=None,
+ containing_type=None,
+ fields=None,
+ nested_types=None,
+ enum_types=None,
+ extensions=None,
+ options=None,
+ serialized_options=None,
+ is_extendable=True,
+ extension_ranges=None,
+ oneofs=None,
+ file=None, # pylint: disable=redefined-builtin
+ serialized_start=None,
+ serialized_end=None,
+ syntax=None,
+ create_key=None):
+ _message.Message._CheckCalledFromGeneratedFile()
+ return _message.default_pool.FindMessageTypeByName(full_name)
+
+ # NOTE(tmarek): The file argument redefining a builtin is nothing we can
+ # fix right now since we don't know how many clients already rely on the
+ # name of the argument.
+ def __init__(self, name, full_name, filename, containing_type, fields,
+ nested_types, enum_types, extensions, options=None,
+ serialized_options=None,
+ is_extendable=True, extension_ranges=None, oneofs=None,
+ file=None, serialized_start=None, serialized_end=None, # pylint: disable=redefined-builtin
+ syntax=None, create_key=None):
+ """Arguments to __init__() are as described in the description
+ of Descriptor fields above.
+
+ Note that filename is an obsolete argument, that is not used anymore.
+ Please use file.name to access this as an attribute.
+ """
+ if create_key is not _internal_create_key:
+ _Deprecated('Descriptor')
+
+ super(Descriptor, self).__init__(
+ options, 'MessageOptions', name, full_name, file,
+ containing_type, serialized_start=serialized_start,
+ serialized_end=serialized_end, serialized_options=serialized_options)
+
+ # We have fields in addition to fields_by_name and fields_by_number,
+ # so that:
+ # 1. Clients can index fields by "order in which they're listed."
+ # 2. Clients can easily iterate over all fields with the terse
+ # syntax: for f in descriptor.fields: ...
+ self.fields = fields
+ for field in self.fields:
+ field.containing_type = self
+ self.fields_by_number = dict((f.number, f) for f in fields)
+ self.fields_by_name = dict((f.name, f) for f in fields)
+ self._fields_by_camelcase_name = None
+
+ self.nested_types = nested_types
+ for nested_type in nested_types:
+ nested_type.containing_type = self
+ self.nested_types_by_name = dict((t.name, t) for t in nested_types)
+
+ self.enum_types = enum_types
+ for enum_type in self.enum_types:
+ enum_type.containing_type = self
+ self.enum_types_by_name = dict((t.name, t) for t in enum_types)
+ self.enum_values_by_name = dict(
+ (v.name, v) for t in enum_types for v in t.values)
+
+ self.extensions = extensions
+ for extension in self.extensions:
+ extension.extension_scope = self
+ self.extensions_by_name = dict((f.name, f) for f in extensions)
+ self.is_extendable = is_extendable
+ self.extension_ranges = extension_ranges
+ self.oneofs = oneofs if oneofs is not None else []
+ self.oneofs_by_name = dict((o.name, o) for o in self.oneofs)
+ for oneof in self.oneofs:
+ oneof.containing_type = self
+ self.syntax = syntax or "proto2"
+
+ @property
+ def fields_by_camelcase_name(self):
+ """Same FieldDescriptor objects as in :attr:`fields`, but indexed by
+ :attr:`FieldDescriptor.camelcase_name`.
+ """
+ if self._fields_by_camelcase_name is None:
+ self._fields_by_camelcase_name = dict(
+ (f.camelcase_name, f) for f in self.fields)
+ return self._fields_by_camelcase_name
+
+ def EnumValueName(self, enum, value):
+ """Returns the string name of an enum value.
+
+ This is just a small helper method to simplify a common operation.
+
+ Args:
+ enum: string name of the Enum.
+ value: int, value of the enum.
+
+ Returns:
+ string name of the enum value.
+
+ Raises:
+ KeyError if either the Enum doesn't exist or the value is not a valid
+ value for the enum.
+ """
+ return self.enum_types_by_name[enum].values_by_number[value].name
+
+ def CopyToProto(self, proto):
+ """Copies this to a descriptor_pb2.DescriptorProto.
+
+ Args:
+ proto: An empty descriptor_pb2.DescriptorProto.
+ """
+ # This function is overridden to give a better doc comment.
+ super(Descriptor, self).CopyToProto(proto)
+
+
+# TODO(robinson): We should have aggressive checking here,
+# for example:
+# * If you specify a repeated field, you should not be allowed
+# to specify a default value.
+# * [Other examples here as needed].
+#
+# TODO(robinson): for this and other *Descriptor classes, we
+# might also want to lock things down aggressively (e.g.,
+# prevent clients from setting the attributes). Having
+# stronger invariants here in general will reduce the number
+# of runtime checks we must do in reflection.py...
+class FieldDescriptor(DescriptorBase):
+
+ """Descriptor for a single field in a .proto file.
+
+ Attributes:
+ name (str): Name of this field, exactly as it appears in .proto.
+ full_name (str): Name of this field, including containing scope. This is
+ particularly relevant for extensions.
+ index (int): Dense, 0-indexed index giving the order that this
+ field textually appears within its message in the .proto file.
+ number (int): Tag number declared for this field in the .proto file.
+
+ type (int): (One of the TYPE_* constants below) Declared type.
+ cpp_type (int): (One of the CPPTYPE_* constants below) C++ type used to
+ represent this field.
+
+ label (int): (One of the LABEL_* constants below) Tells whether this
+ field is optional, required, or repeated.
+ has_default_value (bool): True if this field has a default value defined,
+ otherwise false.
+ default_value (Varies): Default value of this field. Only
+ meaningful for non-repeated scalar fields. Repeated fields
+ should always set this to [], and non-repeated composite
+ fields should always set this to None.
+
+ containing_type (Descriptor): Descriptor of the protocol message
+ type that contains this field. Set by the Descriptor constructor
+ if we're passed into one.
+ Somewhat confusingly, for extension fields, this is the
+ descriptor of the EXTENDED message, not the descriptor
+ of the message containing this field. (See is_extension and
+ extension_scope below).
+ message_type (Descriptor): If a composite field, a descriptor
+ of the message type contained in this field. Otherwise, this is None.
+ enum_type (EnumDescriptor): If this field contains an enum, a
+ descriptor of that enum. Otherwise, this is None.
+
+ is_extension: True iff this describes an extension field.
+ extension_scope (Descriptor): Only meaningful if is_extension is True.
+ Gives the message that immediately contains this extension field.
+ Will be None iff we're a top-level (file-level) extension field.
+
+ options (descriptor_pb2.FieldOptions): Protocol message field options or
+ None to use default field options.
+
+ containing_oneof (OneofDescriptor): If the field is a member of a oneof
+ union, contains its descriptor. Otherwise, None.
+
+ file (FileDescriptor): Reference to file descriptor.
+ """
+
+ # Must be consistent with C++ FieldDescriptor::Type enum in
+ # descriptor.h.
+ #
+ # TODO(robinson): Find a way to eliminate this repetition.
+ TYPE_DOUBLE = 1
+ TYPE_FLOAT = 2
+ TYPE_INT64 = 3
+ TYPE_UINT64 = 4
+ TYPE_INT32 = 5
+ TYPE_FIXED64 = 6
+ TYPE_FIXED32 = 7
+ TYPE_BOOL = 8
+ TYPE_STRING = 9
+ TYPE_GROUP = 10
+ TYPE_MESSAGE = 11
+ TYPE_BYTES = 12
+ TYPE_UINT32 = 13
+ TYPE_ENUM = 14
+ TYPE_SFIXED32 = 15
+ TYPE_SFIXED64 = 16
+ TYPE_SINT32 = 17
+ TYPE_SINT64 = 18
+ MAX_TYPE = 18
+
+ # Must be consistent with C++ FieldDescriptor::CppType enum in
+ # descriptor.h.
+ #
+ # TODO(robinson): Find a way to eliminate this repetition.
+ CPPTYPE_INT32 = 1
+ CPPTYPE_INT64 = 2
+ CPPTYPE_UINT32 = 3
+ CPPTYPE_UINT64 = 4
+ CPPTYPE_DOUBLE = 5
+ CPPTYPE_FLOAT = 6
+ CPPTYPE_BOOL = 7
+ CPPTYPE_ENUM = 8
+ CPPTYPE_STRING = 9
+ CPPTYPE_MESSAGE = 10
+ MAX_CPPTYPE = 10
+
+ _PYTHON_TO_CPP_PROTO_TYPE_MAP = {
+ TYPE_DOUBLE: CPPTYPE_DOUBLE,
+ TYPE_FLOAT: CPPTYPE_FLOAT,
+ TYPE_ENUM: CPPTYPE_ENUM,
+ TYPE_INT64: CPPTYPE_INT64,
+ TYPE_SINT64: CPPTYPE_INT64,
+ TYPE_SFIXED64: CPPTYPE_INT64,
+ TYPE_UINT64: CPPTYPE_UINT64,
+ TYPE_FIXED64: CPPTYPE_UINT64,
+ TYPE_INT32: CPPTYPE_INT32,
+ TYPE_SFIXED32: CPPTYPE_INT32,
+ TYPE_SINT32: CPPTYPE_INT32,
+ TYPE_UINT32: CPPTYPE_UINT32,
+ TYPE_FIXED32: CPPTYPE_UINT32,
+ TYPE_BYTES: CPPTYPE_STRING,
+ TYPE_STRING: CPPTYPE_STRING,
+ TYPE_BOOL: CPPTYPE_BOOL,
+ TYPE_MESSAGE: CPPTYPE_MESSAGE,
+ TYPE_GROUP: CPPTYPE_MESSAGE
+ }
+
+ # Must be consistent with C++ FieldDescriptor::Label enum in
+ # descriptor.h.
+ #
+ # TODO(robinson): Find a way to eliminate this repetition.
+ LABEL_OPTIONAL = 1
+ LABEL_REQUIRED = 2
+ LABEL_REPEATED = 3
+ MAX_LABEL = 3
+
+ # Must be consistent with C++ constants kMaxNumber, kFirstReservedNumber,
+ # and kLastReservedNumber in descriptor.h
+ MAX_FIELD_NUMBER = (1 << 29) - 1
+ FIRST_RESERVED_FIELD_NUMBER = 19000
+ LAST_RESERVED_FIELD_NUMBER = 19999
+
+ if _USE_C_DESCRIPTORS:
+ _C_DESCRIPTOR_CLASS = _message.FieldDescriptor
+
+ def __new__(cls, name, full_name, index, number, type, cpp_type, label,
+ default_value, message_type, enum_type, containing_type,
+ is_extension, extension_scope, options=None,
+ serialized_options=None,
+ has_default_value=True, containing_oneof=None, json_name=None,
+ file=None, create_key=None): # pylint: disable=redefined-builtin
+ _message.Message._CheckCalledFromGeneratedFile()
+ if is_extension:
+ return _message.default_pool.FindExtensionByName(full_name)
+ else:
+ return _message.default_pool.FindFieldByName(full_name)
+
+ def __init__(self, name, full_name, index, number, type, cpp_type, label,
+ default_value, message_type, enum_type, containing_type,
+ is_extension, extension_scope, options=None,
+ serialized_options=None,
+ has_default_value=True, containing_oneof=None, json_name=None,
+ file=None, create_key=None): # pylint: disable=redefined-builtin
+ """The arguments are as described in the description of FieldDescriptor
+ attributes above.
+
+ Note that containing_type may be None, and may be set later if necessary
+ (to deal with circular references between message types, for example).
+ Likewise for extension_scope.
+ """
+ if create_key is not _internal_create_key:
+ _Deprecated('FieldDescriptor')
+
+ super(FieldDescriptor, self).__init__(
+ options, serialized_options, 'FieldOptions')
+ self.name = name
+ self.full_name = full_name
+ self.file = file
+ self._camelcase_name = None
+ if json_name is None:
+ self.json_name = _ToJsonName(name)
+ else:
+ self.json_name = json_name
+ self.index = index
+ self.number = number
+ self.type = type
+ self.cpp_type = cpp_type
+ self.label = label
+ self.has_default_value = has_default_value
+ self.default_value = default_value
+ self.containing_type = containing_type
+ self.message_type = message_type
+ self.enum_type = enum_type
+ self.is_extension = is_extension
+ self.extension_scope = extension_scope
+ self.containing_oneof = containing_oneof
+ if api_implementation.Type() == 'cpp':
+ if is_extension:
+ self._cdescriptor = _message.default_pool.FindExtensionByName(full_name)
+ else:
+ self._cdescriptor = _message.default_pool.FindFieldByName(full_name)
+ else:
+ self._cdescriptor = None
+
+ @property
+ def camelcase_name(self):
+ """Camelcase name of this field.
+
+ Returns:
+ str: the name in CamelCase.
+ """
+ if self._camelcase_name is None:
+ self._camelcase_name = _ToCamelCase(self.name)
+ return self._camelcase_name
+
+ @staticmethod
+ def ProtoTypeToCppProtoType(proto_type):
+ """Converts from a Python proto type to a C++ Proto Type.
+
+ The Python ProtocolBuffer classes specify both the 'Python' datatype and the
+ 'C++' datatype - and they're not the same. This helper method should
+ translate from one to another.
+
+ Args:
+ proto_type: the Python proto type (descriptor.FieldDescriptor.TYPE_*)
+ Returns:
+ int: descriptor.FieldDescriptor.CPPTYPE_*, the C++ type.
+ Raises:
+ TypeTransformationError: when the Python proto type isn't known.
+ """
+ try:
+ return FieldDescriptor._PYTHON_TO_CPP_PROTO_TYPE_MAP[proto_type]
+ except KeyError:
+ raise TypeTransformationError('Unknown proto_type: %s' % proto_type)
+
+
+class EnumDescriptor(_NestedDescriptorBase):
+
+ """Descriptor for an enum defined in a .proto file.
+
+ Attributes:
+ name (str): Name of the enum type.
+ full_name (str): Full name of the type, including package name
+ and any enclosing type(s).
+
+ values (list[EnumValueDescriptors]): List of the values
+ in this enum.
+ values_by_name (dict(str, EnumValueDescriptor)): Same as :attr:`values`,
+ but indexed by the "name" field of each EnumValueDescriptor.
+ values_by_number (dict(int, EnumValueDescriptor)): Same as :attr:`values`,
+ but indexed by the "number" field of each EnumValueDescriptor.
+ containing_type (Descriptor): Descriptor of the immediate containing
+ type of this enum, or None if this is an enum defined at the
+ top level in a .proto file. Set by Descriptor's constructor
+ if we're passed into one.
+ file (FileDescriptor): Reference to file descriptor.
+ options (descriptor_pb2.EnumOptions): Enum options message or
+ None to use default enum options.
+ """
+
+ if _USE_C_DESCRIPTORS:
+ _C_DESCRIPTOR_CLASS = _message.EnumDescriptor
+
+ def __new__(cls, name, full_name, filename, values,
+ containing_type=None, options=None,
+ serialized_options=None, file=None, # pylint: disable=redefined-builtin
+ serialized_start=None, serialized_end=None, create_key=None):
+ _message.Message._CheckCalledFromGeneratedFile()
+ return _message.default_pool.FindEnumTypeByName(full_name)
+
+ def __init__(self, name, full_name, filename, values,
+ containing_type=None, options=None,
+ serialized_options=None, file=None, # pylint: disable=redefined-builtin
+ serialized_start=None, serialized_end=None, create_key=None):
+ """Arguments are as described in the attribute description above.
+
+ Note that filename is an obsolete argument, that is not used anymore.
+ Please use file.name to access this as an attribute.
+ """
+ if create_key is not _internal_create_key:
+ _Deprecated('EnumDescriptor')
+
+ super(EnumDescriptor, self).__init__(
+ options, 'EnumOptions', name, full_name, file,
+ containing_type, serialized_start=serialized_start,
+ serialized_end=serialized_end, serialized_options=serialized_options)
+
+ self.values = values
+ for value in self.values:
+ value.type = self
+ self.values_by_name = dict((v.name, v) for v in values)
+ # Values are reversed to ensure that the first alias is retained.
+ self.values_by_number = dict((v.number, v) for v in reversed(values))
+
+ def CopyToProto(self, proto):
+ """Copies this to a descriptor_pb2.EnumDescriptorProto.
+
+ Args:
+ proto (descriptor_pb2.EnumDescriptorProto): An empty descriptor proto.
+ """
+ # This function is overridden to give a better doc comment.
+ super(EnumDescriptor, self).CopyToProto(proto)
+
+
+class EnumValueDescriptor(DescriptorBase):
+
+ """Descriptor for a single value within an enum.
+
+ Attributes:
+ name (str): Name of this value.
+ index (int): Dense, 0-indexed index giving the order that this
+ value appears textually within its enum in the .proto file.
+ number (int): Actual number assigned to this enum value.
+ type (EnumDescriptor): :class:`EnumDescriptor` to which this value
+ belongs. Set by :class:`EnumDescriptor`'s constructor if we're
+ passed into one.
+ options (descriptor_pb2.EnumValueOptions): Enum value options message or
+      None to use default enum value options.
+ """
+
+ if _USE_C_DESCRIPTORS:
+ _C_DESCRIPTOR_CLASS = _message.EnumValueDescriptor
+
+ def __new__(cls, name, index, number,
+ type=None, # pylint: disable=redefined-builtin
+ options=None, serialized_options=None, create_key=None):
+ _message.Message._CheckCalledFromGeneratedFile()
+ # There is no way we can build a complete EnumValueDescriptor with the
+ # given parameters (the name of the Enum is not known, for example).
+ # Fortunately generated files just pass it to the EnumDescriptor()
+ # constructor, which will ignore it, so returning None is good enough.
+ return None
+
+ def __init__(self, name, index, number,
+ type=None, # pylint: disable=redefined-builtin
+ options=None, serialized_options=None, create_key=None):
+ """Arguments are as described in the attribute description above."""
+ if create_key is not _internal_create_key:
+ _Deprecated('EnumValueDescriptor')
+
+ super(EnumValueDescriptor, self).__init__(
+ options, serialized_options, 'EnumValueOptions')
+ self.name = name
+ self.index = index
+ self.number = number
+ self.type = type
+
+
+class OneofDescriptor(DescriptorBase):
+ """Descriptor for a oneof field.
+
+ Attributes:
+ name (str): Name of the oneof field.
+ full_name (str): Full name of the oneof field, including package name.
+ index (int): 0-based index giving the order of the oneof field inside
+ its containing type.
+ containing_type (Descriptor): :class:`Descriptor` of the protocol message
+ type that contains this field. Set by the :class:`Descriptor` constructor
+ if we're passed into one.
+ fields (list[FieldDescriptor]): The list of field descriptors this
+ oneof can contain.
+ """
+
+ if _USE_C_DESCRIPTORS:
+ _C_DESCRIPTOR_CLASS = _message.OneofDescriptor
+
+ def __new__(
+ cls, name, full_name, index, containing_type, fields, options=None,
+ serialized_options=None, create_key=None):
+ _message.Message._CheckCalledFromGeneratedFile()
+ return _message.default_pool.FindOneofByName(full_name)
+
+ def __init__(
+ self, name, full_name, index, containing_type, fields, options=None,
+ serialized_options=None, create_key=None):
+ """Arguments are as described in the attribute description above."""
+ if create_key is not _internal_create_key:
+ _Deprecated('OneofDescriptor')
+
+ super(OneofDescriptor, self).__init__(
+ options, serialized_options, 'OneofOptions')
+ self.name = name
+ self.full_name = full_name
+ self.index = index
+ self.containing_type = containing_type
+ self.fields = fields
+
+
+class ServiceDescriptor(_NestedDescriptorBase):
+
+ """Descriptor for a service.
+
+ Attributes:
+ name (str): Name of the service.
+ full_name (str): Full name of the service, including package name.
+    index (int): 0-indexed index giving the order that this service's
+ definition appears within the .proto file.
+ methods (list[MethodDescriptor]): List of methods provided by this
+ service.
+ methods_by_name (dict(str, MethodDescriptor)): Same
+      :class:`MethodDescriptor` objects as in :attr:`methods`, but
+ indexed by "name" attribute in each :class:`MethodDescriptor`.
+ options (descriptor_pb2.ServiceOptions): Service options message or
+ None to use default service options.
+ file (FileDescriptor): Reference to file info.
+ """
+
+ if _USE_C_DESCRIPTORS:
+ _C_DESCRIPTOR_CLASS = _message.ServiceDescriptor
+
+ def __new__(
+ cls,
+ name=None,
+ full_name=None,
+ index=None,
+ methods=None,
+ options=None,
+ serialized_options=None,
+ file=None, # pylint: disable=redefined-builtin
+ serialized_start=None,
+ serialized_end=None,
+ create_key=None):
+ _message.Message._CheckCalledFromGeneratedFile() # pylint: disable=protected-access
+ return _message.default_pool.FindServiceByName(full_name)
+
+ def __init__(self, name, full_name, index, methods, options=None,
+ serialized_options=None, file=None, # pylint: disable=redefined-builtin
+ serialized_start=None, serialized_end=None, create_key=None):
+ if create_key is not _internal_create_key:
+ _Deprecated('ServiceDescriptor')
+
+ super(ServiceDescriptor, self).__init__(
+ options, 'ServiceOptions', name, full_name, file,
+ None, serialized_start=serialized_start,
+ serialized_end=serialized_end, serialized_options=serialized_options)
+ self.index = index
+ self.methods = methods
+ self.methods_by_name = dict((m.name, m) for m in methods)
+ # Set the containing service for each method in this service.
+ for method in self.methods:
+ method.containing_service = self
+
+ def FindMethodByName(self, name):
+ """Searches for the specified method, and returns its descriptor.
+
+ Args:
+ name (str): Name of the method.
+ Returns:
+ MethodDescriptor or None: the descriptor for the requested method, if
+ found.
+ """
+ return self.methods_by_name.get(name, None)
+
+ def CopyToProto(self, proto):
+ """Copies this to a descriptor_pb2.ServiceDescriptorProto.
+
+ Args:
+ proto (descriptor_pb2.ServiceDescriptorProto): An empty descriptor proto.
+ """
+ # This function is overridden to give a better doc comment.
+ super(ServiceDescriptor, self).CopyToProto(proto)
+
+
+class MethodDescriptor(DescriptorBase):
+
+ """Descriptor for a method in a service.
+
+ Attributes:
+ name (str): Name of the method within the service.
+ full_name (str): Full name of method.
+ index (int): 0-indexed index of the method inside the service.
+ containing_service (ServiceDescriptor): The service that contains this
+ method.
+ input_type (Descriptor): The descriptor of the message that this method
+ accepts.
+ output_type (Descriptor): The descriptor of the message that this method
+ returns.
+ options (descriptor_pb2.MethodOptions or None): Method options message, or
+ None to use default method options.
+ """
+
+ if _USE_C_DESCRIPTORS:
+ _C_DESCRIPTOR_CLASS = _message.MethodDescriptor
+
+ def __new__(cls, name, full_name, index, containing_service,
+ input_type, output_type, options=None, serialized_options=None,
+ create_key=None):
+ _message.Message._CheckCalledFromGeneratedFile() # pylint: disable=protected-access
+ return _message.default_pool.FindMethodByName(full_name)
+
+ def __init__(self, name, full_name, index, containing_service,
+ input_type, output_type, options=None, serialized_options=None,
+ create_key=None):
+ """The arguments are as described in the description of MethodDescriptor
+ attributes above.
+
+ Note that containing_service may be None, and may be set later if necessary.
+ """
+ if create_key is not _internal_create_key:
+ _Deprecated('MethodDescriptor')
+
+ super(MethodDescriptor, self).__init__(
+ options, serialized_options, 'MethodOptions')
+ self.name = name
+ self.full_name = full_name
+ self.index = index
+ self.containing_service = containing_service
+ self.input_type = input_type
+ self.output_type = output_type
+
+ def CopyToProto(self, proto):
+ """Copies this to a descriptor_pb2.MethodDescriptorProto.
+
+ Args:
+ proto (descriptor_pb2.MethodDescriptorProto): An empty descriptor proto.
+
+ Raises:
+ Error: If self couldn't be serialized, due to too few constructor
+ arguments.
+ """
+ if self.containing_service is not None:
+ from google.protobuf import descriptor_pb2
+ service_proto = descriptor_pb2.ServiceDescriptorProto()
+ self.containing_service.CopyToProto(service_proto)
+ proto.CopyFrom(service_proto.method[self.index])
+ else:
+ raise Error('Descriptor does not contain a service.')
+
+
+class FileDescriptor(DescriptorBase):
+ """Descriptor for a file. Mimics the descriptor_pb2.FileDescriptorProto.
+
+ Note that :attr:`enum_types_by_name`, :attr:`extensions_by_name`, and
+ :attr:`dependencies` fields are only set by the
+ :py:mod:`google.protobuf.message_factory` module, and not by the generated
+ proto code.
+
+ Attributes:
+ name (str): Name of file, relative to root of source tree.
+ package (str): Name of the package
+ syntax (str): string indicating syntax of the file (can be "proto2" or
+ "proto3")
+ serialized_pb (bytes): Byte string of serialized
+ :class:`descriptor_pb2.FileDescriptorProto`.
+ dependencies (list[FileDescriptor]): List of other :class:`FileDescriptor`
+ objects this :class:`FileDescriptor` depends on.
+ public_dependencies (list[FileDescriptor]): A subset of
+ :attr:`dependencies`, which were declared as "public".
+ message_types_by_name (dict(str, Descriptor)): Mapping from message names
+ to their :class:`Descriptor`.
+ enum_types_by_name (dict(str, EnumDescriptor)): Mapping from enum names to
+ their :class:`EnumDescriptor`.
+ extensions_by_name (dict(str, FieldDescriptor)): Mapping from extension
+ names declared at file scope to their :class:`FieldDescriptor`.
+ services_by_name (dict(str, ServiceDescriptor)): Mapping from services'
+ names to their :class:`ServiceDescriptor`.
+ pool (DescriptorPool): The pool this descriptor belongs to. When not
+ passed to the constructor, the global default pool is used.
+ """
+
+ if _USE_C_DESCRIPTORS:
+ _C_DESCRIPTOR_CLASS = _message.FileDescriptor
+
+ def __new__(cls, name, package, options=None,
+ serialized_options=None, serialized_pb=None,
+ dependencies=None, public_dependencies=None,
+ syntax=None, pool=None, create_key=None):
+ # FileDescriptor() is called from various places, not only from generated
+ # files, to register dynamic proto files and messages.
+ # pylint: disable=g-explicit-bool-comparison
+ if serialized_pb == b'':
+ # Cpp generated code must be linked in if serialized_pb is ''
+ try:
+ return _message.default_pool.FindFileByName(name)
+ except KeyError:
+ raise RuntimeError('Please link in cpp generated lib for %s' % (name))
+ elif serialized_pb:
+ return _message.default_pool.AddSerializedFile(serialized_pb)
+ else:
+ return super(FileDescriptor, cls).__new__(cls)
+
+ def __init__(self, name, package, options=None,
+ serialized_options=None, serialized_pb=None,
+ dependencies=None, public_dependencies=None,
+ syntax=None, pool=None, create_key=None):
+ """Constructor."""
+ if create_key is not _internal_create_key:
+ _Deprecated('FileDescriptor')
+
+ super(FileDescriptor, self).__init__(
+ options, serialized_options, 'FileOptions')
+
+ if pool is None:
+ from google.protobuf import descriptor_pool
+ pool = descriptor_pool.Default()
+ self.pool = pool
+ self.message_types_by_name = {}
+ self.name = name
+ self.package = package
+ self.syntax = syntax or "proto2"
+ self.serialized_pb = serialized_pb
+
+ self.enum_types_by_name = {}
+ self.extensions_by_name = {}
+ self.services_by_name = {}
+ self.dependencies = (dependencies or [])
+ self.public_dependencies = (public_dependencies or [])
+
+ def CopyToProto(self, proto):
+ """Copies this to a descriptor_pb2.FileDescriptorProto.
+
+ Args:
+ proto: An empty descriptor_pb2.FileDescriptorProto.
+ """
+ proto.ParseFromString(self.serialized_pb)
+
+
+def _ParseOptions(message, string):
+ """Parses serialized options.
+
+ This helper function is used to parse serialized options in generated
+ proto2 files. It must not be used outside proto2.
+ """
+ message.ParseFromString(string)
+ return message
+
+
+def _ToCamelCase(name):
+ """Converts name to camel-case and returns it."""
+ capitalize_next = False
+ result = []
+
+ for c in name:
+ if c == '_':
+ if result:
+ capitalize_next = True
+ elif capitalize_next:
+ result.append(c.upper())
+ capitalize_next = False
+ else:
+ result += c
+
+ # Lower-case the first letter.
+ if result and result[0].isupper():
+ result[0] = result[0].lower()
+ return ''.join(result)
+
+
+def _OptionsOrNone(descriptor_proto):
+ """Returns the value of the field `options`, or None if it is not set."""
+ if descriptor_proto.HasField('options'):
+ return descriptor_proto.options
+ else:
+ return None
+
+
+def _ToJsonName(name):
+ """Converts name to Json name and returns it."""
+ capitalize_next = False
+ result = []
+
+ for c in name:
+ if c == '_':
+ capitalize_next = True
+ elif capitalize_next:
+ result.append(c.upper())
+ capitalize_next = False
+ else:
+ result += c
+
+ return ''.join(result)
+
+
+def MakeDescriptor(desc_proto, package='', build_file_if_cpp=True,
+ syntax=None):
+ """Make a protobuf Descriptor given a DescriptorProto protobuf.
+
+ Handles nested descriptors. Note that this is limited to the scope of defining
+ a message inside of another message. Composite fields can currently only be
+ resolved if the message is defined in the same scope as the field.
+
+ Args:
+ desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
+ package: Optional package name for the new message Descriptor (string).
+ build_file_if_cpp: Update the C++ descriptor pool if api matches.
+ Set to False on recursion, so no duplicates are created.
+ syntax: The syntax/semantics that should be used. Set to "proto3" to get
+ proto3 field presence semantics.
+ Returns:
+ A Descriptor for protobuf messages.
+ """
+ if api_implementation.Type() == 'cpp' and build_file_if_cpp:
+ # The C++ implementation requires all descriptors to be backed by the same
+ # definition in the C++ descriptor pool. To do this, we build a
+ # FileDescriptorProto with the same definition as this descriptor and build
+ # it into the pool.
+ from google.protobuf import descriptor_pb2
+ file_descriptor_proto = descriptor_pb2.FileDescriptorProto()
+ file_descriptor_proto.message_type.add().MergeFrom(desc_proto)
+
+ # Generate a random name for this proto file to prevent conflicts with any
+ # imported ones. We need to specify a file name so the descriptor pool
+ # accepts our FileDescriptorProto, but it is not important what that file
+ # name is actually set to.
+ proto_name = binascii.hexlify(os.urandom(16)).decode('ascii')
+
+ if package:
+ file_descriptor_proto.name = os.path.join(package.replace('.', '/'),
+ proto_name + '.proto')
+ file_descriptor_proto.package = package
+ else:
+ file_descriptor_proto.name = proto_name + '.proto'
+
+ _message.default_pool.Add(file_descriptor_proto)
+ result = _message.default_pool.FindFileByName(file_descriptor_proto.name)
+
+ if _USE_C_DESCRIPTORS:
+ return result.message_types_by_name[desc_proto.name]
+
+ full_message_name = [desc_proto.name]
+ if package: full_message_name.insert(0, package)
+
+ # Create Descriptors for enum types
+ enum_types = {}
+ for enum_proto in desc_proto.enum_type:
+ full_name = '.'.join(full_message_name + [enum_proto.name])
+ enum_desc = EnumDescriptor(
+ enum_proto.name, full_name, None, [
+ EnumValueDescriptor(enum_val.name, ii, enum_val.number,
+ create_key=_internal_create_key)
+ for ii, enum_val in enumerate(enum_proto.value)],
+ create_key=_internal_create_key)
+ enum_types[full_name] = enum_desc
+
+ # Create Descriptors for nested types
+ nested_types = {}
+ for nested_proto in desc_proto.nested_type:
+ full_name = '.'.join(full_message_name + [nested_proto.name])
+ # Nested types are just those defined inside of the message, not all types
+ # used by fields in the message, so no loops are possible here.
+ nested_desc = MakeDescriptor(nested_proto,
+ package='.'.join(full_message_name),
+ build_file_if_cpp=False,
+ syntax=syntax)
+ nested_types[full_name] = nested_desc
+
+ fields = []
+ for field_proto in desc_proto.field:
+ full_name = '.'.join(full_message_name + [field_proto.name])
+ enum_desc = None
+ nested_desc = None
+ if field_proto.json_name:
+ json_name = field_proto.json_name
+ else:
+ json_name = None
+ if field_proto.HasField('type_name'):
+ type_name = field_proto.type_name
+ full_type_name = '.'.join(full_message_name +
+ [type_name[type_name.rfind('.')+1:]])
+ if full_type_name in nested_types:
+ nested_desc = nested_types[full_type_name]
+ elif full_type_name in enum_types:
+ enum_desc = enum_types[full_type_name]
+ # Else type_name references a non-local type, which isn't implemented
+ field = FieldDescriptor(
+ field_proto.name, full_name, field_proto.number - 1,
+ field_proto.number, field_proto.type,
+ FieldDescriptor.ProtoTypeToCppProtoType(field_proto.type),
+ field_proto.label, None, nested_desc, enum_desc, None, False, None,
+ options=_OptionsOrNone(field_proto), has_default_value=False,
+ json_name=json_name, create_key=_internal_create_key)
+ fields.append(field)
+
+ desc_name = '.'.join(full_message_name)
+ return Descriptor(desc_proto.name, desc_name, None, None, fields,
+ list(nested_types.values()), list(enum_types.values()), [],
+ options=_OptionsOrNone(desc_proto),
+ create_key=_internal_create_key)
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/descriptor_database.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/descriptor_database.py
new file mode 100644
index 0000000000000000000000000000000000000000..073eddc711571a7f510ff8b189e2a9a863d53454
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/descriptor_database.py
@@ -0,0 +1,177 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Provides a container for DescriptorProtos."""
+
+__author__ = 'matthewtoia@google.com (Matt Toia)'
+
+import warnings
+
+
+class Error(Exception):
+  # Base class for all exceptions raised by this module.
+  pass
+
+
+class DescriptorDatabaseConflictingDefinitionError(Error):
+  """Raised when a proto is added with the same name & different descriptor.
+
+  "Different" means the two FileDescriptorProto messages compare unequal;
+  re-adding an identical proto is a silent no-op (see DescriptorDatabase.Add).
+  """
+
+
+class DescriptorDatabase(object):
+  """A container accepting FileDescriptorProtos and maps DescriptorProtos.
+
+  Maintains two indexes over the added FileDescriptorProtos: one keyed by
+  file name and one keyed by fully qualified symbol name.
+  """
+
+  def __init__(self):
+    # Maps proto file name -> FileDescriptorProto.
+    self._file_desc_protos_by_file = {}
+    # Maps fully qualified symbol name -> FileDescriptorProto defining it.
+    self._file_desc_protos_by_symbol = {}
+
+  def Add(self, file_desc_proto):
+    """Adds the FileDescriptorProto and its types to this database.
+
+    Args:
+      file_desc_proto: The FileDescriptorProto to add.
+    Raises:
+      DescriptorDatabaseConflictingDefinitionError: if an attempt is made to
+        add a proto with the same name but different definition than an
+        existing proto in the database.
+    """
+    proto_name = file_desc_proto.name
+    if proto_name not in self._file_desc_protos_by_file:
+      self._file_desc_protos_by_file[proto_name] = file_desc_proto
+    elif self._file_desc_protos_by_file[proto_name] != file_desc_proto:
+      raise DescriptorDatabaseConflictingDefinitionError(
+          '%s already added, but with different descriptor.' % proto_name)
+    else:
+      # Exact duplicate of an already-registered proto: nothing more to index.
+      return
+
+    # Add all the top-level descriptors to the index.
+    package = file_desc_proto.package
+    for message in file_desc_proto.message_type:
+      # _ExtractSymbols yields the message itself plus every nested
+      # message/enum, each fully qualified with the package prefix.
+      for name in _ExtractSymbols(message, package):
+        self._AddSymbol(name, file_desc_proto)
+    for enum in file_desc_proto.enum_type:
+      self._AddSymbol(('.'.join((package, enum.name))), file_desc_proto)
+      # Top-level enum values are indexed directly under the package
+      # namespace (not under the enum's name).
+      for enum_value in enum.value:
+        self._file_desc_protos_by_symbol[
+            '.'.join((package, enum_value.name))] = file_desc_proto
+    for extension in file_desc_proto.extension:
+      self._AddSymbol(('.'.join((package, extension.name))), file_desc_proto)
+    for service in file_desc_proto.service:
+      self._AddSymbol(('.'.join((package, service.name))), file_desc_proto)
+
+  def FindFileByName(self, name):
+    """Finds the file descriptor proto by file name.
+
+    Typically the file name is a relative path ending to a .proto file. The
+    proto with the given name will have to have been added to this database
+    using the Add method or else an error will be raised.
+
+    Args:
+      name: The file name to find.
+
+    Returns:
+      The file descriptor proto matching the name.
+
+    Raises:
+      KeyError if no file by the given name was added.
+    """
+
+    return self._file_desc_protos_by_file[name]
+
+  def FindFileContainingSymbol(self, symbol):
+    """Finds the file descriptor proto containing the specified symbol.
+
+    The symbol should be a fully qualified name including the file descriptor's
+    package and any containing messages. Some examples:
+
+    'some.package.name.Message'
+    'some.package.name.Message.NestedEnum'
+    'some.package.name.Message.some_field'
+
+    The file descriptor proto containing the specified symbol must be added to
+    this database using the Add method or else an error will be raised.
+
+    Args:
+      symbol: The fully qualified symbol name.
+
+    Returns:
+      The file descriptor proto containing the symbol.
+
+    Raises:
+      KeyError if no file contains the specified symbol.
+    """
+    try:
+      return self._file_desc_protos_by_symbol[symbol]
+    except KeyError:
+      # Fields, enum values, and nested extensions are not in
+      # _file_desc_protos_by_symbol. Try to find the top level
+      # descriptor. Non-existent nested symbol under a valid top level
+      # descriptor can also be found. The behavior is the same with
+      # protobuf C++.
+      top_level, _, _ = symbol.rpartition('.')
+      try:
+        return self._file_desc_protos_by_symbol[top_level]
+      except KeyError:
+        # Raise the original symbol as a KeyError for better diagnostics.
+        raise KeyError(symbol)
+
+  def FindFileContainingExtension(self, extendee_name, extension_number):
+    # TODO(jieluo): implement this API.
+    return None
+
+  def FindAllExtensionNumbers(self, extendee_name):
+    # TODO(jieluo): implement this API.
+    return []
+
+  def _AddSymbol(self, name, file_desc_proto):
+    # Registers name -> file in the symbol index. On a conflict a
+    # RuntimeWarning is emitted, but the new file still overwrites the
+    # previous mapping.
+    if name in self._file_desc_protos_by_symbol:
+      warn_msg = ('Conflict register for file "' + file_desc_proto.name +
+                  '": ' + name +
+                  ' is already defined in file "' +
+                  self._file_desc_protos_by_symbol[name].name + '"')
+      warnings.warn(warn_msg, RuntimeWarning)
+    self._file_desc_protos_by_symbol[name] = file_desc_proto
+
+
+def _ExtractSymbols(desc_proto, package):
+  """Pulls out all the symbols from a descriptor proto.
+
+  Args:
+    desc_proto: The proto to extract symbols from.
+    package: The package containing the descriptor type; may be '' (falsy),
+      in which case the message name is used unqualified.
+
+  Yields:
+    The fully qualified name found in the descriptor.
+  """
+  message_name = package + '.' + desc_proto.name if package else desc_proto.name
+  yield message_name
+  # Recurse into nested messages; their symbols are qualified by this
+  # message's name, not by the original package alone.
+  for nested_type in desc_proto.nested_type:
+    for symbol in _ExtractSymbols(nested_type, message_name):
+      yield symbol
+  # Nested enums are yielded, but their individual values are not.
+  for enum_type in desc_proto.enum_type:
+    yield '.'.join((message_name, enum_type.name))
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/descriptor_pb2.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/descriptor_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..8f95c9f19f8e14b53fc4401f92436af8e953e165
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/descriptor_pb2.py
@@ -0,0 +1,2147 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/protobuf/descriptor.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+ DESCRIPTOR = _descriptor.FileDescriptor(
+ name='google/protobuf/descriptor.proto',
+ package='google.protobuf',
+ syntax='proto2',
+ serialized_options=None,
+ create_key=_descriptor._internal_create_key,
+ serialized_pb=b'\n google/protobuf/descriptor.proto\x12\x0fgoogle.protobuf\"G\n\x11\x46ileDescriptorSet\x12\x32\n\x04\x66ile\x18\x01 \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xdb\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12\x36\n\x0cmessage_type\x18\x04 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x05 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12\x38\n\x07service\x18\x06 \x03(\x0b\x32\'.google.protobuf.ServiceDescriptorProto\x12\x38\n\textension\x18\x07 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12-\n\x07options\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.FileOptions\x12\x39\n\x10source_code_info\x18\t \x01(\x0b\x32\x1f.google.protobuf.SourceCodeInfo\x12\x0e\n\x06syntax\x18\x0c \x01(\t\"\xa9\x05\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x05\x66ield\x18\x02 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x38\n\textension\x18\x06 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x35\n\x0bnested_type\x18\x03 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x04 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12H\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32/.google.protobuf.DescriptorProto.ExtensionRange\x12\x39\n\noneof_decl\x18\x08 \x03(\x0b\x32%.google.protobuf.OneofDescriptorProto\x12\x30\n\x07options\x18\x07 \x01(\x0b\x32\x1f.google.protobuf.MessageOptions\x12\x46\n\x0ereserved_range\x18\t \x03(\x0b\x32..google.protobuf.DescriptorProto.ReservedRange\x12\x15\n\rreserved_name\x18\n \x03(\t\x1a\x65\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\x12\x37\n\x07options\x18\x03 \x01(\x0b\x32&.google.protobuf.ExtensionRangeOptions\x1a+\n\rReservedRange\x12\r\n\x05start\x18\x01 
\x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"g\n\x15\x45xtensionRangeOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd5\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12:\n\x05label\x18\x04 \x01(\x0e\x32+.google.protobuf.FieldDescriptorProto.Label\x12\x38\n\x04type\x18\x05 \x01(\x0e\x32*.google.protobuf.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12\x11\n\tjson_name\x18\n \x01(\t\x12.\n\x07options\x18\x08 \x01(\x0b\x32\x1d.google.protobuf.FieldOptions\x12\x17\n\x0fproto3_optional\x18\x11 \x01(\x08\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"T\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\x07options\x18\x02 \x01(\x0b\x32\x1d.google.protobuf.OneofOptions\"\xa4\x02\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x03(\x0b\x32).google.protobuf.EnumValueDescriptorProto\x12-\n\x07options\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.EnumOptions\x12N\n\x0ereserved_range\x18\x04 
\x03(\x0b\x32\x36.google.protobuf.EnumDescriptorProto.EnumReservedRange\x12\x15\n\rreserved_name\x18\x05 \x03(\t\x1a/\n\x11\x45numReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"l\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12\x32\n\x07options\x18\x03 \x01(\x0b\x32!.google.protobuf.EnumValueOptions\"\x90\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x36\n\x06method\x18\x02 \x03(\x0b\x32&.google.protobuf.MethodDescriptorProto\x12\x30\n\x07options\x18\x03 \x01(\x0b\x32\x1f.google.protobuf.ServiceOptions\"\xc1\x01\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12/\n\x07options\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.MethodOptions\x12\x1f\n\x10\x63lient_streaming\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10server_streaming\x18\x06 \x01(\x08:\x05\x66\x61lse\"\xa5\x06\n\x0b\x46ileOptions\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12)\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08\x42\x02\x18\x01\x12%\n\x16java_string_check_utf8\x18\x1b \x01(\x08:\x05\x66\x61lse\x12\x46\n\x0coptimize_for\x18\t \x01(\x0e\x32).google.protobuf.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12#\n\x14php_generic_services\x18* \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x10\x63\x63_enable_arenas\x18\x1f \x01(\x08:\x04true\x12\x19\n\x11objc_class_prefix\x18$ \x01(\t\x12\x18\n\x10\x63sharp_namespace\x18% \x01(\t\x12\x14\n\x0cswift_prefix\x18\' \x01(\t\x12\x18\n\x10php_class_prefix\x18( 
\x01(\t\x12\x15\n\rphp_namespace\x18) \x01(\t\x12\x1e\n\x16php_metadata_namespace\x18, \x01(\t\x12\x14\n\x0cruby_package\x18- \x01(\t\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08&\x10\'\"\x84\x02\n\x0eMessageOptions\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x11\n\tmap_entry\x18\x07 \x01(\x08\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05J\x04\x08\x05\x10\x06J\x04\x08\x06\x10\x07J\x04\x08\x08\x10\tJ\x04\x08\t\x10\n\"\x9e\x03\n\x0c\x46ieldOptions\x12:\n\x05\x63type\x18\x01 \x01(\x0e\x32#.google.protobuf.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12?\n\x06jstype\x18\x06 \x01(\x0e\x32$.google.protobuf.FieldOptions.JSType:\tJS_NORMAL\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02\"5\n\x06JSType\x12\r\n\tJS_NORMAL\x10\x00\x12\r\n\tJS_STRING\x10\x01\x12\r\n\tJS_NUMBER\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05\"^\n\x0cOneofOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x93\x01\n\x0b\x45numOptions\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 
\x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x05\x10\x06\"}\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"{\n\x0eServiceOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xad\x02\n\rMethodOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12_\n\x11idempotency_level\x18\" \x01(\x0e\x32/.google.protobuf.MethodOptions.IdempotencyLevel:\x13IDEMPOTENCY_UNKNOWN\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"P\n\x10IdempotencyLevel\x12\x17\n\x13IDEMPOTENCY_UNKNOWN\x10\x00\x12\x13\n\x0fNO_SIDE_EFFECTS\x10\x01\x12\x0e\n\nIDEMPOTENT\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x9e\x02\n\x13UninterpretedOption\x12;\n\x04name\x18\x02 \x03(\x0b\x32-.google.protobuf.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xd5\x01\n\x0eSourceCodeInfo\x12:\n\x08location\x18\x01 \x03(\x0b\x32(.google.protobuf.SourceCodeInfo.Location\x1a\x86\x01\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 \x01(\t\x12!\n\x19leading_detached_comments\x18\x06 \x03(\t\"\xa7\x01\n\x11GeneratedCodeInfo\x12\x41\n\nannotation\x18\x01 
\x03(\x0b\x32-.google.protobuf.GeneratedCodeInfo.Annotation\x1aO\n\nAnnotation\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x13\n\x0bsource_file\x18\x02 \x01(\t\x12\r\n\x05\x62\x65gin\x18\x03 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x04 \x01(\x05\x42~\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01Z-google.golang.org/protobuf/types/descriptorpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1aGoogle.Protobuf.Reflection'
+ )
+else:
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n google/protobuf/descriptor.proto\x12\x0fgoogle.protobuf\"G\n\x11\x46ileDescriptorSet\x12\x32\n\x04\x66ile\x18\x01 \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xdb\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12\x36\n\x0cmessage_type\x18\x04 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x05 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12\x38\n\x07service\x18\x06 \x03(\x0b\x32\'.google.protobuf.ServiceDescriptorProto\x12\x38\n\textension\x18\x07 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12-\n\x07options\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.FileOptions\x12\x39\n\x10source_code_info\x18\t \x01(\x0b\x32\x1f.google.protobuf.SourceCodeInfo\x12\x0e\n\x06syntax\x18\x0c \x01(\t\"\xa9\x05\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x05\x66ield\x18\x02 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x38\n\textension\x18\x06 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x35\n\x0bnested_type\x18\x03 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x04 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12H\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32/.google.protobuf.DescriptorProto.ExtensionRange\x12\x39\n\noneof_decl\x18\x08 \x03(\x0b\x32%.google.protobuf.OneofDescriptorProto\x12\x30\n\x07options\x18\x07 \x01(\x0b\x32\x1f.google.protobuf.MessageOptions\x12\x46\n\x0ereserved_range\x18\t \x03(\x0b\x32..google.protobuf.DescriptorProto.ReservedRange\x12\x15\n\rreserved_name\x18\n \x03(\t\x1a\x65\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\x12\x37\n\x07options\x18\x03 
\x01(\x0b\x32&.google.protobuf.ExtensionRangeOptions\x1a+\n\rReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"g\n\x15\x45xtensionRangeOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd5\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12:\n\x05label\x18\x04 \x01(\x0e\x32+.google.protobuf.FieldDescriptorProto.Label\x12\x38\n\x04type\x18\x05 \x01(\x0e\x32*.google.protobuf.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12\x11\n\tjson_name\x18\n \x01(\t\x12.\n\x07options\x18\x08 \x01(\x0b\x32\x1d.google.protobuf.FieldOptions\x12\x17\n\x0fproto3_optional\x18\x11 \x01(\x08\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"T\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\x07options\x18\x02 \x01(\x0b\x32\x1d.google.protobuf.OneofOptions\"\xa4\x02\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x03(\x0b\x32).google.protobuf.EnumValueDescriptorProto\x12-\n\x07options\x18\x03 
\x01(\x0b\x32\x1c.google.protobuf.EnumOptions\x12N\n\x0ereserved_range\x18\x04 \x03(\x0b\x32\x36.google.protobuf.EnumDescriptorProto.EnumReservedRange\x12\x15\n\rreserved_name\x18\x05 \x03(\t\x1a/\n\x11\x45numReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"l\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12\x32\n\x07options\x18\x03 \x01(\x0b\x32!.google.protobuf.EnumValueOptions\"\x90\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x36\n\x06method\x18\x02 \x03(\x0b\x32&.google.protobuf.MethodDescriptorProto\x12\x30\n\x07options\x18\x03 \x01(\x0b\x32\x1f.google.protobuf.ServiceOptions\"\xc1\x01\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12/\n\x07options\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.MethodOptions\x12\x1f\n\x10\x63lient_streaming\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10server_streaming\x18\x06 \x01(\x08:\x05\x66\x61lse\"\xa5\x06\n\x0b\x46ileOptions\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12)\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08\x42\x02\x18\x01\x12%\n\x16java_string_check_utf8\x18\x1b \x01(\x08:\x05\x66\x61lse\x12\x46\n\x0coptimize_for\x18\t \x01(\x0e\x32).google.protobuf.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12#\n\x14php_generic_services\x18* \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x10\x63\x63_enable_arenas\x18\x1f \x01(\x08:\x04true\x12\x19\n\x11objc_class_prefix\x18$ \x01(\t\x12\x18\n\x10\x63sharp_namespace\x18% 
\x01(\t\x12\x14\n\x0cswift_prefix\x18\' \x01(\t\x12\x18\n\x10php_class_prefix\x18( \x01(\t\x12\x15\n\rphp_namespace\x18) \x01(\t\x12\x1e\n\x16php_metadata_namespace\x18, \x01(\t\x12\x14\n\x0cruby_package\x18- \x01(\t\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08&\x10\'\"\x84\x02\n\x0eMessageOptions\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x11\n\tmap_entry\x18\x07 \x01(\x08\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05J\x04\x08\x05\x10\x06J\x04\x08\x06\x10\x07J\x04\x08\x08\x10\tJ\x04\x08\t\x10\n\"\x9e\x03\n\x0c\x46ieldOptions\x12:\n\x05\x63type\x18\x01 \x01(\x0e\x32#.google.protobuf.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12?\n\x06jstype\x18\x06 \x01(\x0e\x32$.google.protobuf.FieldOptions.JSType:\tJS_NORMAL\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02\"5\n\x06JSType\x12\r\n\tJS_NORMAL\x10\x00\x12\r\n\tJS_STRING\x10\x01\x12\r\n\tJS_NUMBER\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05\"^\n\x0cOneofOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x93\x01\n\x0b\x45numOptions\x12\x13\n\x0b\x61llow_alias\x18\x02 
\x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x05\x10\x06\"}\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"{\n\x0eServiceOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xad\x02\n\rMethodOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12_\n\x11idempotency_level\x18\" \x01(\x0e\x32/.google.protobuf.MethodOptions.IdempotencyLevel:\x13IDEMPOTENCY_UNKNOWN\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"P\n\x10IdempotencyLevel\x12\x17\n\x13IDEMPOTENCY_UNKNOWN\x10\x00\x12\x13\n\x0fNO_SIDE_EFFECTS\x10\x01\x12\x0e\n\nIDEMPOTENT\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x9e\x02\n\x13UninterpretedOption\x12;\n\x04name\x18\x02 \x03(\x0b\x32-.google.protobuf.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xd5\x01\n\x0eSourceCodeInfo\x12:\n\x08location\x18\x01 \x03(\x0b\x32(.google.protobuf.SourceCodeInfo.Location\x1a\x86\x01\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 
\x01(\t\x12!\n\x19leading_detached_comments\x18\x06 \x03(\t\"\xa7\x01\n\x11GeneratedCodeInfo\x12\x41\n\nannotation\x18\x01 \x03(\x0b\x32-.google.protobuf.GeneratedCodeInfo.Annotation\x1aO\n\nAnnotation\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x13\n\x0bsource_file\x18\x02 \x01(\t\x12\r\n\x05\x62\x65gin\x18\x03 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x04 \x01(\x05\x42~\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01Z-google.golang.org/protobuf/types/descriptorpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1aGoogle.Protobuf.Reflection')
+
+
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+ _FIELDDESCRIPTORPROTO_TYPE = _descriptor.EnumDescriptor(
+ name='Type',
+ full_name='google.protobuf.FieldDescriptorProto.Type',
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_DOUBLE', index=0, number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_FLOAT', index=1, number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_INT64', index=2, number=3,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_UINT64', index=3, number=4,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_INT32', index=4, number=5,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_FIXED64', index=5, number=6,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_FIXED32', index=6, number=7,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_BOOL', index=7, number=8,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_STRING', index=8, number=9,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_GROUP', index=9, number=10,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_MESSAGE', index=10, number=11,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_BYTES', index=11, number=12,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_UINT32', index=12, number=13,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_ENUM', index=13, number=14,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_SFIXED32', index=14, number=15,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_SFIXED64', index=15, number=16,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_SINT32', index=16, number=17,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='TYPE_SINT64', index=17, number=18,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ )
+ _sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_TYPE)
+
+ _FIELDDESCRIPTORPROTO_LABEL = _descriptor.EnumDescriptor(
+ name='Label',
+ full_name='google.protobuf.FieldDescriptorProto.Label',
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='LABEL_OPTIONAL', index=0, number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='LABEL_REQUIRED', index=1, number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='LABEL_REPEATED', index=2, number=3,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ )
+ _sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_LABEL)
+
+ _FILEOPTIONS_OPTIMIZEMODE = _descriptor.EnumDescriptor(
+ name='OptimizeMode',
+ full_name='google.protobuf.FileOptions.OptimizeMode',
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='SPEED', index=0, number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='CODE_SIZE', index=1, number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='LITE_RUNTIME', index=2, number=3,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ )
+ _sym_db.RegisterEnumDescriptor(_FILEOPTIONS_OPTIMIZEMODE)
+
+ _FIELDOPTIONS_CTYPE = _descriptor.EnumDescriptor(
+ name='CType',
+ full_name='google.protobuf.FieldOptions.CType',
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='STRING', index=0, number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='CORD', index=1, number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='STRING_PIECE', index=2, number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ )
+ _sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_CTYPE)
+
+ _FIELDOPTIONS_JSTYPE = _descriptor.EnumDescriptor(
+ name='JSType',
+ full_name='google.protobuf.FieldOptions.JSType',
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='JS_NORMAL', index=0, number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='JS_STRING', index=1, number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='JS_NUMBER', index=2, number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ )
+ _sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_JSTYPE)
+
+ _METHODOPTIONS_IDEMPOTENCYLEVEL = _descriptor.EnumDescriptor(
+ name='IdempotencyLevel',
+ full_name='google.protobuf.MethodOptions.IdempotencyLevel',
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='IDEMPOTENCY_UNKNOWN', index=0, number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='NO_SIDE_EFFECTS', index=1, number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='IDEMPOTENT', index=2, number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ )
+ _sym_db.RegisterEnumDescriptor(_METHODOPTIONS_IDEMPOTENCYLEVEL)
+
+
+ _FILEDESCRIPTORSET = _descriptor.Descriptor(
+ name='FileDescriptorSet',
+ full_name='google.protobuf.FileDescriptorSet',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='file', full_name='google.protobuf.FileDescriptorSet.file', index=0,
+ number=1, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+
+ _FILEDESCRIPTORPROTO = _descriptor.Descriptor(
+ name='FileDescriptorProto',
+ full_name='google.protobuf.FileDescriptorProto',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='name', full_name='google.protobuf.FileDescriptorProto.name', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='package', full_name='google.protobuf.FileDescriptorProto.package', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='dependency', full_name='google.protobuf.FileDescriptorProto.dependency', index=2,
+ number=3, type=9, cpp_type=9, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='public_dependency', full_name='google.protobuf.FileDescriptorProto.public_dependency', index=3,
+ number=10, type=5, cpp_type=1, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='weak_dependency', full_name='google.protobuf.FileDescriptorProto.weak_dependency', index=4,
+ number=11, type=5, cpp_type=1, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='message_type', full_name='google.protobuf.FileDescriptorProto.message_type', index=5,
+ number=4, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='enum_type', full_name='google.protobuf.FileDescriptorProto.enum_type', index=6,
+ number=5, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='service', full_name='google.protobuf.FileDescriptorProto.service', index=7,
+ number=6, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='extension', full_name='google.protobuf.FileDescriptorProto.extension', index=8,
+ number=7, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='options', full_name='google.protobuf.FileDescriptorProto.options', index=9,
+ number=8, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='source_code_info', full_name='google.protobuf.FileDescriptorProto.source_code_info', index=10,
+ number=9, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='syntax', full_name='google.protobuf.FileDescriptorProto.syntax', index=11,
+ number=12, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+
+ _DESCRIPTORPROTO_EXTENSIONRANGE = _descriptor.Descriptor(
+ name='ExtensionRange',
+ full_name='google.protobuf.DescriptorProto.ExtensionRange',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='start', full_name='google.protobuf.DescriptorProto.ExtensionRange.start', index=0,
+ number=1, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='end', full_name='google.protobuf.DescriptorProto.ExtensionRange.end', index=1,
+ number=2, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='options', full_name='google.protobuf.DescriptorProto.ExtensionRange.options', index=2,
+ number=3, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+ _DESCRIPTORPROTO_RESERVEDRANGE = _descriptor.Descriptor(
+ name='ReservedRange',
+ full_name='google.protobuf.DescriptorProto.ReservedRange',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='start', full_name='google.protobuf.DescriptorProto.ReservedRange.start', index=0,
+ number=1, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='end', full_name='google.protobuf.DescriptorProto.ReservedRange.end', index=1,
+ number=2, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+ _DESCRIPTORPROTO = _descriptor.Descriptor(
+ name='DescriptorProto',
+ full_name='google.protobuf.DescriptorProto',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='name', full_name='google.protobuf.DescriptorProto.name', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='field', full_name='google.protobuf.DescriptorProto.field', index=1,
+ number=2, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='extension', full_name='google.protobuf.DescriptorProto.extension', index=2,
+ number=6, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='nested_type', full_name='google.protobuf.DescriptorProto.nested_type', index=3,
+ number=3, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='enum_type', full_name='google.protobuf.DescriptorProto.enum_type', index=4,
+ number=4, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='extension_range', full_name='google.protobuf.DescriptorProto.extension_range', index=5,
+ number=5, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='oneof_decl', full_name='google.protobuf.DescriptorProto.oneof_decl', index=6,
+ number=8, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='options', full_name='google.protobuf.DescriptorProto.options', index=7,
+ number=7, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='reserved_range', full_name='google.protobuf.DescriptorProto.reserved_range', index=8,
+ number=9, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='reserved_name', full_name='google.protobuf.DescriptorProto.reserved_name', index=9,
+ number=10, type=9, cpp_type=9, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[_DESCRIPTORPROTO_EXTENSIONRANGE, _DESCRIPTORPROTO_RESERVEDRANGE, ],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+
+ _EXTENSIONRANGEOPTIONS = _descriptor.Descriptor(
+ name='ExtensionRangeOptions',
+ full_name='google.protobuf.ExtensionRangeOptions',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='uninterpreted_option', full_name='google.protobuf.ExtensionRangeOptions.uninterpreted_option', index=0,
+ number=999, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=True,
+ syntax='proto2',
+ extension_ranges=[(1000, 536870912), ],
+ oneofs=[
+ ],
+ )
+
+
+ _FIELDDESCRIPTORPROTO = _descriptor.Descriptor(
+ name='FieldDescriptorProto',
+ full_name='google.protobuf.FieldDescriptorProto',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='name', full_name='google.protobuf.FieldDescriptorProto.name', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='number', full_name='google.protobuf.FieldDescriptorProto.number', index=1,
+ number=3, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='label', full_name='google.protobuf.FieldDescriptorProto.label', index=2,
+ number=4, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=1,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='type', full_name='google.protobuf.FieldDescriptorProto.type', index=3,
+ number=5, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=1,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='type_name', full_name='google.protobuf.FieldDescriptorProto.type_name', index=4,
+ number=6, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='extendee', full_name='google.protobuf.FieldDescriptorProto.extendee', index=5,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='default_value', full_name='google.protobuf.FieldDescriptorProto.default_value', index=6,
+ number=7, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='oneof_index', full_name='google.protobuf.FieldDescriptorProto.oneof_index', index=7,
+ number=9, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='json_name', full_name='google.protobuf.FieldDescriptorProto.json_name', index=8,
+ number=10, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='options', full_name='google.protobuf.FieldDescriptorProto.options', index=9,
+ number=8, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='proto3_optional', full_name='google.protobuf.FieldDescriptorProto.proto3_optional', index=10,
+ number=17, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _FIELDDESCRIPTORPROTO_TYPE,
+ _FIELDDESCRIPTORPROTO_LABEL,
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+
+ _ONEOFDESCRIPTORPROTO = _descriptor.Descriptor(
+ name='OneofDescriptorProto',
+ full_name='google.protobuf.OneofDescriptorProto',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='name', full_name='google.protobuf.OneofDescriptorProto.name', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='options', full_name='google.protobuf.OneofDescriptorProto.options', index=1,
+ number=2, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+
+ _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE = _descriptor.Descriptor(
+ name='EnumReservedRange',
+ full_name='google.protobuf.EnumDescriptorProto.EnumReservedRange',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='start', full_name='google.protobuf.EnumDescriptorProto.EnumReservedRange.start', index=0,
+ number=1, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='end', full_name='google.protobuf.EnumDescriptorProto.EnumReservedRange.end', index=1,
+ number=2, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+ _ENUMDESCRIPTORPROTO = _descriptor.Descriptor(
+ name='EnumDescriptorProto',
+ full_name='google.protobuf.EnumDescriptorProto',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='name', full_name='google.protobuf.EnumDescriptorProto.name', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='value', full_name='google.protobuf.EnumDescriptorProto.value', index=1,
+ number=2, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='options', full_name='google.protobuf.EnumDescriptorProto.options', index=2,
+ number=3, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='reserved_range', full_name='google.protobuf.EnumDescriptorProto.reserved_range', index=3,
+ number=4, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='reserved_name', full_name='google.protobuf.EnumDescriptorProto.reserved_name', index=4,
+ number=5, type=9, cpp_type=9, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[_ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE, ],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+
+ _ENUMVALUEDESCRIPTORPROTO = _descriptor.Descriptor(
+ name='EnumValueDescriptorProto',
+ full_name='google.protobuf.EnumValueDescriptorProto',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='name', full_name='google.protobuf.EnumValueDescriptorProto.name', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='number', full_name='google.protobuf.EnumValueDescriptorProto.number', index=1,
+ number=2, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='options', full_name='google.protobuf.EnumValueDescriptorProto.options', index=2,
+ number=3, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+
+ _SERVICEDESCRIPTORPROTO = _descriptor.Descriptor(
+ name='ServiceDescriptorProto',
+ full_name='google.protobuf.ServiceDescriptorProto',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='name', full_name='google.protobuf.ServiceDescriptorProto.name', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='method', full_name='google.protobuf.ServiceDescriptorProto.method', index=1,
+ number=2, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='options', full_name='google.protobuf.ServiceDescriptorProto.options', index=2,
+ number=3, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+
+ _METHODDESCRIPTORPROTO = _descriptor.Descriptor(
+ name='MethodDescriptorProto',
+ full_name='google.protobuf.MethodDescriptorProto',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='name', full_name='google.protobuf.MethodDescriptorProto.name', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='input_type', full_name='google.protobuf.MethodDescriptorProto.input_type', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='output_type', full_name='google.protobuf.MethodDescriptorProto.output_type', index=2,
+ number=3, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='options', full_name='google.protobuf.MethodDescriptorProto.options', index=3,
+ number=4, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='client_streaming', full_name='google.protobuf.MethodDescriptorProto.client_streaming', index=4,
+ number=5, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='server_streaming', full_name='google.protobuf.MethodDescriptorProto.server_streaming', index=5,
+ number=6, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+
+ _FILEOPTIONS = _descriptor.Descriptor(
+ name='FileOptions',
+ full_name='google.protobuf.FileOptions',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='java_package', full_name='google.protobuf.FileOptions.java_package', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='java_outer_classname', full_name='google.protobuf.FileOptions.java_outer_classname', index=1,
+ number=8, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='java_multiple_files', full_name='google.protobuf.FileOptions.java_multiple_files', index=2,
+ number=10, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='java_generate_equals_and_hash', full_name='google.protobuf.FileOptions.java_generate_equals_and_hash', index=3,
+ number=20, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='java_string_check_utf8', full_name='google.protobuf.FileOptions.java_string_check_utf8', index=4,
+ number=27, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='optimize_for', full_name='google.protobuf.FileOptions.optimize_for', index=5,
+ number=9, type=14, cpp_type=8, label=1,
+ has_default_value=True, default_value=1,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='go_package', full_name='google.protobuf.FileOptions.go_package', index=6,
+ number=11, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='cc_generic_services', full_name='google.protobuf.FileOptions.cc_generic_services', index=7,
+ number=16, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='java_generic_services', full_name='google.protobuf.FileOptions.java_generic_services', index=8,
+ number=17, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='py_generic_services', full_name='google.protobuf.FileOptions.py_generic_services', index=9,
+ number=18, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='php_generic_services', full_name='google.protobuf.FileOptions.php_generic_services', index=10,
+ number=42, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='deprecated', full_name='google.protobuf.FileOptions.deprecated', index=11,
+ number=23, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='cc_enable_arenas', full_name='google.protobuf.FileOptions.cc_enable_arenas', index=12,
+ number=31, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=True,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='objc_class_prefix', full_name='google.protobuf.FileOptions.objc_class_prefix', index=13,
+ number=36, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='csharp_namespace', full_name='google.protobuf.FileOptions.csharp_namespace', index=14,
+ number=37, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='swift_prefix', full_name='google.protobuf.FileOptions.swift_prefix', index=15,
+ number=39, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='php_class_prefix', full_name='google.protobuf.FileOptions.php_class_prefix', index=16,
+ number=40, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='php_namespace', full_name='google.protobuf.FileOptions.php_namespace', index=17,
+ number=41, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='php_metadata_namespace', full_name='google.protobuf.FileOptions.php_metadata_namespace', index=18,
+ number=44, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='ruby_package', full_name='google.protobuf.FileOptions.ruby_package', index=19,
+ number=45, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='uninterpreted_option', full_name='google.protobuf.FileOptions.uninterpreted_option', index=20,
+ number=999, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _FILEOPTIONS_OPTIMIZEMODE,
+ ],
+ serialized_options=None,
+ is_extendable=True,
+ syntax='proto2',
+ extension_ranges=[(1000, 536870912), ],
+ oneofs=[
+ ],
+ )
+
+
+ _MESSAGEOPTIONS = _descriptor.Descriptor(
+ name='MessageOptions',
+ full_name='google.protobuf.MessageOptions',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='message_set_wire_format', full_name='google.protobuf.MessageOptions.message_set_wire_format', index=0,
+ number=1, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='no_standard_descriptor_accessor', full_name='google.protobuf.MessageOptions.no_standard_descriptor_accessor', index=1,
+ number=2, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='deprecated', full_name='google.protobuf.MessageOptions.deprecated', index=2,
+ number=3, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='map_entry', full_name='google.protobuf.MessageOptions.map_entry', index=3,
+ number=7, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='uninterpreted_option', full_name='google.protobuf.MessageOptions.uninterpreted_option', index=4,
+ number=999, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=True,
+ syntax='proto2',
+ extension_ranges=[(1000, 536870912), ],
+ oneofs=[
+ ],
+ )
+
+
+ _FIELDOPTIONS = _descriptor.Descriptor(
+ name='FieldOptions',
+ full_name='google.protobuf.FieldOptions',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='ctype', full_name='google.protobuf.FieldOptions.ctype', index=0,
+ number=1, type=14, cpp_type=8, label=1,
+ has_default_value=True, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='packed', full_name='google.protobuf.FieldOptions.packed', index=1,
+ number=2, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='jstype', full_name='google.protobuf.FieldOptions.jstype', index=2,
+ number=6, type=14, cpp_type=8, label=1,
+ has_default_value=True, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='lazy', full_name='google.protobuf.FieldOptions.lazy', index=3,
+ number=5, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='deprecated', full_name='google.protobuf.FieldOptions.deprecated', index=4,
+ number=3, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='weak', full_name='google.protobuf.FieldOptions.weak', index=5,
+ number=10, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='uninterpreted_option', full_name='google.protobuf.FieldOptions.uninterpreted_option', index=6,
+ number=999, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _FIELDOPTIONS_CTYPE,
+ _FIELDOPTIONS_JSTYPE,
+ ],
+ serialized_options=None,
+ is_extendable=True,
+ syntax='proto2',
+ extension_ranges=[(1000, 536870912), ],
+ oneofs=[
+ ],
+ )
+
+
+ _ONEOFOPTIONS = _descriptor.Descriptor(
+ name='OneofOptions',
+ full_name='google.protobuf.OneofOptions',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='uninterpreted_option', full_name='google.protobuf.OneofOptions.uninterpreted_option', index=0,
+ number=999, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=True,
+ syntax='proto2',
+ extension_ranges=[(1000, 536870912), ],
+ oneofs=[
+ ],
+ )
+
+
+ _ENUMOPTIONS = _descriptor.Descriptor(
+ name='EnumOptions',
+ full_name='google.protobuf.EnumOptions',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='allow_alias', full_name='google.protobuf.EnumOptions.allow_alias', index=0,
+ number=2, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='deprecated', full_name='google.protobuf.EnumOptions.deprecated', index=1,
+ number=3, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='uninterpreted_option', full_name='google.protobuf.EnumOptions.uninterpreted_option', index=2,
+ number=999, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=True,
+ syntax='proto2',
+ extension_ranges=[(1000, 536870912), ],
+ oneofs=[
+ ],
+ )
+
+
+ _ENUMVALUEOPTIONS = _descriptor.Descriptor(
+ name='EnumValueOptions',
+ full_name='google.protobuf.EnumValueOptions',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='deprecated', full_name='google.protobuf.EnumValueOptions.deprecated', index=0,
+ number=1, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='uninterpreted_option', full_name='google.protobuf.EnumValueOptions.uninterpreted_option', index=1,
+ number=999, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=True,
+ syntax='proto2',
+ extension_ranges=[(1000, 536870912), ],
+ oneofs=[
+ ],
+ )
+
+
+ _SERVICEOPTIONS = _descriptor.Descriptor(
+ name='ServiceOptions',
+ full_name='google.protobuf.ServiceOptions',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='deprecated', full_name='google.protobuf.ServiceOptions.deprecated', index=0,
+ number=33, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='uninterpreted_option', full_name='google.protobuf.ServiceOptions.uninterpreted_option', index=1,
+ number=999, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=True,
+ syntax='proto2',
+ extension_ranges=[(1000, 536870912), ],
+ oneofs=[
+ ],
+ )
+
+
+ _METHODOPTIONS = _descriptor.Descriptor(
+ name='MethodOptions',
+ full_name='google.protobuf.MethodOptions',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='deprecated', full_name='google.protobuf.MethodOptions.deprecated', index=0,
+ number=33, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='idempotency_level', full_name='google.protobuf.MethodOptions.idempotency_level', index=1,
+ number=34, type=14, cpp_type=8, label=1,
+ has_default_value=True, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='uninterpreted_option', full_name='google.protobuf.MethodOptions.uninterpreted_option', index=2,
+ number=999, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _METHODOPTIONS_IDEMPOTENCYLEVEL,
+ ],
+ serialized_options=None,
+ is_extendable=True,
+ syntax='proto2',
+ extension_ranges=[(1000, 536870912), ],
+ oneofs=[
+ ],
+ )
+
+
+ _UNINTERPRETEDOPTION_NAMEPART = _descriptor.Descriptor(
+ name='NamePart',
+ full_name='google.protobuf.UninterpretedOption.NamePart',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='name_part', full_name='google.protobuf.UninterpretedOption.NamePart.name_part', index=0,
+ number=1, type=9, cpp_type=9, label=2,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='is_extension', full_name='google.protobuf.UninterpretedOption.NamePart.is_extension', index=1,
+ number=2, type=8, cpp_type=7, label=2,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+ _UNINTERPRETEDOPTION = _descriptor.Descriptor(
+ name='UninterpretedOption',
+ full_name='google.protobuf.UninterpretedOption',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='name', full_name='google.protobuf.UninterpretedOption.name', index=0,
+ number=2, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='identifier_value', full_name='google.protobuf.UninterpretedOption.identifier_value', index=1,
+ number=3, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='positive_int_value', full_name='google.protobuf.UninterpretedOption.positive_int_value', index=2,
+ number=4, type=4, cpp_type=4, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='negative_int_value', full_name='google.protobuf.UninterpretedOption.negative_int_value', index=3,
+ number=5, type=3, cpp_type=2, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='double_value', full_name='google.protobuf.UninterpretedOption.double_value', index=4,
+ number=6, type=1, cpp_type=5, label=1,
+ has_default_value=False, default_value=float(0),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='string_value', full_name='google.protobuf.UninterpretedOption.string_value', index=5,
+ number=7, type=12, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"",
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='aggregate_value', full_name='google.protobuf.UninterpretedOption.aggregate_value', index=6,
+ number=8, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[_UNINTERPRETEDOPTION_NAMEPART, ],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+
+ _SOURCECODEINFO_LOCATION = _descriptor.Descriptor(
+ name='Location',
+ full_name='google.protobuf.SourceCodeInfo.Location',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='path', full_name='google.protobuf.SourceCodeInfo.Location.path', index=0,
+ number=1, type=5, cpp_type=1, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='span', full_name='google.protobuf.SourceCodeInfo.Location.span', index=1,
+ number=2, type=5, cpp_type=1, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='leading_comments', full_name='google.protobuf.SourceCodeInfo.Location.leading_comments', index=2,
+ number=3, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='trailing_comments', full_name='google.protobuf.SourceCodeInfo.Location.trailing_comments', index=3,
+ number=4, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='leading_detached_comments', full_name='google.protobuf.SourceCodeInfo.Location.leading_detached_comments', index=4,
+ number=6, type=9, cpp_type=9, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+ _SOURCECODEINFO = _descriptor.Descriptor(
+ name='SourceCodeInfo',
+ full_name='google.protobuf.SourceCodeInfo',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='location', full_name='google.protobuf.SourceCodeInfo.location', index=0,
+ number=1, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[_SOURCECODEINFO_LOCATION, ],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+
+ _GENERATEDCODEINFO_ANNOTATION = _descriptor.Descriptor(
+ name='Annotation',
+ full_name='google.protobuf.GeneratedCodeInfo.Annotation',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='path', full_name='google.protobuf.GeneratedCodeInfo.Annotation.path', index=0,
+ number=1, type=5, cpp_type=1, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='source_file', full_name='google.protobuf.GeneratedCodeInfo.Annotation.source_file', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='begin', full_name='google.protobuf.GeneratedCodeInfo.Annotation.begin', index=2,
+ number=3, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='end', full_name='google.protobuf.GeneratedCodeInfo.Annotation.end', index=3,
+ number=4, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+ _GENERATEDCODEINFO = _descriptor.Descriptor(
+ name='GeneratedCodeInfo',
+ full_name='google.protobuf.GeneratedCodeInfo',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='annotation', full_name='google.protobuf.GeneratedCodeInfo.annotation', index=0,
+ number=1, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[_GENERATEDCODEINFO_ANNOTATION, ],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ )
+
+ _FILEDESCRIPTORSET.fields_by_name['file'].message_type = _FILEDESCRIPTORPROTO
+ _FILEDESCRIPTORPROTO.fields_by_name['message_type'].message_type = _DESCRIPTORPROTO
+ _FILEDESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO
+ _FILEDESCRIPTORPROTO.fields_by_name['service'].message_type = _SERVICEDESCRIPTORPROTO
+ _FILEDESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO
+ _FILEDESCRIPTORPROTO.fields_by_name['options'].message_type = _FILEOPTIONS
+ _FILEDESCRIPTORPROTO.fields_by_name['source_code_info'].message_type = _SOURCECODEINFO
+ _DESCRIPTORPROTO_EXTENSIONRANGE.fields_by_name['options'].message_type = _EXTENSIONRANGEOPTIONS
+ _DESCRIPTORPROTO_EXTENSIONRANGE.containing_type = _DESCRIPTORPROTO
+ _DESCRIPTORPROTO_RESERVEDRANGE.containing_type = _DESCRIPTORPROTO
+ _DESCRIPTORPROTO.fields_by_name['field'].message_type = _FIELDDESCRIPTORPROTO
+ _DESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO
+ _DESCRIPTORPROTO.fields_by_name['nested_type'].message_type = _DESCRIPTORPROTO
+ _DESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO
+ _DESCRIPTORPROTO.fields_by_name['extension_range'].message_type = _DESCRIPTORPROTO_EXTENSIONRANGE
+ _DESCRIPTORPROTO.fields_by_name['oneof_decl'].message_type = _ONEOFDESCRIPTORPROTO
+ _DESCRIPTORPROTO.fields_by_name['options'].message_type = _MESSAGEOPTIONS
+ _DESCRIPTORPROTO.fields_by_name['reserved_range'].message_type = _DESCRIPTORPROTO_RESERVEDRANGE
+ _EXTENSIONRANGEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
+ _FIELDDESCRIPTORPROTO.fields_by_name['label'].enum_type = _FIELDDESCRIPTORPROTO_LABEL
+ _FIELDDESCRIPTORPROTO.fields_by_name['type'].enum_type = _FIELDDESCRIPTORPROTO_TYPE
+ _FIELDDESCRIPTORPROTO.fields_by_name['options'].message_type = _FIELDOPTIONS
+ _FIELDDESCRIPTORPROTO_TYPE.containing_type = _FIELDDESCRIPTORPROTO
+ _FIELDDESCRIPTORPROTO_LABEL.containing_type = _FIELDDESCRIPTORPROTO
+ _ONEOFDESCRIPTORPROTO.fields_by_name['options'].message_type = _ONEOFOPTIONS
+ _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE.containing_type = _ENUMDESCRIPTORPROTO
+ _ENUMDESCRIPTORPROTO.fields_by_name['value'].message_type = _ENUMVALUEDESCRIPTORPROTO
+ _ENUMDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMOPTIONS
+ _ENUMDESCRIPTORPROTO.fields_by_name['reserved_range'].message_type = _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE
+ _ENUMVALUEDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMVALUEOPTIONS
+ _SERVICEDESCRIPTORPROTO.fields_by_name['method'].message_type = _METHODDESCRIPTORPROTO
+ _SERVICEDESCRIPTORPROTO.fields_by_name['options'].message_type = _SERVICEOPTIONS
+ _METHODDESCRIPTORPROTO.fields_by_name['options'].message_type = _METHODOPTIONS
+ _FILEOPTIONS.fields_by_name['optimize_for'].enum_type = _FILEOPTIONS_OPTIMIZEMODE
+ _FILEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
+ _FILEOPTIONS_OPTIMIZEMODE.containing_type = _FILEOPTIONS
+ _MESSAGEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
+ _FIELDOPTIONS.fields_by_name['ctype'].enum_type = _FIELDOPTIONS_CTYPE
+ _FIELDOPTIONS.fields_by_name['jstype'].enum_type = _FIELDOPTIONS_JSTYPE
+ _FIELDOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
+ _FIELDOPTIONS_CTYPE.containing_type = _FIELDOPTIONS
+ _FIELDOPTIONS_JSTYPE.containing_type = _FIELDOPTIONS
+ _ONEOFOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
+ _ENUMOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
+ _ENUMVALUEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
+ _SERVICEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
+ _METHODOPTIONS.fields_by_name['idempotency_level'].enum_type = _METHODOPTIONS_IDEMPOTENCYLEVEL
+ _METHODOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
+ _METHODOPTIONS_IDEMPOTENCYLEVEL.containing_type = _METHODOPTIONS
+ _UNINTERPRETEDOPTION_NAMEPART.containing_type = _UNINTERPRETEDOPTION
+ _UNINTERPRETEDOPTION.fields_by_name['name'].message_type = _UNINTERPRETEDOPTION_NAMEPART
+ _SOURCECODEINFO_LOCATION.containing_type = _SOURCECODEINFO
+ _SOURCECODEINFO.fields_by_name['location'].message_type = _SOURCECODEINFO_LOCATION
+ _GENERATEDCODEINFO_ANNOTATION.containing_type = _GENERATEDCODEINFO
+ _GENERATEDCODEINFO.fields_by_name['annotation'].message_type = _GENERATEDCODEINFO_ANNOTATION
+ DESCRIPTOR.message_types_by_name['FileDescriptorSet'] = _FILEDESCRIPTORSET
+ DESCRIPTOR.message_types_by_name['FileDescriptorProto'] = _FILEDESCRIPTORPROTO
+ DESCRIPTOR.message_types_by_name['DescriptorProto'] = _DESCRIPTORPROTO
+ DESCRIPTOR.message_types_by_name['ExtensionRangeOptions'] = _EXTENSIONRANGEOPTIONS
+ DESCRIPTOR.message_types_by_name['FieldDescriptorProto'] = _FIELDDESCRIPTORPROTO
+ DESCRIPTOR.message_types_by_name['OneofDescriptorProto'] = _ONEOFDESCRIPTORPROTO
+ DESCRIPTOR.message_types_by_name['EnumDescriptorProto'] = _ENUMDESCRIPTORPROTO
+ DESCRIPTOR.message_types_by_name['EnumValueDescriptorProto'] = _ENUMVALUEDESCRIPTORPROTO
+ DESCRIPTOR.message_types_by_name['ServiceDescriptorProto'] = _SERVICEDESCRIPTORPROTO
+ DESCRIPTOR.message_types_by_name['MethodDescriptorProto'] = _METHODDESCRIPTORPROTO
+ DESCRIPTOR.message_types_by_name['FileOptions'] = _FILEOPTIONS
+ DESCRIPTOR.message_types_by_name['MessageOptions'] = _MESSAGEOPTIONS
+ DESCRIPTOR.message_types_by_name['FieldOptions'] = _FIELDOPTIONS
+ DESCRIPTOR.message_types_by_name['OneofOptions'] = _ONEOFOPTIONS
+ DESCRIPTOR.message_types_by_name['EnumOptions'] = _ENUMOPTIONS
+ DESCRIPTOR.message_types_by_name['EnumValueOptions'] = _ENUMVALUEOPTIONS
+ DESCRIPTOR.message_types_by_name['ServiceOptions'] = _SERVICEOPTIONS
+ DESCRIPTOR.message_types_by_name['MethodOptions'] = _METHODOPTIONS
+ DESCRIPTOR.message_types_by_name['UninterpretedOption'] = _UNINTERPRETEDOPTION
+ DESCRIPTOR.message_types_by_name['SourceCodeInfo'] = _SOURCECODEINFO
+ DESCRIPTOR.message_types_by_name['GeneratedCodeInfo'] = _GENERATEDCODEINFO
+ _sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+else:
+ _FILEDESCRIPTORSET = DESCRIPTOR.message_types_by_name['FileDescriptorSet']
+ _FILEDESCRIPTORPROTO = DESCRIPTOR.message_types_by_name['FileDescriptorProto']
+ _DESCRIPTORPROTO = DESCRIPTOR.message_types_by_name['DescriptorProto']
+ _DESCRIPTORPROTO_EXTENSIONRANGE = _DESCRIPTORPROTO.nested_types_by_name['ExtensionRange']
+ _DESCRIPTORPROTO_RESERVEDRANGE = _DESCRIPTORPROTO.nested_types_by_name['ReservedRange']
+ _EXTENSIONRANGEOPTIONS = DESCRIPTOR.message_types_by_name['ExtensionRangeOptions']
+ _FIELDDESCRIPTORPROTO = DESCRIPTOR.message_types_by_name['FieldDescriptorProto']
+ _ONEOFDESCRIPTORPROTO = DESCRIPTOR.message_types_by_name['OneofDescriptorProto']
+ _ENUMDESCRIPTORPROTO = DESCRIPTOR.message_types_by_name['EnumDescriptorProto']
+ _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE = _ENUMDESCRIPTORPROTO.nested_types_by_name['EnumReservedRange']
+ _ENUMVALUEDESCRIPTORPROTO = DESCRIPTOR.message_types_by_name['EnumValueDescriptorProto']
+ _SERVICEDESCRIPTORPROTO = DESCRIPTOR.message_types_by_name['ServiceDescriptorProto']
+ _METHODDESCRIPTORPROTO = DESCRIPTOR.message_types_by_name['MethodDescriptorProto']
+ _FILEOPTIONS = DESCRIPTOR.message_types_by_name['FileOptions']
+ _MESSAGEOPTIONS = DESCRIPTOR.message_types_by_name['MessageOptions']
+ _FIELDOPTIONS = DESCRIPTOR.message_types_by_name['FieldOptions']
+ _ONEOFOPTIONS = DESCRIPTOR.message_types_by_name['OneofOptions']
+ _ENUMOPTIONS = DESCRIPTOR.message_types_by_name['EnumOptions']
+ _ENUMVALUEOPTIONS = DESCRIPTOR.message_types_by_name['EnumValueOptions']
+ _SERVICEOPTIONS = DESCRIPTOR.message_types_by_name['ServiceOptions']
+ _METHODOPTIONS = DESCRIPTOR.message_types_by_name['MethodOptions']
+ _UNINTERPRETEDOPTION = DESCRIPTOR.message_types_by_name['UninterpretedOption']
+ _UNINTERPRETEDOPTION_NAMEPART = _UNINTERPRETEDOPTION.nested_types_by_name['NamePart']
+ _SOURCECODEINFO = DESCRIPTOR.message_types_by_name['SourceCodeInfo']
+ _SOURCECODEINFO_LOCATION = _SOURCECODEINFO.nested_types_by_name['Location']
+ _GENERATEDCODEINFO = DESCRIPTOR.message_types_by_name['GeneratedCodeInfo']
+ _GENERATEDCODEINFO_ANNOTATION = _GENERATEDCODEINFO.nested_types_by_name['Annotation']
+ _FIELDDESCRIPTORPROTO_TYPE = _FIELDDESCRIPTORPROTO.enum_types_by_name['Type']
+ _FIELDDESCRIPTORPROTO_LABEL = _FIELDDESCRIPTORPROTO.enum_types_by_name['Label']
+ _FILEOPTIONS_OPTIMIZEMODE = _FILEOPTIONS.enum_types_by_name['OptimizeMode']
+ _FIELDOPTIONS_CTYPE = _FIELDOPTIONS.enum_types_by_name['CType']
+ _FIELDOPTIONS_JSTYPE = _FIELDOPTIONS.enum_types_by_name['JSType']
+ _METHODOPTIONS_IDEMPOTENCYLEVEL = _METHODOPTIONS.enum_types_by_name['IdempotencyLevel']
+FileDescriptorSet = _reflection.GeneratedProtocolMessageType('FileDescriptorSet', (_message.Message,), {
+ 'DESCRIPTOR' : _FILEDESCRIPTORSET,
+ '__module__' : 'google.protobuf.descriptor_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.FileDescriptorSet)
+ })
+_sym_db.RegisterMessage(FileDescriptorSet)
+
+FileDescriptorProto = _reflection.GeneratedProtocolMessageType('FileDescriptorProto', (_message.Message,), {
+ 'DESCRIPTOR' : _FILEDESCRIPTORPROTO,
+ '__module__' : 'google.protobuf.descriptor_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.FileDescriptorProto)
+ })
+_sym_db.RegisterMessage(FileDescriptorProto)
+
+DescriptorProto = _reflection.GeneratedProtocolMessageType('DescriptorProto', (_message.Message,), {
+
+ 'ExtensionRange' : _reflection.GeneratedProtocolMessageType('ExtensionRange', (_message.Message,), {
+ 'DESCRIPTOR' : _DESCRIPTORPROTO_EXTENSIONRANGE,
+ '__module__' : 'google.protobuf.descriptor_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.DescriptorProto.ExtensionRange)
+ })
+ ,
+
+ 'ReservedRange' : _reflection.GeneratedProtocolMessageType('ReservedRange', (_message.Message,), {
+ 'DESCRIPTOR' : _DESCRIPTORPROTO_RESERVEDRANGE,
+ '__module__' : 'google.protobuf.descriptor_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.DescriptorProto.ReservedRange)
+ })
+ ,
+ 'DESCRIPTOR' : _DESCRIPTORPROTO,
+ '__module__' : 'google.protobuf.descriptor_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.DescriptorProto)
+ })
+_sym_db.RegisterMessage(DescriptorProto)
+_sym_db.RegisterMessage(DescriptorProto.ExtensionRange)
+_sym_db.RegisterMessage(DescriptorProto.ReservedRange)
+
+ExtensionRangeOptions = _reflection.GeneratedProtocolMessageType('ExtensionRangeOptions', (_message.Message,), {
+ 'DESCRIPTOR' : _EXTENSIONRANGEOPTIONS,
+ '__module__' : 'google.protobuf.descriptor_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.ExtensionRangeOptions)
+ })
+_sym_db.RegisterMessage(ExtensionRangeOptions)
+
+FieldDescriptorProto = _reflection.GeneratedProtocolMessageType('FieldDescriptorProto', (_message.Message,), {
+ 'DESCRIPTOR' : _FIELDDESCRIPTORPROTO,
+ '__module__' : 'google.protobuf.descriptor_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.FieldDescriptorProto)
+ })
+_sym_db.RegisterMessage(FieldDescriptorProto)
+
+OneofDescriptorProto = _reflection.GeneratedProtocolMessageType('OneofDescriptorProto', (_message.Message,), {
+ 'DESCRIPTOR' : _ONEOFDESCRIPTORPROTO,
+ '__module__' : 'google.protobuf.descriptor_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.OneofDescriptorProto)
+ })
+_sym_db.RegisterMessage(OneofDescriptorProto)
+
+EnumDescriptorProto = _reflection.GeneratedProtocolMessageType('EnumDescriptorProto', (_message.Message,), {
+
+ 'EnumReservedRange' : _reflection.GeneratedProtocolMessageType('EnumReservedRange', (_message.Message,), {
+ 'DESCRIPTOR' : _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE,
+ '__module__' : 'google.protobuf.descriptor_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.EnumDescriptorProto.EnumReservedRange)
+ })
+ ,
+ 'DESCRIPTOR' : _ENUMDESCRIPTORPROTO,
+ '__module__' : 'google.protobuf.descriptor_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.EnumDescriptorProto)
+ })
+_sym_db.RegisterMessage(EnumDescriptorProto)
+_sym_db.RegisterMessage(EnumDescriptorProto.EnumReservedRange)
+
+EnumValueDescriptorProto = _reflection.GeneratedProtocolMessageType('EnumValueDescriptorProto', (_message.Message,), {
+ 'DESCRIPTOR' : _ENUMVALUEDESCRIPTORPROTO,
+ '__module__' : 'google.protobuf.descriptor_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.EnumValueDescriptorProto)
+ })
+_sym_db.RegisterMessage(EnumValueDescriptorProto)
+
+ServiceDescriptorProto = _reflection.GeneratedProtocolMessageType('ServiceDescriptorProto', (_message.Message,), {
+ 'DESCRIPTOR' : _SERVICEDESCRIPTORPROTO,
+ '__module__' : 'google.protobuf.descriptor_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.ServiceDescriptorProto)
+ })
+_sym_db.RegisterMessage(ServiceDescriptorProto)
+
+MethodDescriptorProto = _reflection.GeneratedProtocolMessageType('MethodDescriptorProto', (_message.Message,), {
+ 'DESCRIPTOR' : _METHODDESCRIPTORPROTO,
+ '__module__' : 'google.protobuf.descriptor_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.MethodDescriptorProto)
+ })
+_sym_db.RegisterMessage(MethodDescriptorProto)
+
+FileOptions = _reflection.GeneratedProtocolMessageType('FileOptions', (_message.Message,), {
+ 'DESCRIPTOR' : _FILEOPTIONS,
+ '__module__' : 'google.protobuf.descriptor_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.FileOptions)
+ })
+_sym_db.RegisterMessage(FileOptions)
+
+MessageOptions = _reflection.GeneratedProtocolMessageType('MessageOptions', (_message.Message,), {
+ 'DESCRIPTOR' : _MESSAGEOPTIONS,
+ '__module__' : 'google.protobuf.descriptor_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.MessageOptions)
+ })
+_sym_db.RegisterMessage(MessageOptions)
+
+FieldOptions = _reflection.GeneratedProtocolMessageType('FieldOptions', (_message.Message,), {
+ 'DESCRIPTOR' : _FIELDOPTIONS,
+ '__module__' : 'google.protobuf.descriptor_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.FieldOptions)
+ })
+_sym_db.RegisterMessage(FieldOptions)
+
+OneofOptions = _reflection.GeneratedProtocolMessageType('OneofOptions', (_message.Message,), {
+ 'DESCRIPTOR' : _ONEOFOPTIONS,
+ '__module__' : 'google.protobuf.descriptor_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.OneofOptions)
+ })
+_sym_db.RegisterMessage(OneofOptions)
+
+EnumOptions = _reflection.GeneratedProtocolMessageType('EnumOptions', (_message.Message,), {
+ 'DESCRIPTOR' : _ENUMOPTIONS,
+ '__module__' : 'google.protobuf.descriptor_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.EnumOptions)
+ })
+_sym_db.RegisterMessage(EnumOptions)
+
+EnumValueOptions = _reflection.GeneratedProtocolMessageType('EnumValueOptions', (_message.Message,), {
+ 'DESCRIPTOR' : _ENUMVALUEOPTIONS,
+ '__module__' : 'google.protobuf.descriptor_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.EnumValueOptions)
+ })
+_sym_db.RegisterMessage(EnumValueOptions)
+
+ServiceOptions = _reflection.GeneratedProtocolMessageType('ServiceOptions', (_message.Message,), {
+ 'DESCRIPTOR' : _SERVICEOPTIONS,
+ '__module__' : 'google.protobuf.descriptor_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.ServiceOptions)
+ })
+_sym_db.RegisterMessage(ServiceOptions)
+
+MethodOptions = _reflection.GeneratedProtocolMessageType('MethodOptions', (_message.Message,), {
+ 'DESCRIPTOR' : _METHODOPTIONS,
+ '__module__' : 'google.protobuf.descriptor_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.MethodOptions)
+ })
+_sym_db.RegisterMessage(MethodOptions)
+
+UninterpretedOption = _reflection.GeneratedProtocolMessageType('UninterpretedOption', (_message.Message,), {
+
+ 'NamePart' : _reflection.GeneratedProtocolMessageType('NamePart', (_message.Message,), {
+ 'DESCRIPTOR' : _UNINTERPRETEDOPTION_NAMEPART,
+ '__module__' : 'google.protobuf.descriptor_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.UninterpretedOption.NamePart)
+ })
+ ,
+ 'DESCRIPTOR' : _UNINTERPRETEDOPTION,
+ '__module__' : 'google.protobuf.descriptor_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.UninterpretedOption)
+ })
+_sym_db.RegisterMessage(UninterpretedOption)
+_sym_db.RegisterMessage(UninterpretedOption.NamePart)
+
+SourceCodeInfo = _reflection.GeneratedProtocolMessageType('SourceCodeInfo', (_message.Message,), {
+
+ 'Location' : _reflection.GeneratedProtocolMessageType('Location', (_message.Message,), {
+ 'DESCRIPTOR' : _SOURCECODEINFO_LOCATION,
+ '__module__' : 'google.protobuf.descriptor_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.SourceCodeInfo.Location)
+ })
+ ,
+ 'DESCRIPTOR' : _SOURCECODEINFO,
+ '__module__' : 'google.protobuf.descriptor_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.SourceCodeInfo)
+ })
+_sym_db.RegisterMessage(SourceCodeInfo)
+_sym_db.RegisterMessage(SourceCodeInfo.Location)
+
+GeneratedCodeInfo = _reflection.GeneratedProtocolMessageType('GeneratedCodeInfo', (_message.Message,), {
+
+ 'Annotation' : _reflection.GeneratedProtocolMessageType('Annotation', (_message.Message,), {
+ 'DESCRIPTOR' : _GENERATEDCODEINFO_ANNOTATION,
+ '__module__' : 'google.protobuf.descriptor_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.GeneratedCodeInfo.Annotation)
+ })
+ ,
+ 'DESCRIPTOR' : _GENERATEDCODEINFO,
+ '__module__' : 'google.protobuf.descriptor_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.GeneratedCodeInfo)
+ })
+_sym_db.RegisterMessage(GeneratedCodeInfo)
+_sym_db.RegisterMessage(GeneratedCodeInfo.Annotation)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ _FILEDESCRIPTORSET._serialized_start=53
+ _FILEDESCRIPTORSET._serialized_end=124
+ _FILEDESCRIPTORPROTO._serialized_start=127
+ _FILEDESCRIPTORPROTO._serialized_end=602
+ _DESCRIPTORPROTO._serialized_start=605
+ _DESCRIPTORPROTO._serialized_end=1286
+ _DESCRIPTORPROTO_EXTENSIONRANGE._serialized_start=1140
+ _DESCRIPTORPROTO_EXTENSIONRANGE._serialized_end=1241
+ _DESCRIPTORPROTO_RESERVEDRANGE._serialized_start=1243
+ _DESCRIPTORPROTO_RESERVEDRANGE._serialized_end=1286
+ _EXTENSIONRANGEOPTIONS._serialized_start=1288
+ _EXTENSIONRANGEOPTIONS._serialized_end=1391
+ _FIELDDESCRIPTORPROTO._serialized_start=1394
+ _FIELDDESCRIPTORPROTO._serialized_end=2119
+ _FIELDDESCRIPTORPROTO_TYPE._serialized_start=1740
+ _FIELDDESCRIPTORPROTO_TYPE._serialized_end=2050
+ _FIELDDESCRIPTORPROTO_LABEL._serialized_start=2052
+ _FIELDDESCRIPTORPROTO_LABEL._serialized_end=2119
+ _ONEOFDESCRIPTORPROTO._serialized_start=2121
+ _ONEOFDESCRIPTORPROTO._serialized_end=2205
+ _ENUMDESCRIPTORPROTO._serialized_start=2208
+ _ENUMDESCRIPTORPROTO._serialized_end=2500
+ _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE._serialized_start=2453
+ _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE._serialized_end=2500
+ _ENUMVALUEDESCRIPTORPROTO._serialized_start=2502
+ _ENUMVALUEDESCRIPTORPROTO._serialized_end=2610
+ _SERVICEDESCRIPTORPROTO._serialized_start=2613
+ _SERVICEDESCRIPTORPROTO._serialized_end=2757
+ _METHODDESCRIPTORPROTO._serialized_start=2760
+ _METHODDESCRIPTORPROTO._serialized_end=2953
+ _FILEOPTIONS._serialized_start=2956
+ _FILEOPTIONS._serialized_end=3761
+ _FILEOPTIONS_OPTIMIZEMODE._serialized_start=3686
+ _FILEOPTIONS_OPTIMIZEMODE._serialized_end=3744
+ _MESSAGEOPTIONS._serialized_start=3764
+ _MESSAGEOPTIONS._serialized_end=4024
+ _FIELDOPTIONS._serialized_start=4027
+ _FIELDOPTIONS._serialized_end=4441
+ _FIELDOPTIONS_CTYPE._serialized_start=4322
+ _FIELDOPTIONS_CTYPE._serialized_end=4369
+ _FIELDOPTIONS_JSTYPE._serialized_start=4371
+ _FIELDOPTIONS_JSTYPE._serialized_end=4424
+ _ONEOFOPTIONS._serialized_start=4443
+ _ONEOFOPTIONS._serialized_end=4537
+ _ENUMOPTIONS._serialized_start=4540
+ _ENUMOPTIONS._serialized_end=4687
+ _ENUMVALUEOPTIONS._serialized_start=4689
+ _ENUMVALUEOPTIONS._serialized_end=4814
+ _SERVICEOPTIONS._serialized_start=4816
+ _SERVICEOPTIONS._serialized_end=4939
+ _METHODOPTIONS._serialized_start=4942
+ _METHODOPTIONS._serialized_end=5243
+ _METHODOPTIONS_IDEMPOTENCYLEVEL._serialized_start=5152
+ _METHODOPTIONS_IDEMPOTENCYLEVEL._serialized_end=5232
+ _UNINTERPRETEDOPTION._serialized_start=5246
+ _UNINTERPRETEDOPTION._serialized_end=5532
+ _UNINTERPRETEDOPTION_NAMEPART._serialized_start=5481
+ _UNINTERPRETEDOPTION_NAMEPART._serialized_end=5532
+ _SOURCECODEINFO._serialized_start=5535
+ _SOURCECODEINFO._serialized_end=5748
+ _SOURCECODEINFO_LOCATION._serialized_start=5614
+ _SOURCECODEINFO_LOCATION._serialized_end=5748
+ _GENERATEDCODEINFO._serialized_start=5751
+ _GENERATEDCODEINFO._serialized_end=5918
+ _GENERATEDCODEINFO_ANNOTATION._serialized_start=5839
+ _GENERATEDCODEINFO_ANNOTATION._serialized_end=5918
+# @@protoc_insertion_point(module_scope)
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/descriptor_pool.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/descriptor_pool.py
new file mode 100644
index 0000000000000000000000000000000000000000..a6955ce81e855288de96b85cd976c90b42965532
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/descriptor_pool.py
@@ -0,0 +1,1289 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Provides DescriptorPool to use as a container for proto2 descriptors.
+
+The DescriptorPool is used in conjection with a DescriptorDatabase to maintain
+a collection of protocol buffer descriptors for use when dynamically creating
+message types at runtime.
+
+For most applications protocol buffers should be used via modules generated by
+the protocol buffer compiler tool. This should only be used when the type of
+protocol buffers used in an application or library cannot be predetermined.
+
+Below is a straightforward example on how to use this class::
+
+ pool = DescriptorPool()
+ file_descriptor_protos = [ ... ]
+ for file_descriptor_proto in file_descriptor_protos:
+ pool.Add(file_descriptor_proto)
+ my_message_descriptor = pool.FindMessageTypeByName('some.package.MessageType')
+
+The message descriptor can be used in conjunction with the message_factory
+module in order to create a protocol buffer class that can be encoded and
+decoded.
+
+If you want to get a Python class for the specified proto, use the
+helper functions inside google.protobuf.message_factory
+directly instead of this class.
+"""
+
+__author__ = 'matthewtoia@google.com (Matt Toia)'
+
+import collections
+import warnings
+
+from google.protobuf import descriptor
+from google.protobuf import descriptor_database
+from google.protobuf import text_encoding
+
+
+_USE_C_DESCRIPTORS = descriptor._USE_C_DESCRIPTORS # pylint: disable=protected-access
+
+
+def _Deprecated(func):
+ """Mark functions as deprecated."""
+
+ def NewFunc(*args, **kwargs):
+ warnings.warn(
+ 'Call to deprecated function %s(). Note: Do add unlinked descriptors '
+ 'to descriptor_pool is wrong. Use Add() or AddSerializedFile() '
+ 'instead.' % func.__name__,
+ category=DeprecationWarning)
+ return func(*args, **kwargs)
+ NewFunc.__name__ = func.__name__
+ NewFunc.__doc__ = func.__doc__
+ NewFunc.__dict__.update(func.__dict__)
+ return NewFunc
+
+
+def _NormalizeFullyQualifiedName(name):
+ """Remove leading period from fully-qualified type name.
+
+ Due to b/13860351 in descriptor_database.py, types in the root namespace are
+ generated with a leading period. This function removes that prefix.
+
+ Args:
+ name (str): The fully-qualified symbol name.
+
+ Returns:
+ str: The normalized fully-qualified symbol name.
+ """
+ return name.lstrip('.')
+
+
+def _OptionsOrNone(descriptor_proto):
+ """Returns the value of the field `options`, or None if it is not set."""
+ if descriptor_proto.HasField('options'):
+ return descriptor_proto.options
+ else:
+ return None
+
+
+def _IsMessageSetExtension(field):
+ return (field.is_extension and
+ field.containing_type.has_options and
+ field.containing_type.GetOptions().message_set_wire_format and
+ field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and
+ field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL)
+
+
+class DescriptorPool(object):
+ """A collection of protobufs dynamically constructed by descriptor protos."""
+
+ if _USE_C_DESCRIPTORS:
+
+ def __new__(cls, descriptor_db=None):
+ # pylint: disable=protected-access
+ return descriptor._message.DescriptorPool(descriptor_db)
+
+ def __init__(self, descriptor_db=None):
+ """Initializes a Pool of proto buffs.
+
+ The descriptor_db argument to the constructor is provided to allow
+ specialized file descriptor proto lookup code to be triggered on demand. An
+ example would be an implementation which will read and compile a file
+ specified in a call to FindFileByName() and not require the call to Add()
+ at all. Results from this database will be cached internally here as well.
+
+ Args:
+ descriptor_db: A secondary source of file descriptors.
+ """
+
+ self._internal_db = descriptor_database.DescriptorDatabase()
+ self._descriptor_db = descriptor_db
+ self._descriptors = {}
+ self._enum_descriptors = {}
+ self._service_descriptors = {}
+ self._file_descriptors = {}
+ self._toplevel_extensions = {}
+ # TODO(jieluo): Remove _file_desc_by_toplevel_extension after
+ # maybe year 2020 for compatibility issue (with 3.4.1 only).
+ self._file_desc_by_toplevel_extension = {}
+ self._top_enum_values = {}
+ # We store extensions in two two-level mappings: The first key is the
+ # descriptor of the message being extended, the second key is the extension
+ # full name or its tag number.
+ self._extensions_by_name = collections.defaultdict(dict)
+ self._extensions_by_number = collections.defaultdict(dict)
+
+ def _CheckConflictRegister(self, desc, desc_name, file_name):
+ """Check if the descriptor name conflicts with another of the same name.
+
+ Args:
+ desc: Descriptor of a message, enum, service, extension or enum value.
+ desc_name (str): the full name of desc.
+ file_name (str): The file name of descriptor.
+ """
+ for register, descriptor_type in [
+ (self._descriptors, descriptor.Descriptor),
+ (self._enum_descriptors, descriptor.EnumDescriptor),
+ (self._service_descriptors, descriptor.ServiceDescriptor),
+ (self._toplevel_extensions, descriptor.FieldDescriptor),
+ (self._top_enum_values, descriptor.EnumValueDescriptor)]:
+ if desc_name in register:
+ old_desc = register[desc_name]
+ if isinstance(old_desc, descriptor.EnumValueDescriptor):
+ old_file = old_desc.type.file.name
+ else:
+ old_file = old_desc.file.name
+
+ if not isinstance(desc, descriptor_type) or (
+ old_file != file_name):
+ error_msg = ('Conflict register for file "' + file_name +
+ '": ' + desc_name +
+ ' is already defined in file "' +
+ old_file + '". Please fix the conflict by adding '
+ 'package name on the proto file, or use different '
+ 'name for the duplication.')
+ if isinstance(desc, descriptor.EnumValueDescriptor):
+ error_msg += ('\nNote: enum values appear as '
+ 'siblings of the enum type instead of '
+ 'children of it.')
+
+ raise TypeError(error_msg)
+
+ return
+
+ def Add(self, file_desc_proto):
+ """Adds the FileDescriptorProto and its types to this pool.
+
+ Args:
+ file_desc_proto (FileDescriptorProto): The file descriptor to add.
+ """
+
+ self._internal_db.Add(file_desc_proto)
+
+ def AddSerializedFile(self, serialized_file_desc_proto):
+ """Adds the FileDescriptorProto and its types to this pool.
+
+ Args:
+ serialized_file_desc_proto (bytes): A bytes string, serialization of the
+ :class:`FileDescriptorProto` to add.
+
+ Returns:
+ FileDescriptor: Descriptor for the added file.
+ """
+
+ # pylint: disable=g-import-not-at-top
+ from google.protobuf import descriptor_pb2
+ file_desc_proto = descriptor_pb2.FileDescriptorProto.FromString(
+ serialized_file_desc_proto)
+ file_desc = self._ConvertFileProtoToFileDescriptor(file_desc_proto)
+ file_desc.serialized_pb = serialized_file_desc_proto
+ return file_desc
+
+ # Add Descriptor to descriptor pool is dreprecated. Please use Add()
+ # or AddSerializedFile() to add a FileDescriptorProto instead.
+ @_Deprecated
+ def AddDescriptor(self, desc):
+ self._AddDescriptor(desc)
+
+ # Never call this method. It is for internal usage only.
+ def _AddDescriptor(self, desc):
+ """Adds a Descriptor to the pool, non-recursively.
+
+ If the Descriptor contains nested messages or enums, the caller must
+ explicitly register them. This method also registers the FileDescriptor
+ associated with the message.
+
+ Args:
+ desc: A Descriptor.
+ """
+ if not isinstance(desc, descriptor.Descriptor):
+ raise TypeError('Expected instance of descriptor.Descriptor.')
+
+ self._CheckConflictRegister(desc, desc.full_name, desc.file.name)
+
+ self._descriptors[desc.full_name] = desc
+ self._AddFileDescriptor(desc.file)
+
+ # Add EnumDescriptor to descriptor pool is dreprecated. Please use Add()
+ # or AddSerializedFile() to add a FileDescriptorProto instead.
+ @_Deprecated
+ def AddEnumDescriptor(self, enum_desc):
+ self._AddEnumDescriptor(enum_desc)
+
+ # Never call this method. It is for internal usage only.
+ def _AddEnumDescriptor(self, enum_desc):
+ """Adds an EnumDescriptor to the pool.
+
+ This method also registers the FileDescriptor associated with the enum.
+
+ Args:
+ enum_desc: An EnumDescriptor.
+ """
+
+ if not isinstance(enum_desc, descriptor.EnumDescriptor):
+ raise TypeError('Expected instance of descriptor.EnumDescriptor.')
+
+ file_name = enum_desc.file.name
+ self._CheckConflictRegister(enum_desc, enum_desc.full_name, file_name)
+ self._enum_descriptors[enum_desc.full_name] = enum_desc
+
+ # Top enum values need to be indexed.
+ # Count the number of dots to see whether the enum is toplevel or nested
+ # in a message. We cannot use enum_desc.containing_type at this stage.
+ if enum_desc.file.package:
+ top_level = (enum_desc.full_name.count('.')
+ - enum_desc.file.package.count('.') == 1)
+ else:
+ top_level = enum_desc.full_name.count('.') == 0
+ if top_level:
+ file_name = enum_desc.file.name
+ package = enum_desc.file.package
+ for enum_value in enum_desc.values:
+ full_name = _NormalizeFullyQualifiedName(
+ '.'.join((package, enum_value.name)))
+ self._CheckConflictRegister(enum_value, full_name, file_name)
+ self._top_enum_values[full_name] = enum_value
+ self._AddFileDescriptor(enum_desc.file)
+
+ # Add ServiceDescriptor to descriptor pool is dreprecated. Please use Add()
+ # or AddSerializedFile() to add a FileDescriptorProto instead.
+ @_Deprecated
+ def AddServiceDescriptor(self, service_desc):
+ self._AddServiceDescriptor(service_desc)
+
+ # Never call this method. It is for internal usage only.
+ def _AddServiceDescriptor(self, service_desc):
+ """Adds a ServiceDescriptor to the pool.
+
+ Args:
+ service_desc: A ServiceDescriptor.
+ """
+
+ if not isinstance(service_desc, descriptor.ServiceDescriptor):
+ raise TypeError('Expected instance of descriptor.ServiceDescriptor.')
+
+ self._CheckConflictRegister(service_desc, service_desc.full_name,
+ service_desc.file.name)
+ self._service_descriptors[service_desc.full_name] = service_desc
+
+ # Add ExtensionDescriptor to descriptor pool is dreprecated. Please use Add()
+ # or AddSerializedFile() to add a FileDescriptorProto instead.
+ @_Deprecated
+ def AddExtensionDescriptor(self, extension):
+ self._AddExtensionDescriptor(extension)
+
+ # Never call this method. It is for internal usage only.
+ def _AddExtensionDescriptor(self, extension):
+ """Adds a FieldDescriptor describing an extension to the pool.
+
+ Args:
+ extension: A FieldDescriptor.
+
+ Raises:
+ AssertionError: when another extension with the same number extends the
+ same message.
+ TypeError: when the specified extension is not a
+ descriptor.FieldDescriptor.
+ """
+ if not (isinstance(extension, descriptor.FieldDescriptor) and
+ extension.is_extension):
+ raise TypeError('Expected an extension descriptor.')
+
+ if extension.extension_scope is None:
+ self._toplevel_extensions[extension.full_name] = extension
+
+ try:
+ existing_desc = self._extensions_by_number[
+ extension.containing_type][extension.number]
+ except KeyError:
+ pass
+ else:
+ if extension is not existing_desc:
+ raise AssertionError(
+ 'Extensions "%s" and "%s" both try to extend message type "%s" '
+ 'with field number %d.' %
+ (extension.full_name, existing_desc.full_name,
+ extension.containing_type.full_name, extension.number))
+
+ self._extensions_by_number[extension.containing_type][
+ extension.number] = extension
+ self._extensions_by_name[extension.containing_type][
+ extension.full_name] = extension
+
+ # Also register MessageSet extensions with the type name.
+ if _IsMessageSetExtension(extension):
+ self._extensions_by_name[extension.containing_type][
+ extension.message_type.full_name] = extension
+
+ @_Deprecated
+ def AddFileDescriptor(self, file_desc):
+ self._InternalAddFileDescriptor(file_desc)
+
+ # Never call this method. It is for internal usage only.
+ def _InternalAddFileDescriptor(self, file_desc):
+ """Adds a FileDescriptor to the pool, non-recursively.
+
+ If the FileDescriptor contains messages or enums, the caller must explicitly
+ register them.
+
+ Args:
+ file_desc: A FileDescriptor.
+ """
+
+ self._AddFileDescriptor(file_desc)
+ # TODO(jieluo): This is a temporary solution for FieldDescriptor.file.
+ # FieldDescriptor.file is added in code gen. Remove this solution after
+ # maybe 2020 for compatibility reason (with 3.4.1 only).
+ for extension in file_desc.extensions_by_name.values():
+ self._file_desc_by_toplevel_extension[
+ extension.full_name] = file_desc
+
+ def _AddFileDescriptor(self, file_desc):
+ """Adds a FileDescriptor to the pool, non-recursively.
+
+ If the FileDescriptor contains messages or enums, the caller must explicitly
+ register them.
+
+ Args:
+ file_desc: A FileDescriptor.
+ """
+
+ if not isinstance(file_desc, descriptor.FileDescriptor):
+ raise TypeError('Expected instance of descriptor.FileDescriptor.')
+ self._file_descriptors[file_desc.name] = file_desc
+
+ def FindFileByName(self, file_name):
+ """Gets a FileDescriptor by file name.
+
+ Args:
+ file_name (str): The path to the file to get a descriptor for.
+
+ Returns:
+ FileDescriptor: The descriptor for the named file.
+
+ Raises:
+ KeyError: if the file cannot be found in the pool.
+ """
+
+ try:
+ return self._file_descriptors[file_name]
+ except KeyError:
+ pass
+
+ try:
+ file_proto = self._internal_db.FindFileByName(file_name)
+ except KeyError as error:
+ if self._descriptor_db:
+ file_proto = self._descriptor_db.FindFileByName(file_name)
+ else:
+ raise error
+ if not file_proto:
+ raise KeyError('Cannot find a file named %s' % file_name)
+ return self._ConvertFileProtoToFileDescriptor(file_proto)
+
+ def FindFileContainingSymbol(self, symbol):
+ """Gets the FileDescriptor for the file containing the specified symbol.
+
+ Args:
+ symbol (str): The name of the symbol to search for.
+
+ Returns:
+ FileDescriptor: Descriptor for the file that contains the specified
+ symbol.
+
+ Raises:
+ KeyError: if the file cannot be found in the pool.
+ """
+
+ symbol = _NormalizeFullyQualifiedName(symbol)
+ try:
+ return self._InternalFindFileContainingSymbol(symbol)
+ except KeyError:
+ pass
+
+ try:
+ # Try fallback database. Build and find again if possible.
+ self._FindFileContainingSymbolInDb(symbol)
+ return self._InternalFindFileContainingSymbol(symbol)
+ except KeyError:
+ raise KeyError('Cannot find a file containing %s' % symbol)
+
+ def _InternalFindFileContainingSymbol(self, symbol):
+ """Gets the already built FileDescriptor containing the specified symbol.
+
+ Args:
+ symbol (str): The name of the symbol to search for.
+
+ Returns:
+ FileDescriptor: Descriptor for the file that contains the specified
+ symbol.
+
+ Raises:
+ KeyError: if the file cannot be found in the pool.
+ """
+ try:
+ return self._descriptors[symbol].file
+ except KeyError:
+ pass
+
+ try:
+ return self._enum_descriptors[symbol].file
+ except KeyError:
+ pass
+
+ try:
+ return self._service_descriptors[symbol].file
+ except KeyError:
+ pass
+
+ try:
+ return self._top_enum_values[symbol].type.file
+ except KeyError:
+ pass
+
+ try:
+ return self._file_desc_by_toplevel_extension[symbol]
+ except KeyError:
+ pass
+
+ # Try fields, enum values and nested extensions inside a message.
+ top_name, _, sub_name = symbol.rpartition('.')
+ try:
+ message = self.FindMessageTypeByName(top_name)
+ assert (sub_name in message.extensions_by_name or
+ sub_name in message.fields_by_name or
+ sub_name in message.enum_values_by_name)
+ return message.file
+ except (KeyError, AssertionError):
+ raise KeyError('Cannot find a file containing %s' % symbol)
+
+ def FindMessageTypeByName(self, full_name):
+ """Loads the named descriptor from the pool.
+
+ Args:
+ full_name (str): The full name of the descriptor to load.
+
+ Returns:
+ Descriptor: The descriptor for the named type.
+
+ Raises:
+ KeyError: if the message cannot be found in the pool.
+ """
+
+ full_name = _NormalizeFullyQualifiedName(full_name)
+ if full_name not in self._descriptors:
+ self._FindFileContainingSymbolInDb(full_name)
+ return self._descriptors[full_name]
+
+ def FindEnumTypeByName(self, full_name):
+ """Loads the named enum descriptor from the pool.
+
+ Args:
+ full_name (str): The full name of the enum descriptor to load.
+
+ Returns:
+ EnumDescriptor: The enum descriptor for the named type.
+
+ Raises:
+ KeyError: if the enum cannot be found in the pool.
+ """
+
+ full_name = _NormalizeFullyQualifiedName(full_name)
+ if full_name not in self._enum_descriptors:
+ self._FindFileContainingSymbolInDb(full_name)
+ return self._enum_descriptors[full_name]
+
+ def FindFieldByName(self, full_name):
+ """Loads the named field descriptor from the pool.
+
+ Args:
+ full_name (str): The full name of the field descriptor to load.
+
+ Returns:
+ FieldDescriptor: The field descriptor for the named field.
+
+ Raises:
+ KeyError: if the field cannot be found in the pool.
+ """
+ full_name = _NormalizeFullyQualifiedName(full_name)
+ message_name, _, field_name = full_name.rpartition('.')
+ message_descriptor = self.FindMessageTypeByName(message_name)
+ return message_descriptor.fields_by_name[field_name]
+
+ def FindOneofByName(self, full_name):
+ """Loads the named oneof descriptor from the pool.
+
+ Args:
+ full_name (str): The full name of the oneof descriptor to load.
+
+ Returns:
+ OneofDescriptor: The oneof descriptor for the named oneof.
+
+ Raises:
+ KeyError: if the oneof cannot be found in the pool.
+ """
+ full_name = _NormalizeFullyQualifiedName(full_name)
+ message_name, _, oneof_name = full_name.rpartition('.')
+ message_descriptor = self.FindMessageTypeByName(message_name)
+ return message_descriptor.oneofs_by_name[oneof_name]
+
+ def FindExtensionByName(self, full_name):
+ """Loads the named extension descriptor from the pool.
+
+ Args:
+ full_name (str): The full name of the extension descriptor to load.
+
+ Returns:
+ FieldDescriptor: The field descriptor for the named extension.
+
+ Raises:
+ KeyError: if the extension cannot be found in the pool.
+ """
+ full_name = _NormalizeFullyQualifiedName(full_name)
+ try:
+ # The proto compiler does not give any link between the FileDescriptor
+ # and top-level extensions unless the FileDescriptorProto is added to
+ # the DescriptorDatabase, but this can impact memory usage.
+ # So we registered these extensions by name explicitly.
+ return self._toplevel_extensions[full_name]
+ except KeyError:
+ pass
+ message_name, _, extension_name = full_name.rpartition('.')
+ try:
+ # Most extensions are nested inside a message.
+ scope = self.FindMessageTypeByName(message_name)
+ except KeyError:
+ # Some extensions are defined at file scope.
+ scope = self._FindFileContainingSymbolInDb(full_name)
+ return scope.extensions_by_name[extension_name]
+
+ def FindExtensionByNumber(self, message_descriptor, number):
+ """Gets the extension of the specified message with the specified number.
+
+ Extensions have to be registered to this pool by calling :func:`Add` or
+ :func:`AddExtensionDescriptor`.
+
+ Args:
+ message_descriptor (Descriptor): descriptor of the extended message.
+ number (int): Number of the extension field.
+
+ Returns:
+ FieldDescriptor: The descriptor for the extension.
+
+ Raises:
+ KeyError: when no extension with the given number is known for the
+ specified message.
+ """
+ try:
+ return self._extensions_by_number[message_descriptor][number]
+ except KeyError:
+ self._TryLoadExtensionFromDB(message_descriptor, number)
+ return self._extensions_by_number[message_descriptor][number]
+
+ def FindAllExtensions(self, message_descriptor):
+ """Gets all the known extensions of a given message.
+
+ Extensions have to be registered to this pool by build related
+ :func:`Add` or :func:`AddExtensionDescriptor`.
+
+ Args:
+ message_descriptor (Descriptor): Descriptor of the extended message.
+
+ Returns:
+ list[FieldDescriptor]: Field descriptors describing the extensions.
+ """
+ # Fallback to descriptor db if FindAllExtensionNumbers is provided.
+ if self._descriptor_db and hasattr(
+ self._descriptor_db, 'FindAllExtensionNumbers'):
+ full_name = message_descriptor.full_name
+ all_numbers = self._descriptor_db.FindAllExtensionNumbers(full_name)
+ for number in all_numbers:
+ if number in self._extensions_by_number[message_descriptor]:
+ continue
+ self._TryLoadExtensionFromDB(message_descriptor, number)
+
+ return list(self._extensions_by_number[message_descriptor].values())
+
+ def _TryLoadExtensionFromDB(self, message_descriptor, number):
+ """Try to Load extensions from descriptor db.
+
+ Args:
+ message_descriptor: descriptor of the extended message.
+ number: the extension number that needs to be loaded.
+ """
+ if not self._descriptor_db:
+ return
+ # Only supported when FindFileContainingExtension is provided.
+ if not hasattr(
+ self._descriptor_db, 'FindFileContainingExtension'):
+ return
+
+ full_name = message_descriptor.full_name
+ file_proto = self._descriptor_db.FindFileContainingExtension(
+ full_name, number)
+
+ if file_proto is None:
+ return
+
+ try:
+ self._ConvertFileProtoToFileDescriptor(file_proto)
+ except:
+ warn_msg = ('Unable to load proto file %s for extension number %d.' %
+ (file_proto.name, number))
+ warnings.warn(warn_msg, RuntimeWarning)
+
+ def FindServiceByName(self, full_name):
+ """Loads the named service descriptor from the pool.
+
+ Args:
+ full_name (str): The full name of the service descriptor to load.
+
+ Returns:
+ ServiceDescriptor: The service descriptor for the named service.
+
+ Raises:
+ KeyError: if the service cannot be found in the pool.
+ """
+ full_name = _NormalizeFullyQualifiedName(full_name)
+ if full_name not in self._service_descriptors:
+ self._FindFileContainingSymbolInDb(full_name)
+ return self._service_descriptors[full_name]
+
+ def FindMethodByName(self, full_name):
+ """Loads the named service method descriptor from the pool.
+
+ Args:
+ full_name (str): The full name of the method descriptor to load.
+
+ Returns:
+ MethodDescriptor: The method descriptor for the service method.
+
+ Raises:
+ KeyError: if the method cannot be found in the pool.
+ """
+ full_name = _NormalizeFullyQualifiedName(full_name)
+ service_name, _, method_name = full_name.rpartition('.')
+ service_descriptor = self.FindServiceByName(service_name)
+ return service_descriptor.methods_by_name[method_name]
+
+ def _FindFileContainingSymbolInDb(self, symbol):
+ """Finds the file in descriptor DB containing the specified symbol.
+
+ Args:
+ symbol (str): The name of the symbol to search for.
+
+ Returns:
+ FileDescriptor: The file that contains the specified symbol.
+
+ Raises:
+ KeyError: if the file cannot be found in the descriptor database.
+ """
+ try:
+ file_proto = self._internal_db.FindFileContainingSymbol(symbol)
+ except KeyError as error:
+ if self._descriptor_db:
+ file_proto = self._descriptor_db.FindFileContainingSymbol(symbol)
+ else:
+ raise error
+ if not file_proto:
+ raise KeyError('Cannot find a file containing %s' % symbol)
+ return self._ConvertFileProtoToFileDescriptor(file_proto)
+
+ def _ConvertFileProtoToFileDescriptor(self, file_proto):
+ """Creates a FileDescriptor from a proto or returns a cached copy.
+
+ This method also has the side effect of loading all the symbols found in
+ the file into the appropriate dictionaries in the pool.
+
+ Args:
+ file_proto: The proto to convert.
+
+ Returns:
+ A FileDescriptor matching the passed in proto.
+ """
+ if file_proto.name not in self._file_descriptors:
+ built_deps = list(self._GetDeps(file_proto.dependency))
+ direct_deps = [self.FindFileByName(n) for n in file_proto.dependency]
+ public_deps = [direct_deps[i] for i in file_proto.public_dependency]
+
+ file_descriptor = descriptor.FileDescriptor(
+ pool=self,
+ name=file_proto.name,
+ package=file_proto.package,
+ syntax=file_proto.syntax,
+ options=_OptionsOrNone(file_proto),
+ serialized_pb=file_proto.SerializeToString(),
+ dependencies=direct_deps,
+ public_dependencies=public_deps,
+ # pylint: disable=protected-access
+ create_key=descriptor._internal_create_key)
+ scope = {}
+
+ # This loop extracts all the message and enum types from all the
+ # dependencies of the file_proto. This is necessary to create the
+ # scope of available message types when defining the passed in
+ # file proto.
+ for dependency in built_deps:
+ scope.update(self._ExtractSymbols(
+ dependency.message_types_by_name.values()))
+ scope.update((_PrefixWithDot(enum.full_name), enum)
+ for enum in dependency.enum_types_by_name.values())
+
+ for message_type in file_proto.message_type:
+ message_desc = self._ConvertMessageDescriptor(
+ message_type, file_proto.package, file_descriptor, scope,
+ file_proto.syntax)
+ file_descriptor.message_types_by_name[message_desc.name] = (
+ message_desc)
+
+ for enum_type in file_proto.enum_type:
+ file_descriptor.enum_types_by_name[enum_type.name] = (
+ self._ConvertEnumDescriptor(enum_type, file_proto.package,
+ file_descriptor, None, scope, True))
+
+ for index, extension_proto in enumerate(file_proto.extension):
+ extension_desc = self._MakeFieldDescriptor(
+ extension_proto, file_proto.package, index, file_descriptor,
+ is_extension=True)
+ extension_desc.containing_type = self._GetTypeFromScope(
+ file_descriptor.package, extension_proto.extendee, scope)
+ self._SetFieldType(extension_proto, extension_desc,
+ file_descriptor.package, scope)
+ file_descriptor.extensions_by_name[extension_desc.name] = (
+ extension_desc)
+ self._file_desc_by_toplevel_extension[extension_desc.full_name] = (
+ file_descriptor)
+
+ for desc_proto in file_proto.message_type:
+ self._SetAllFieldTypes(file_proto.package, desc_proto, scope)
+
+ if file_proto.package:
+ desc_proto_prefix = _PrefixWithDot(file_proto.package)
+ else:
+ desc_proto_prefix = ''
+
+ for desc_proto in file_proto.message_type:
+ desc = self._GetTypeFromScope(
+ desc_proto_prefix, desc_proto.name, scope)
+ file_descriptor.message_types_by_name[desc_proto.name] = desc
+
+ for index, service_proto in enumerate(file_proto.service):
+ file_descriptor.services_by_name[service_proto.name] = (
+ self._MakeServiceDescriptor(service_proto, index, scope,
+ file_proto.package, file_descriptor))
+
+ self._file_descriptors[file_proto.name] = file_descriptor
+
+ # Add extensions to the pool
+ file_desc = self._file_descriptors[file_proto.name]
+ for extension in file_desc.extensions_by_name.values():
+ self._AddExtensionDescriptor(extension)
+ for message_type in file_desc.message_types_by_name.values():
+ for extension in message_type.extensions:
+ self._AddExtensionDescriptor(extension)
+
+ return file_desc
+
+ def _ConvertMessageDescriptor(self, desc_proto, package=None, file_desc=None,
+ scope=None, syntax=None):
+ """Adds the proto to the pool in the specified package.
+
+ Args:
+ desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
+ package: The package the proto should be located in.
+ file_desc: The file containing this message.
+ scope: Dict mapping short and full symbols to message and enum types.
+ syntax: string indicating syntax of the file ("proto2" or "proto3")
+
+ Returns:
+ The added descriptor.
+ """
+
+ if package:
+ desc_name = '.'.join((package, desc_proto.name))
+ else:
+ desc_name = desc_proto.name
+
+ if file_desc is None:
+ file_name = None
+ else:
+ file_name = file_desc.name
+
+ if scope is None:
+ scope = {}
+
+ nested = [
+ self._ConvertMessageDescriptor(
+ nested, desc_name, file_desc, scope, syntax)
+ for nested in desc_proto.nested_type]
+ enums = [
+ self._ConvertEnumDescriptor(enum, desc_name, file_desc, None,
+ scope, False)
+ for enum in desc_proto.enum_type]
+ fields = [self._MakeFieldDescriptor(field, desc_name, index, file_desc)
+ for index, field in enumerate(desc_proto.field)]
+ extensions = [
+ self._MakeFieldDescriptor(extension, desc_name, index, file_desc,
+ is_extension=True)
+ for index, extension in enumerate(desc_proto.extension)]
+ oneofs = [
+ # pylint: disable=g-complex-comprehension
+ descriptor.OneofDescriptor(
+ desc.name,
+ '.'.join((desc_name, desc.name)),
+ index,
+ None,
+ [],
+ _OptionsOrNone(desc),
+ # pylint: disable=protected-access
+ create_key=descriptor._internal_create_key)
+ for index, desc in enumerate(desc_proto.oneof_decl)
+ ]
+ extension_ranges = [(r.start, r.end) for r in desc_proto.extension_range]
+ if extension_ranges:
+ is_extendable = True
+ else:
+ is_extendable = False
+ desc = descriptor.Descriptor(
+ name=desc_proto.name,
+ full_name=desc_name,
+ filename=file_name,
+ containing_type=None,
+ fields=fields,
+ oneofs=oneofs,
+ nested_types=nested,
+ enum_types=enums,
+ extensions=extensions,
+ options=_OptionsOrNone(desc_proto),
+ is_extendable=is_extendable,
+ extension_ranges=extension_ranges,
+ file=file_desc,
+ serialized_start=None,
+ serialized_end=None,
+ syntax=syntax,
+ # pylint: disable=protected-access
+ create_key=descriptor._internal_create_key)
+ for nested in desc.nested_types:
+ nested.containing_type = desc
+ for enum in desc.enum_types:
+ enum.containing_type = desc
+ for field_index, field_desc in enumerate(desc_proto.field):
+ if field_desc.HasField('oneof_index'):
+ oneof_index = field_desc.oneof_index
+ oneofs[oneof_index].fields.append(fields[field_index])
+ fields[field_index].containing_oneof = oneofs[oneof_index]
+
+ scope[_PrefixWithDot(desc_name)] = desc
+ self._CheckConflictRegister(desc, desc.full_name, desc.file.name)
+ self._descriptors[desc_name] = desc
+ return desc
+
+ def _ConvertEnumDescriptor(self, enum_proto, package=None, file_desc=None,
+ containing_type=None, scope=None, top_level=False):
+ """Make a protobuf EnumDescriptor given an EnumDescriptorProto protobuf.
+
+ Args:
+ enum_proto: The descriptor_pb2.EnumDescriptorProto protobuf message.
+ package: Optional package name for the new message EnumDescriptor.
+ file_desc: The file containing the enum descriptor.
+ containing_type: The type containing this enum.
+ scope: Scope containing available types.
+ top_level: If True, the enum is a top level symbol. If False, the enum
+ is defined inside a message.
+
+ Returns:
+ The added descriptor
+ """
+
+ if package:
+ enum_name = '.'.join((package, enum_proto.name))
+ else:
+ enum_name = enum_proto.name
+
+ if file_desc is None:
+ file_name = None
+ else:
+ file_name = file_desc.name
+
+ values = [self._MakeEnumValueDescriptor(value, index)
+ for index, value in enumerate(enum_proto.value)]
+ desc = descriptor.EnumDescriptor(name=enum_proto.name,
+ full_name=enum_name,
+ filename=file_name,
+ file=file_desc,
+ values=values,
+ containing_type=containing_type,
+ options=_OptionsOrNone(enum_proto),
+ # pylint: disable=protected-access
+ create_key=descriptor._internal_create_key)
+ scope['.%s' % enum_name] = desc
+ self._CheckConflictRegister(desc, desc.full_name, desc.file.name)
+ self._enum_descriptors[enum_name] = desc
+
+ # Add top level enum values.
+ if top_level:
+ for value in values:
+ full_name = _NormalizeFullyQualifiedName(
+ '.'.join((package, value.name)))
+ self._CheckConflictRegister(value, full_name, file_name)
+ self._top_enum_values[full_name] = value
+
+ return desc
+
+ def _MakeFieldDescriptor(self, field_proto, message_name, index,
+ file_desc, is_extension=False):
+ """Creates a field descriptor from a FieldDescriptorProto.
+
+ For message and enum type fields, this method will do a look up
+ in the pool for the appropriate descriptor for that type. If it
+ is unavailable, it will fall back to the _source function to
+ create it. If this type is still unavailable, construction will
+ fail.
+
+ Args:
+ field_proto: The proto describing the field.
+ message_name: The name of the containing message.
+ index: Index of the field
+ file_desc: The file containing the field descriptor.
+ is_extension: Indication that this field is for an extension.
+
+ Returns:
+ An initialized FieldDescriptor object
+ """
+
+ if message_name:
+ full_name = '.'.join((message_name, field_proto.name))
+ else:
+ full_name = field_proto.name
+
+ if field_proto.json_name:
+ json_name = field_proto.json_name
+ else:
+ json_name = None
+
+ return descriptor.FieldDescriptor(
+ name=field_proto.name,
+ full_name=full_name,
+ index=index,
+ number=field_proto.number,
+ type=field_proto.type,
+ cpp_type=None,
+ message_type=None,
+ enum_type=None,
+ containing_type=None,
+ label=field_proto.label,
+ has_default_value=False,
+ default_value=None,
+ is_extension=is_extension,
+ extension_scope=None,
+ options=_OptionsOrNone(field_proto),
+ json_name=json_name,
+ file=file_desc,
+ # pylint: disable=protected-access
+ create_key=descriptor._internal_create_key)
+
+ def _SetAllFieldTypes(self, package, desc_proto, scope):
+ """Sets all the descriptor's fields's types.
+
+ This method also sets the containing types on any extensions.
+
+ Args:
+ package: The current package of desc_proto.
+ desc_proto: The message descriptor to update.
+ scope: Enclosing scope of available types.
+ """
+
+ package = _PrefixWithDot(package)
+
+ main_desc = self._GetTypeFromScope(package, desc_proto.name, scope)
+
+ if package == '.':
+ nested_package = _PrefixWithDot(desc_proto.name)
+ else:
+ nested_package = '.'.join([package, desc_proto.name])
+
+ for field_proto, field_desc in zip(desc_proto.field, main_desc.fields):
+ self._SetFieldType(field_proto, field_desc, nested_package, scope)
+
+ for extension_proto, extension_desc in (
+ zip(desc_proto.extension, main_desc.extensions)):
+ extension_desc.containing_type = self._GetTypeFromScope(
+ nested_package, extension_proto.extendee, scope)
+ self._SetFieldType(extension_proto, extension_desc, nested_package, scope)
+
+ for nested_type in desc_proto.nested_type:
+ self._SetAllFieldTypes(nested_package, nested_type, scope)
+
+ def _SetFieldType(self, field_proto, field_desc, package, scope):
+ """Sets the field's type, cpp_type, message_type and enum_type.
+
+ Args:
+ field_proto: Data about the field in proto format.
+ field_desc: The descriptor to modify.
+ package: The package the field's container is in.
+ scope: Enclosing scope of available types.
+ """
+ if field_proto.type_name:
+ desc = self._GetTypeFromScope(package, field_proto.type_name, scope)
+ else:
+ desc = None
+
+ if not field_proto.HasField('type'):
+ if isinstance(desc, descriptor.Descriptor):
+ field_proto.type = descriptor.FieldDescriptor.TYPE_MESSAGE
+ else:
+ field_proto.type = descriptor.FieldDescriptor.TYPE_ENUM
+
+ field_desc.cpp_type = descriptor.FieldDescriptor.ProtoTypeToCppProtoType(
+ field_proto.type)
+
+ if (field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE
+ or field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP):
+ field_desc.message_type = desc
+
+ if field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
+ field_desc.enum_type = desc
+
+ if field_proto.label == descriptor.FieldDescriptor.LABEL_REPEATED:
+ field_desc.has_default_value = False
+ field_desc.default_value = []
+ elif field_proto.HasField('default_value'):
+ field_desc.has_default_value = True
+ if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or
+ field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT):
+ field_desc.default_value = float(field_proto.default_value)
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING:
+ field_desc.default_value = field_proto.default_value
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL:
+ field_desc.default_value = field_proto.default_value.lower() == 'true'
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
+ field_desc.default_value = field_desc.enum_type.values_by_name[
+ field_proto.default_value].number
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES:
+ field_desc.default_value = text_encoding.CUnescape(
+ field_proto.default_value)
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE:
+ field_desc.default_value = None
+ else:
+ # All other types are of the "int" type.
+ field_desc.default_value = int(field_proto.default_value)
+ else:
+ field_desc.has_default_value = False
+ if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or
+ field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT):
+ field_desc.default_value = 0.0
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING:
+ field_desc.default_value = u''
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL:
+ field_desc.default_value = False
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
+ field_desc.default_value = field_desc.enum_type.values[0].number
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES:
+ field_desc.default_value = b''
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE:
+ field_desc.default_value = None
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP:
+ field_desc.default_value = None
+ else:
+ # All other types are of the "int" type.
+ field_desc.default_value = 0
+
+ field_desc.type = field_proto.type
+
+ def _MakeEnumValueDescriptor(self, value_proto, index):
+ """Creates a enum value descriptor object from a enum value proto.
+
+ Args:
+ value_proto: The proto describing the enum value.
+ index: The index of the enum value.
+
+ Returns:
+ An initialized EnumValueDescriptor object.
+ """
+
+ return descriptor.EnumValueDescriptor(
+ name=value_proto.name,
+ index=index,
+ number=value_proto.number,
+ options=_OptionsOrNone(value_proto),
+ type=None,
+ # pylint: disable=protected-access
+ create_key=descriptor._internal_create_key)
+
+ def _MakeServiceDescriptor(self, service_proto, service_index, scope,
+ package, file_desc):
+ """Make a protobuf ServiceDescriptor given a ServiceDescriptorProto.
+
+ Args:
+ service_proto: The descriptor_pb2.ServiceDescriptorProto protobuf message.
+ service_index: The index of the service in the File.
+ scope: Dict mapping short and full symbols to message and enum types.
+ package: Optional package name for the new message EnumDescriptor.
+ file_desc: The file containing the service descriptor.
+
+ Returns:
+ The added descriptor.
+ """
+
+ if package:
+ service_name = '.'.join((package, service_proto.name))
+ else:
+ service_name = service_proto.name
+
+ methods = [self._MakeMethodDescriptor(method_proto, service_name, package,
+ scope, index)
+ for index, method_proto in enumerate(service_proto.method)]
+ desc = descriptor.ServiceDescriptor(
+ name=service_proto.name,
+ full_name=service_name,
+ index=service_index,
+ methods=methods,
+ options=_OptionsOrNone(service_proto),
+ file=file_desc,
+ # pylint: disable=protected-access
+ create_key=descriptor._internal_create_key)
+ self._CheckConflictRegister(desc, desc.full_name, desc.file.name)
+ self._service_descriptors[service_name] = desc
+ return desc
+
+ def _MakeMethodDescriptor(self, method_proto, service_name, package, scope,
+ index):
+ """Creates a method descriptor from a MethodDescriptorProto.
+
+ Args:
+ method_proto: The proto describing the method.
+ service_name: The name of the containing service.
+ package: Optional package name to look up for types.
+ scope: Scope containing available types.
+ index: Index of the method in the service.
+
+ Returns:
+ An initialized MethodDescriptor object.
+ """
+ full_name = '.'.join((service_name, method_proto.name))
+ input_type = self._GetTypeFromScope(
+ package, method_proto.input_type, scope)
+ output_type = self._GetTypeFromScope(
+ package, method_proto.output_type, scope)
+ return descriptor.MethodDescriptor(
+ name=method_proto.name,
+ full_name=full_name,
+ index=index,
+ containing_service=None,
+ input_type=input_type,
+ output_type=output_type,
+ options=_OptionsOrNone(method_proto),
+ # pylint: disable=protected-access
+ create_key=descriptor._internal_create_key)
+
+ def _ExtractSymbols(self, descriptors):
+ """Pulls out all the symbols from descriptor protos.
+
+ Args:
+ descriptors: The messages to extract descriptors from.
+ Yields:
+ A two element tuple of the type name and descriptor object.
+ """
+
+ for desc in descriptors:
+ yield (_PrefixWithDot(desc.full_name), desc)
+ for symbol in self._ExtractSymbols(desc.nested_types):
+ yield symbol
+ for enum in desc.enum_types:
+ yield (_PrefixWithDot(enum.full_name), enum)
+
+ def _GetDeps(self, dependencies):
+ """Recursively finds dependencies for file protos.
+
+ Args:
+ dependencies: The names of the files being depended on.
+
+ Yields:
+ Each direct and indirect dependency.
+ """
+
+ for dependency in dependencies:
+ dep_desc = self.FindFileByName(dependency)
+ yield dep_desc
+ for parent_dep in dep_desc.dependencies:
+ yield parent_dep
+
+ def _GetTypeFromScope(self, package, type_name, scope):
+ """Finds a given type name in the current scope.
+
+ Args:
+ package: The package the proto should be located in.
+ type_name: The name of the type to be found in the scope.
+ scope: Dict mapping short and full symbols to message and enum types.
+
+ Returns:
+ The descriptor for the requested type.
+ """
+ if type_name not in scope:
+ components = _PrefixWithDot(package).split('.')
+ while components:
+ possible_match = '.'.join(components + [type_name])
+ if possible_match in scope:
+ type_name = possible_match
+ break
+ else:
+ components.pop(-1)
+ return scope[type_name]
+
+
+def _PrefixWithDot(name):
+ return name if name.startswith('.') else '.%s' % name
+
+
+if _USE_C_DESCRIPTORS:
+ # TODO(amauryfa): This pool could be constructed from Python code, when we
+ # support a flag like 'use_cpp_generated_pool=True'.
+ # pylint: disable=protected-access
+ _DEFAULT = descriptor._message.default_pool
+else:
+ _DEFAULT = DescriptorPool()
+
+
+def Default():
+ return _DEFAULT
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/duration_pb2.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/duration_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..60f58678a118a1aac98fd7604b1d9bc97de98923
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/duration_pb2.py
@@ -0,0 +1,35 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/protobuf/duration.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/duration.proto\x12\x0fgoogle.protobuf\"*\n\x08\x44uration\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42\x83\x01\n\x13\x63om.google.protobufB\rDurationProtoP\x01Z1google.golang.org/protobuf/types/known/durationpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
+
+
+
+_DURATION = DESCRIPTOR.message_types_by_name['Duration']
+Duration = _reflection.GeneratedProtocolMessageType('Duration', (_message.Message,), {
+ 'DESCRIPTOR' : _DURATION,
+ '__module__' : 'google.protobuf.duration_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.Duration)
+ })
+_sym_db.RegisterMessage(Duration)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\rDurationProtoP\001Z1google.golang.org/protobuf/types/known/durationpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
+ _DURATION._serialized_start=51
+ _DURATION._serialized_end=93
+# @@protoc_insertion_point(module_scope)
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/empty_pb2.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/empty_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..aecb7af5d7e14bc8e71b8e1b6b2008ccf0ef75d2
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/empty_pb2.py
@@ -0,0 +1,35 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/protobuf/empty.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1bgoogle/protobuf/empty.proto\x12\x0fgoogle.protobuf\"\x07\n\x05\x45mptyB}\n\x13\x63om.google.protobufB\nEmptyProtoP\x01Z.google.golang.org/protobuf/types/known/emptypb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
+
+
+
+_EMPTY = DESCRIPTOR.message_types_by_name['Empty']
+Empty = _reflection.GeneratedProtocolMessageType('Empty', (_message.Message,), {
+ 'DESCRIPTOR' : _EMPTY,
+ '__module__' : 'google.protobuf.empty_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.Empty)
+ })
+_sym_db.RegisterMessage(Empty)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\nEmptyProtoP\001Z.google.golang.org/protobuf/types/known/emptypb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
+ _EMPTY._serialized_start=48
+ _EMPTY._serialized_end=55
+# @@protoc_insertion_point(module_scope)
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/field_mask_pb2.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/field_mask_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..46e48e8e7cf3a3355f54b3cc4199b85c7a24de68
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/field_mask_pb2.py
@@ -0,0 +1,35 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/protobuf/field_mask.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n google/protobuf/field_mask.proto\x12\x0fgoogle.protobuf\"\x1a\n\tFieldMask\x12\r\n\x05paths\x18\x01 \x03(\tB\x85\x01\n\x13\x63om.google.protobufB\x0e\x46ieldMaskProtoP\x01Z2google.golang.org/protobuf/types/known/fieldmaskpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
+
+
+
+_FIELDMASK = DESCRIPTOR.message_types_by_name['FieldMask']
+FieldMask = _reflection.GeneratedProtocolMessageType('FieldMask', (_message.Message,), {
+ 'DESCRIPTOR' : _FIELDMASK,
+ '__module__' : 'google.protobuf.field_mask_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.FieldMask)
+ })
+_sym_db.RegisterMessage(FieldMask)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\016FieldMaskProtoP\001Z2google.golang.org/protobuf/types/known/fieldmaskpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
+ _FIELDMASK._serialized_start=53
+ _FIELDMASK._serialized_end=79
+# @@protoc_insertion_point(module_scope)
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__init__.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d21e8b10d4422c14802b831eb9f7ca8154b1cd28
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/api_implementation.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/api_implementation.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..595ec402c3aad99c9b4f9733987a97b9309ed65f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/api_implementation.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/containers.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/containers.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..cd8fc75c0e83d38c7dda6254ae839035280a9eb3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/containers.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/decoder.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/decoder.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2adce815a8fa16246867d60329cbbf19fc6b2c0c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/decoder.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/encoder.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/encoder.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..88fb4d7e17decbea553e26818eb7d9e06edc83df
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/encoder.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/enum_type_wrapper.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/enum_type_wrapper.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..30ba70139015a50487e0c288b8995fed5d28313b
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/enum_type_wrapper.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/extension_dict.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/extension_dict.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..36ea60da8208e62cfd3db79499233a26531bb444
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/extension_dict.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/message_listener.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/message_listener.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..91deee41c55cb5d4d3dbd555bc10a4bde7155cb0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/message_listener.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/python_message.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/python_message.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7907c6ef48abaea4891a9736f12100b63f7d1299
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/python_message.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/type_checkers.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/type_checkers.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5d649081b81af394f5e6166249dff48509c4124d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/type_checkers.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/well_known_types.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/well_known_types.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0b4cf86c0f0c80aa4c5c339f97bb4c5c48c72d50
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/well_known_types.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/wire_format.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/wire_format.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f097d2f89e0597005b556fd762720a8629bda02d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/__pycache__/wire_format.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/_api_implementation.cpython-38-x86_64-linux-gnu.so b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/_api_implementation.cpython-38-x86_64-linux-gnu.so
new file mode 100755
index 0000000000000000000000000000000000000000..e2b11aa47e45089d688d4c64514601c66d3e54c3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/_api_implementation.cpython-38-x86_64-linux-gnu.so differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/api_implementation.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/api_implementation.py
new file mode 100644
index 0000000000000000000000000000000000000000..7fef2376707df2419f413c17c8159108134e3b61
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/api_implementation.py
@@ -0,0 +1,112 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Determine which implementation of the protobuf API is used in this process.
+"""
+
+import os
+import sys
+import warnings
+
+try:
+ # pylint: disable=g-import-not-at-top
+ from google.protobuf.internal import _api_implementation
+ # The compile-time constants in the _api_implementation module can be used to
+ # switch to a certain implementation of the Python API at build time.
+ _api_version = _api_implementation.api_version
+except ImportError:
+ _api_version = -1 # Unspecified by compiler flags.
+
+if _api_version == 1:
+ raise ValueError('api_version=1 is no longer supported.')
+
+
+_default_implementation_type = ('cpp' if _api_version > 0 else 'python')
+
+
+# This environment variable can be used to switch to a certain implementation
+# of the Python API, overriding the compile-time constants in the
+# _api_implementation module. Right now only 'python' and 'cpp' are valid
+# values. Any other value will be ignored.
+_implementation_type = os.getenv('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION',
+ _default_implementation_type)
+
+if _implementation_type != 'python':
+ _implementation_type = 'cpp'
+
+if 'PyPy' in sys.version and _implementation_type == 'cpp':
+ warnings.warn('PyPy does not work yet with cpp protocol buffers. '
+ 'Falling back to the python implementation.')
+ _implementation_type = 'python'
+
+
+# Detect if serialization should be deterministic by default
+try:
+ # The presence of this module in a build allows the proto implementation to
+ # be upgraded merely via build deps.
+ #
+ # NOTE: Merely importing this automatically enables deterministic proto
+ # serialization for C++ code, but we still need to export it as a boolean so
+ # that we can do the same for `_implementation_type == 'python'`.
+ #
+ # NOTE2: It is possible for C++ code to enable deterministic serialization by
+ # default _without_ affecting Python code, if the C++ implementation is not in
+ # use by this module. That is intended behavior, so we don't actually expose
+ # this boolean outside of this module.
+ #
+ # pylint: disable=g-import-not-at-top,unused-import
+ from google.protobuf import enable_deterministic_proto_serialization
+ _python_deterministic_proto_serialization = True
+except ImportError:
+ _python_deterministic_proto_serialization = False
+
+
+# Usage of this function is discouraged. Clients shouldn't care which
+# implementation of the API is in use. Note that there is no guarantee
+# that differences between APIs will be maintained.
+# Please don't use this function if possible.
+def Type():
+ return _implementation_type
+
+
+def _SetType(implementation_type):
+ """Never use! Only for protobuf benchmark."""
+ global _implementation_type
+ _implementation_type = implementation_type
+
+
+# See comment on 'Type' above.
+def Version():
+ return 2
+
+
+# For internal use only
+def IsPythonDefaultSerializationDeterministic():
+ return _python_deterministic_proto_serialization
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/containers.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/containers.py
new file mode 100644
index 0000000000000000000000000000000000000000..f0c06df8dd1c49b0b317020df73e00f2d4de620b
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/containers.py
@@ -0,0 +1,643 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Contains container classes to represent different protocol buffer types.
+
+This file defines container classes which represent categories of protocol
+buffer field types which need extra maintenance. Currently these categories
+are:
+
+- Repeated scalar fields - These are all repeated fields which aren't
+ composite (e.g. they are of simple types like int32, string, etc).
+- Repeated composite fields - Repeated fields which are composite. This
+ includes groups and nested messages.
+"""
+
+__author__ = 'petar@google.com (Petar Petrov)'
+
+import collections.abc
+
+
+class BaseContainer(object):
+
+ """Base container class."""
+
+ # Minimizes memory usage and disallows assignment to other attributes.
+ __slots__ = ['_message_listener', '_values']
+
+ def __init__(self, message_listener):
+ """
+ Args:
+ message_listener: A MessageListener implementation.
+ The RepeatedScalarFieldContainer will call this object's
+ Modified() method when it is modified.
+ """
+ self._message_listener = message_listener
+ self._values = []
+
+ def __getitem__(self, key):
+ """Retrieves item by the specified key."""
+ return self._values[key]
+
+ def __len__(self):
+ """Returns the number of elements in the container."""
+ return len(self._values)
+
+ def __ne__(self, other):
+ """Checks if another instance isn't equal to this one."""
+ # The concrete classes should define __eq__.
+ return not self == other
+
+ def __hash__(self):
+ raise TypeError('unhashable object')
+
+ def __repr__(self):
+ return repr(self._values)
+
+ def sort(self, *args, **kwargs):
+ # Continue to support the old sort_function keyword argument.
+ # This is expected to be a rare occurrence, so use LBYL to avoid
+ # the overhead of actually catching KeyError.
+ if 'sort_function' in kwargs:
+ kwargs['cmp'] = kwargs.pop('sort_function')
+ self._values.sort(*args, **kwargs)
+
+ def reverse(self):
+ self._values.reverse()
+
+
+collections.abc.MutableSequence.register(BaseContainer)
+
+
+class RepeatedScalarFieldContainer(BaseContainer):
+ """Simple, type-checked, list-like container for holding repeated scalars."""
+
+ # Disallows assignment to other attributes.
+ __slots__ = ['_type_checker']
+
+ def __init__(self, message_listener, type_checker):
+ """Args:
+
+ message_listener: A MessageListener implementation. The
+ RepeatedScalarFieldContainer will call this object's Modified() method
+ when it is modified.
+ type_checker: A type_checkers.ValueChecker instance to run on elements
+ inserted into this container.
+ """
+ super(RepeatedScalarFieldContainer, self).__init__(message_listener)
+ self._type_checker = type_checker
+
+ def append(self, value):
+ """Appends an item to the list. Similar to list.append()."""
+ self._values.append(self._type_checker.CheckValue(value))
+ if not self._message_listener.dirty:
+ self._message_listener.Modified()
+
+ def insert(self, key, value):
+ """Inserts the item at the specified position. Similar to list.insert()."""
+ self._values.insert(key, self._type_checker.CheckValue(value))
+ if not self._message_listener.dirty:
+ self._message_listener.Modified()
+
+ def extend(self, elem_seq):
+ """Extends by appending the given iterable. Similar to list.extend()."""
+
+ if elem_seq is None:
+ return
+ try:
+ elem_seq_iter = iter(elem_seq)
+ except TypeError:
+ if not elem_seq:
+ # silently ignore falsy inputs :-/.
+ # TODO(ptucker): Deprecate this behavior. b/18413862
+ return
+ raise
+
+ new_values = [self._type_checker.CheckValue(elem) for elem in elem_seq_iter]
+ if new_values:
+ self._values.extend(new_values)
+ self._message_listener.Modified()
+
+ def MergeFrom(self, other):
+ """Appends the contents of another repeated field of the same type to this
+ one. We do not check the types of the individual fields.
+ """
+ self._values.extend(other._values)
+ self._message_listener.Modified()
+
+ def remove(self, elem):
+ """Removes an item from the list. Similar to list.remove()."""
+ self._values.remove(elem)
+ self._message_listener.Modified()
+
+ def pop(self, key=-1):
+ """Removes and returns an item at a given index. Similar to list.pop()."""
+ value = self._values[key]
+ self.__delitem__(key)
+ return value
+
+ def __setitem__(self, key, value):
+ """Sets the item on the specified position."""
+ if isinstance(key, slice): # PY3
+ if key.step is not None:
+ raise ValueError('Extended slices not supported')
+ self.__setslice__(key.start, key.stop, value)
+ else:
+ self._values[key] = self._type_checker.CheckValue(value)
+ self._message_listener.Modified()
+
+ def __getslice__(self, start, stop):
+ """Retrieves the subset of items from between the specified indices."""
+ return self._values[start:stop]
+
+ def __setslice__(self, start, stop, values):
+ """Sets the subset of items from between the specified indices."""
+ new_values = []
+ for value in values:
+ new_values.append(self._type_checker.CheckValue(value))
+ self._values[start:stop] = new_values
+ self._message_listener.Modified()
+
+ def __delitem__(self, key):
+ """Deletes the item at the specified position."""
+ del self._values[key]
+ self._message_listener.Modified()
+
+ def __delslice__(self, start, stop):
+ """Deletes the subset of items from between the specified indices."""
+ del self._values[start:stop]
+ self._message_listener.Modified()
+
+ def __eq__(self, other):
+ """Compares the current instance with another one."""
+ if self is other:
+ return True
+ # Special case for the same type which should be common and fast.
+ if isinstance(other, self.__class__):
+ return other._values == self._values
+ # We are presumably comparing against some other sequence type.
+ return other == self._values
+
+
+class RepeatedCompositeFieldContainer(BaseContainer):
+
+ """Simple, list-like container for holding repeated composite fields."""
+
+ # Disallows assignment to other attributes.
+ __slots__ = ['_message_descriptor']
+
+ def __init__(self, message_listener, message_descriptor):
+ """
+ Note that we pass in a descriptor instead of the generated directly,
+ since at the time we construct a _RepeatedCompositeFieldContainer we
+ haven't yet necessarily initialized the type that will be contained in the
+ container.
+
+ Args:
+ message_listener: A MessageListener implementation.
+ The RepeatedCompositeFieldContainer will call this object's
+ Modified() method when it is modified.
+ message_descriptor: A Descriptor instance describing the protocol type
+ that should be present in this container. We'll use the
+ _concrete_class field of this descriptor when the client calls add().
+ """
+ super(RepeatedCompositeFieldContainer, self).__init__(message_listener)
+ self._message_descriptor = message_descriptor
+
+ def add(self, **kwargs):
+ """Adds a new element at the end of the list and returns it. Keyword
+ arguments may be used to initialize the element.
+ """
+ new_element = self._message_descriptor._concrete_class(**kwargs)
+ new_element._SetListener(self._message_listener)
+ self._values.append(new_element)
+ if not self._message_listener.dirty:
+ self._message_listener.Modified()
+ return new_element
+
+ def append(self, value):
+ """Appends one element by copying the message."""
+ new_element = self._message_descriptor._concrete_class()
+ new_element._SetListener(self._message_listener)
+ new_element.CopyFrom(value)
+ self._values.append(new_element)
+ if not self._message_listener.dirty:
+ self._message_listener.Modified()
+
+ def insert(self, key, value):
+ """Inserts the item at the specified position by copying."""
+ new_element = self._message_descriptor._concrete_class()
+ new_element._SetListener(self._message_listener)
+ new_element.CopyFrom(value)
+ self._values.insert(key, new_element)
+ if not self._message_listener.dirty:
+ self._message_listener.Modified()
+
+ def extend(self, elem_seq):
+ """Extends by appending the given sequence of elements of the same type
+
+ as this one, copying each individual message.
+ """
+ message_class = self._message_descriptor._concrete_class
+ listener = self._message_listener
+ values = self._values
+ for message in elem_seq:
+ new_element = message_class()
+ new_element._SetListener(listener)
+ new_element.MergeFrom(message)
+ values.append(new_element)
+ listener.Modified()
+
+ def MergeFrom(self, other):
+ """Appends the contents of another repeated field of the same type to this
+ one, copying each individual message.
+ """
+ self.extend(other._values)
+
+ def remove(self, elem):
+ """Removes an item from the list. Similar to list.remove()."""
+ self._values.remove(elem)
+ self._message_listener.Modified()
+
+ def pop(self, key=-1):
+ """Removes and returns an item at a given index. Similar to list.pop()."""
+ value = self._values[key]
+ self.__delitem__(key)
+ return value
+
+ def __getslice__(self, start, stop):
+ """Retrieves the subset of items from between the specified indices."""
+ return self._values[start:stop]
+
+ def __delitem__(self, key):
+ """Deletes the item at the specified position."""
+ del self._values[key]
+ self._message_listener.Modified()
+
+ def __delslice__(self, start, stop):
+ """Deletes the subset of items from between the specified indices."""
+ del self._values[start:stop]
+ self._message_listener.Modified()
+
+ def __eq__(self, other):
+ """Compares the current instance with another one."""
+ if self is other:
+ return True
+ if not isinstance(other, self.__class__):
+ raise TypeError('Can only compare repeated composite fields against '
+ 'other repeated composite fields.')
+ return self._values == other._values
+
+
+class ScalarMap(collections.abc.MutableMapping):
+
+ """Simple, type-checked, dict-like container for holding repeated scalars."""
+
+ # Disallows assignment to other attributes.
+ __slots__ = ['_key_checker', '_value_checker', '_values', '_message_listener',
+ '_entry_descriptor']
+
+ def __init__(self, message_listener, key_checker, value_checker,
+ entry_descriptor):
+ """
+ Args:
+ message_listener: A MessageListener implementation.
+ The ScalarMap will call this object's Modified() method when it
+ is modified.
+ key_checker: A type_checkers.ValueChecker instance to run on keys
+ inserted into this container.
+ value_checker: A type_checkers.ValueChecker instance to run on values
+ inserted into this container.
+ entry_descriptor: The MessageDescriptor of a map entry: key and value.
+ """
+ self._message_listener = message_listener
+ self._key_checker = key_checker
+ self._value_checker = value_checker
+ self._entry_descriptor = entry_descriptor
+ self._values = {}
+
+ def __getitem__(self, key):
+ try:
+ return self._values[key]
+ except KeyError:
+ key = self._key_checker.CheckValue(key)
+ val = self._value_checker.DefaultValue()
+ self._values[key] = val
+ return val
+
+ def __contains__(self, item):
+ # We check the key's type to match the strong-typing flavor of the API.
+ # Also this makes it easier to match the behavior of the C++ implementation.
+ self._key_checker.CheckValue(item)
+ return item in self._values
+
+ # We need to override this explicitly, because our defaultdict-like behavior
+ # will make the default implementation (from our base class) always insert
+ # the key.
+ def get(self, key, default=None):
+ if key in self:
+ return self[key]
+ else:
+ return default
+
+ def __setitem__(self, key, value):
+ checked_key = self._key_checker.CheckValue(key)
+ checked_value = self._value_checker.CheckValue(value)
+ self._values[checked_key] = checked_value
+ self._message_listener.Modified()
+
+ def __delitem__(self, key):
+ del self._values[key]
+ self._message_listener.Modified()
+
+ def __len__(self):
+ return len(self._values)
+
+ def __iter__(self):
+ return iter(self._values)
+
+ def __repr__(self):
+ return repr(self._values)
+
+ def MergeFrom(self, other):
+ self._values.update(other._values)
+ self._message_listener.Modified()
+
+ def InvalidateIterators(self):
+ # It appears that the only way to reliably invalidate iterators to
+ # self._values is to ensure that its size changes.
+ original = self._values
+ self._values = original.copy()
+ original[None] = None
+
+ # This is defined in the abstract base, but we can do it much more cheaply.
+ def clear(self):
+ self._values.clear()
+ self._message_listener.Modified()
+
+ def GetEntryClass(self):
+ return self._entry_descriptor._concrete_class
+
+
+class MessageMap(collections.abc.MutableMapping):
+
+ """Simple, type-checked, dict-like container for with submessage values."""
+
+ # Disallows assignment to other attributes.
+ __slots__ = ['_key_checker', '_values', '_message_listener',
+ '_message_descriptor', '_entry_descriptor']
+
+ def __init__(self, message_listener, message_descriptor, key_checker,
+ entry_descriptor):
+ """
+ Args:
+ message_listener: A MessageListener implementation.
+ The ScalarMap will call this object's Modified() method when it
+ is modified.
+ key_checker: A type_checkers.ValueChecker instance to run on keys
+ inserted into this container.
+ value_checker: A type_checkers.ValueChecker instance to run on values
+ inserted into this container.
+ entry_descriptor: The MessageDescriptor of a map entry: key and value.
+ """
+ self._message_listener = message_listener
+ self._message_descriptor = message_descriptor
+ self._key_checker = key_checker
+ self._entry_descriptor = entry_descriptor
+ self._values = {}
+
+ def __getitem__(self, key):
+ key = self._key_checker.CheckValue(key)
+ try:
+ return self._values[key]
+ except KeyError:
+ new_element = self._message_descriptor._concrete_class()
+ new_element._SetListener(self._message_listener)
+ self._values[key] = new_element
+ self._message_listener.Modified()
+
+ return new_element
+
+ def get_or_create(self, key):
+ """get_or_create() is an alias for getitem (ie. map[key]).
+
+ Args:
+ key: The key to get or create in the map.
+
+ This is useful in cases where you want to be explicit that the call is
+ mutating the map. This can avoid lint errors for statements like this
+ that otherwise would appear to be pointless statements:
+
+ msg.my_map[key]
+ """
+ return self[key]
+
+ # We need to override this explicitly, because our defaultdict-like behavior
+ # will make the default implementation (from our base class) always insert
+ # the key.
+ def get(self, key, default=None):
+ if key in self:
+ return self[key]
+ else:
+ return default
+
+ def __contains__(self, item):
+ item = self._key_checker.CheckValue(item)
+ return item in self._values
+
+ def __setitem__(self, key, value):
+ raise ValueError('May not set values directly, call my_map[key].foo = 5')
+
+ def __delitem__(self, key):
+ key = self._key_checker.CheckValue(key)
+ del self._values[key]
+ self._message_listener.Modified()
+
+ def __len__(self):
+ return len(self._values)
+
+ def __iter__(self):
+ return iter(self._values)
+
+ def __repr__(self):
+ return repr(self._values)
+
+ def MergeFrom(self, other):
+ # pylint: disable=protected-access
+ for key in other._values:
+ # According to documentation: "When parsing from the wire or when merging,
+ # if there are duplicate map keys the last key seen is used".
+ if key in self:
+ del self[key]
+ self[key].CopyFrom(other[key])
+ # self._message_listener.Modified() not required here, because
+ # mutations to submessages already propagate.
+
+ def InvalidateIterators(self):
+ # It appears that the only way to reliably invalidate iterators to
+ # self._values is to ensure that its size changes.
+ original = self._values
+ self._values = original.copy()
+ original[None] = None
+
+ # This is defined in the abstract base, but we can do it much more cheaply.
+ def clear(self):
+ self._values.clear()
+ self._message_listener.Modified()
+
+ def GetEntryClass(self):
+ return self._entry_descriptor._concrete_class
+
+
+class _UnknownField(object):
+
+ """A parsed unknown field."""
+
+ # Disallows assignment to other attributes.
+ __slots__ = ['_field_number', '_wire_type', '_data']
+
+ def __init__(self, field_number, wire_type, data):
+ self._field_number = field_number
+ self._wire_type = wire_type
+ self._data = data
+ return
+
+ def __lt__(self, other):
+ # pylint: disable=protected-access
+ return self._field_number < other._field_number
+
+ def __eq__(self, other):
+ if self is other:
+ return True
+ # pylint: disable=protected-access
+ return (self._field_number == other._field_number and
+ self._wire_type == other._wire_type and
+ self._data == other._data)
+
+
+class UnknownFieldRef(object):
+
+ def __init__(self, parent, index):
+ self._parent = parent
+ self._index = index
+ return
+
+ def _check_valid(self):
+ if not self._parent:
+ raise ValueError('UnknownField does not exist. '
+ 'The parent message might be cleared.')
+ if self._index >= len(self._parent):
+ raise ValueError('UnknownField does not exist. '
+ 'The parent message might be cleared.')
+
+ @property
+ def field_number(self):
+ self._check_valid()
+ # pylint: disable=protected-access
+ return self._parent._internal_get(self._index)._field_number
+
+ @property
+ def wire_type(self):
+ self._check_valid()
+ # pylint: disable=protected-access
+ return self._parent._internal_get(self._index)._wire_type
+
+ @property
+ def data(self):
+ self._check_valid()
+ # pylint: disable=protected-access
+ return self._parent._internal_get(self._index)._data
+
+
+class UnknownFieldSet(object):
+
+ """UnknownField container"""
+
+ # Disallows assignment to other attributes.
+ __slots__ = ['_values']
+
+ def __init__(self):
+ self._values = []
+
+ def __getitem__(self, index):
+ if self._values is None:
+ raise ValueError('UnknownFields does not exist. '
+ 'The parent message might be cleared.')
+ size = len(self._values)
+ if index < 0:
+ index += size
+ if index < 0 or index >= size:
+ raise IndexError('index %d out of range'.index)
+
+ return UnknownFieldRef(self, index)
+
+ def _internal_get(self, index):
+ return self._values[index]
+
+ def __len__(self):
+ if self._values is None:
+ raise ValueError('UnknownFields does not exist. '
+ 'The parent message might be cleared.')
+ return len(self._values)
+
+ def _add(self, field_number, wire_type, data):
+ unknown_field = _UnknownField(field_number, wire_type, data)
+ self._values.append(unknown_field)
+ return unknown_field
+
+ def __iter__(self):
+ for i in range(len(self)):
+ yield UnknownFieldRef(self, i)
+
+ def _extend(self, other):
+ if other is None:
+ return
+ # pylint: disable=protected-access
+ self._values.extend(other._values)
+
+ def __eq__(self, other):
+ if self is other:
+ return True
+ # Sort unknown fields because their order shouldn't
+ # affect equality test.
+ values = list(self._values)
+ if other is None:
+ return not values
+ values.sort()
+ # pylint: disable=protected-access
+ other_values = sorted(other._values)
+ return values == other_values
+
+ def _clear(self):
+ for value in self._values:
+ # pylint: disable=protected-access
+ if isinstance(value._data, UnknownFieldSet):
+ value._data._clear() # pylint: disable=protected-access
+ self._values = None
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/decoder.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/decoder.py
new file mode 100644
index 0000000000000000000000000000000000000000..bc1b7b785cf0f6b001db2a650eb0eac15a2eb82b
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/decoder.py
@@ -0,0 +1,1029 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Code for decoding protocol buffer primitives.
+
+This code is very similar to encoder.py -- read the docs for that module first.
+
+A "decoder" is a function with the signature:
+ Decode(buffer, pos, end, message, field_dict)
+The arguments are:
+ buffer: The string containing the encoded message.
+ pos: The current position in the string.
+ end: The position in the string where the current message ends. May be
+ less than len(buffer) if we're reading a sub-message.
+ message: The message object into which we're parsing.
+ field_dict: message._fields (avoids a hashtable lookup).
+The decoder reads the field and stores it into field_dict, returning the new
+buffer position. A decoder for a repeated field may proactively decode all of
+the elements of that field, if they appear consecutively.
+
+Note that decoders may throw any of the following:
+ IndexError: Indicates a truncated message.
+ struct.error: Unpacking of a fixed-width field failed.
+ message.DecodeError: Other errors.
+
+Decoders are expected to raise an exception if they are called with pos > end.
+This allows callers to be lax about bounds checking: it's fine to read past
+"end" as long as you are sure that someone else will notice and throw an
+exception later on.
+
+Something up the call stack is expected to catch IndexError and struct.error
+and convert them to message.DecodeError.
+
+Decoders are constructed using decoder constructors with the signature:
+ MakeDecoder(field_number, is_repeated, is_packed, key, new_default)
+The arguments are:
+ field_number: The field number of the field we want to decode.
+ is_repeated: Is the field a repeated field? (bool)
+ is_packed: Is the field a packed field? (bool)
+ key: The key to use when looking up the field within field_dict.
+ (This is actually the FieldDescriptor but nothing in this
+ file should depend on that.)
+ new_default: A function which takes a message object as a parameter and
+ returns a new instance of the default value for this field.
+ (This is called for repeated fields and sub-messages, when an
+ instance does not already exist.)
+
+As with encoders, we define a decoder constructor for every type of field.
+Then, for every field of every message class we construct an actual decoder.
+That decoder goes into a dict indexed by tag, so when we decode a message
+we repeatedly read a tag, look up the corresponding decoder, and invoke it.
+"""
+
+__author__ = 'kenton@google.com (Kenton Varda)'
+
+import math
+import struct
+
+from google.protobuf.internal import containers
+from google.protobuf.internal import encoder
+from google.protobuf.internal import wire_format
+from google.protobuf import message
+
+
+# This is not for optimization, but rather to avoid conflicts with local
+# variables named "message".
+_DecodeError = message.DecodeError
+
+
+def _VarintDecoder(mask, result_type):
+ """Return an encoder for a basic varint value (does not include tag).
+
+ Decoded values will be bitwise-anded with the given mask before being
+ returned, e.g. to limit them to 32 bits. The returned decoder does not
+ take the usual "end" parameter -- the caller is expected to do bounds checking
+ after the fact (often the caller can defer such checking until later). The
+ decoder returns a (value, new_pos) pair.
+ """
+
+ def DecodeVarint(buffer, pos):
+ result = 0
+ shift = 0
+ while 1:
+ b = buffer[pos]
+ result |= ((b & 0x7f) << shift)
+ pos += 1
+ if not (b & 0x80):
+ result &= mask
+ result = result_type(result)
+ return (result, pos)
+ shift += 7
+ if shift >= 64:
+ raise _DecodeError('Too many bytes when decoding varint.')
+ return DecodeVarint
+
+
+def _SignedVarintDecoder(bits, result_type):
+ """Like _VarintDecoder() but decodes signed values."""
+
+ signbit = 1 << (bits - 1)
+ mask = (1 << bits) - 1
+
+ def DecodeVarint(buffer, pos):
+ result = 0
+ shift = 0
+ while 1:
+ b = buffer[pos]
+ result |= ((b & 0x7f) << shift)
+ pos += 1
+ if not (b & 0x80):
+ result &= mask
+ result = (result ^ signbit) - signbit
+ result = result_type(result)
+ return (result, pos)
+ shift += 7
+ if shift >= 64:
+ raise _DecodeError('Too many bytes when decoding varint.')
+ return DecodeVarint
+
# Module-level varint decoders shared by the decoder constructors below.
# All 32-bit and 64-bit values are represented as int.
_DecodeVarint = _VarintDecoder((1 << 64) - 1, int)
_DecodeSignedVarint = _SignedVarintDecoder(64, int)

# Use these versions for values which must be limited to 32 bits.
_DecodeVarint32 = _VarintDecoder((1 << 32) - 1, int)
_DecodeSignedVarint32 = _SignedVarintDecoder(32, int)
+
+
def ReadTag(buffer, pos):
  """Read a tag from the memoryview, and return a (tag_bytes, new_pos) tuple.

  The tag's raw bytes are returned instead of the decoded integer: they can
  be used directly to look up the proper decoder, which effectively trades
  pure-Python work (decoding a varint) for C-level work (a byte-string hash
  lookup). In a low-level language decoding would be cheaper, but not in
  Python.

  Args:
    buffer: memoryview object of the encoded bytes
    pos: int of the current position to start from

  Returns:
    Tuple[bytes, int] of the tag data and new position.
  """
  end = pos
  # The final byte of a varint is the first one whose continuation bit is
  # clear.
  while buffer[end] & 0x80:
    end += 1
  end += 1
  return (buffer[pos:end].tobytes(), end)
+
+
+# --------------------------------------------------------------------
+
+
def _SimpleDecoder(wire_type, decode_value):
  """Return a constructor for a decoder for fields of a particular type.

  Args:
      wire_type: The field's wire type.
      decode_value: A function which decodes an individual value, e.g.
        _DecodeVarint()
  """

  def SpecificDecoder(field_number, is_repeated, is_packed, key, new_default,
                      clear_if_default=False):
    if is_packed:
      local_DecodeVarint = _DecodeVarint
      def DecodePackedField(buffer, pos, end, message, field_dict):
        # Packed: a single length-delimited blob of consecutive values.
        value = field_dict.get(key)
        if value is None:
          value = field_dict.setdefault(key, new_default(message))
        (endpoint, pos) = local_DecodeVarint(buffer, pos)
        endpoint += pos
        if endpoint > end:
          raise _DecodeError('Truncated message.')
        while pos < endpoint:
          (element, pos) = decode_value(buffer, pos)
          value.append(element)
        # The last element may have decoded past the declared blob end; only
        # then is the data corrupt.
        if pos > endpoint:
          del value[-1]  # Discard corrupt value.
          raise _DecodeError('Packed element was truncated.')
        return pos
      return DecodePackedField
    elif is_repeated:
      tag_bytes = encoder.TagBytes(field_number, wire_type)
      tag_len = len(tag_bytes)
      def DecodeRepeatedField(buffer, pos, end, message, field_dict):
        value = field_dict.get(key)
        if value is None:
          value = field_dict.setdefault(key, new_default(message))
        while 1:
          (element, new_pos) = decode_value(buffer, pos)
          value.append(element)
          # Predict that the next tag is another copy of the same repeated
          # field.
          pos = new_pos + tag_len
          if buffer[new_pos:pos] != tag_bytes or new_pos >= end:
            # Prediction failed.  Return.
            if new_pos > end:
              raise _DecodeError('Truncated message.')
            return new_pos
      return DecodeRepeatedField
    else:
      def DecodeField(buffer, pos, end, message, field_dict):
        (new_value, pos) = decode_value(buffer, pos)
        if pos > end:
          raise _DecodeError('Truncated message.')
        # Proto3 "implicit presence": a default (zero/empty) value clears
        # the field rather than storing it.
        if clear_if_default and not new_value:
          field_dict.pop(key, None)
        else:
          field_dict[key] = new_value
        return pos
      return DecodeField

  return SpecificDecoder
+
+
def _ModifiedDecoder(wire_type, decode_value, modify_value):
  """Like _SimpleDecoder but additionally invokes modify_value on every value
  before storing it.  Usually modify_value is ZigZagDecode.
  """

  # Reusing _SimpleDecoder is slightly slower than copying a bunch of code,
  # but not enough to make a significant difference.

  def InnerDecode(buffer, pos):
    (raw, new_pos) = decode_value(buffer, pos)
    return (modify_value(raw), new_pos)

  return _SimpleDecoder(wire_type, InnerDecode)
+
+
def _StructPackDecoder(wire_type, format):
  """Return a constructor for a decoder for a fixed-width field.

  Args:
      wire_type: The field's wire type.
      format: The format string to pass to struct.unpack().
  """

  width = struct.calcsize(format)
  local_unpack = struct.unpack

  # Reusing _SimpleDecoder is slightly slower than copying a bunch of code,
  # but not enough to make a significant difference.

  # Note that we expect someone up-stack to catch struct.error and convert
  # it to _DecodeError -- this way we don't have to set up exception-
  # handling blocks every time we parse one value.

  def InnerDecode(buffer, pos):
    stop = pos + width
    return (local_unpack(format, buffer[pos:stop])[0], stop)

  return _SimpleDecoder(wire_type, InnerDecode)
+
+
def _FloatDecoder():
  """Returns a decoder for a float field.

  This code works around a bug in struct.unpack for non-finite 32-bit
  floating-point values.
  """

  local_unpack = struct.unpack

  def InnerDecode(buffer, pos):
    """Decode serialized float to a float and new position.

    Args:
      buffer: memoryview of the serialized bytes
      pos: int, position in the memory view to start at.

    Returns:
      Tuple[float, int] of the deserialized float value and new position
      in the serialized data.
    """
    # We expect a 32-bit value in little-endian byte order.  Bit 1 is the sign
    # bit, bits 2-9 represent the exponent, and bits 10-32 are the significand.
    new_pos = pos + 4
    float_bytes = buffer[pos:new_pos].tobytes()

    # If this value has all its exponent bits set, then it's non-finite.
    # In Python 2.4, struct.unpack will convert it to a finite 64-bit value.
    # To avoid that, we parse it specially.
    # (The `in` test on a one-byte slice matches either 0x7F or 0xFF, i.e.
    # both sign variants of an all-ones exponent high byte.)
    if (float_bytes[3:4] in b'\x7F\xFF' and float_bytes[2:3] >= b'\x80'):
      # If at least one significand bit is set...
      if float_bytes[0:3] != b'\x00\x00\x80':
        return (math.nan, new_pos)
      # If sign bit is set...
      if float_bytes[3:4] == b'\xFF':
        return (-math.inf, new_pos)
      return (math.inf, new_pos)

    # Note that we expect someone up-stack to catch struct.error and convert
    # it to _DecodeError -- this way we don't have to set up exception-
    # handling blocks every time we parse one value.
    result = local_unpack('<f', float_bytes)[0]
    return (result, new_pos)
  return _SimpleDecoder(wire_format.WIRETYPE_FIXED32, InnerDecode)
+
+
def _DoubleDecoder():
  """Returns a decoder for a double field.

  This code works around a bug in struct.unpack for not-a-number.
  """

  local_unpack = struct.unpack

  def InnerDecode(buffer, pos):
    """Decode serialized double to a double and new position.

    Args:
      buffer: memoryview of the serialized bytes.
      pos: int, position in the memory view to start at.

    Returns:
      Tuple[float, int] of the decoded double value and new position
      in the serialized data.
    """
    # We expect a 64-bit value in little-endian byte order.  Bit 1 is the sign
    # bit, bits 2-12 represent the exponent, and bits 13-64 are the significand.
    new_pos = pos + 8
    double_bytes = buffer[pos:new_pos].tobytes()

    # If this value has all its exponent bits set and at least one significand
    # bit set, it's not a number.  In Python 2.4, struct.unpack will treat it
    # as inf or -inf.  To avoid that, we treat it specially.
    # (The `in` test on a one-byte slice matches either 0x7F or 0xFF.)
    if ((double_bytes[7:8] in b'\x7F\xFF')
        and (double_bytes[6:7] >= b'\xF0')
        and (double_bytes[0:7] != b'\x00\x00\x00\x00\x00\x00\xF0')):
      return (math.nan, new_pos)

    # Note that we expect someone up-stack to catch struct.error and convert
    # it to _DecodeError -- this way we don't have to set up exception-
    # handling blocks every time we parse one value.
    result = local_unpack('<d', double_bytes)[0]
    return (result, new_pos)
  return _SimpleDecoder(wire_format.WIRETYPE_FIXED64, InnerDecode)
+
+
def EnumDecoder(field_number, is_repeated, is_packed, key, new_default,
                clear_if_default=False):
  """Returns a decoder for enum field.

  Values not found in the enum's values_by_number are routed to the
  message's unknown-field storage instead of the field itself.
  """
  enum_type = key.enum_type
  if is_packed:
    local_DecodeVarint = _DecodeVarint
    def DecodePackedField(buffer, pos, end, message, field_dict):
      """Decode serialized packed enum to its value and a new position.

      Args:
        buffer: memoryview of the serialized bytes.
        pos: int, position in the memory view to start at.
        end: int, end position of serialized data
        message: Message object to store unknown fields in
        field_dict: Map[Descriptor, Any] to store decoded values in.

      Returns:
        int, new position in serialized data.
      """
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      (endpoint, pos) = local_DecodeVarint(buffer, pos)
      endpoint += pos
      if endpoint > end:
        raise _DecodeError('Truncated message.')
      while pos < endpoint:
        value_start_pos = pos
        (element, pos) = _DecodeSignedVarint32(buffer, pos)
        # pylint: disable=protected-access
        if element in enum_type.values_by_number:
          value.append(element)
        else:
          # Unknown enum value: preserve its raw bytes as an unknown field.
          if not message._unknown_fields:
            message._unknown_fields = []
          tag_bytes = encoder.TagBytes(field_number,
                                       wire_format.WIRETYPE_VARINT)

          message._unknown_fields.append(
              (tag_bytes, buffer[value_start_pos:pos].tobytes()))
          if message._unknown_field_set is None:
            message._unknown_field_set = containers.UnknownFieldSet()
          message._unknown_field_set._add(
              field_number, wire_format.WIRETYPE_VARINT, element)
        # pylint: enable=protected-access
      # `element` is the last decoded value; if decoding ran past the blob
      # end, roll back wherever that last value was stored.
      if pos > endpoint:
        if element in enum_type.values_by_number:
          del value[-1]  # Discard corrupt value.
        else:
          del message._unknown_fields[-1]
          # pylint: disable=protected-access
          del message._unknown_field_set._values[-1]
          # pylint: enable=protected-access
        raise _DecodeError('Packed element was truncated.')
      return pos
    return DecodePackedField
  elif is_repeated:
    tag_bytes = encoder.TagBytes(field_number, wire_format.WIRETYPE_VARINT)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
      """Decode serialized repeated enum to its value and a new position.

      Args:
        buffer: memoryview of the serialized bytes.
        pos: int, position in the memory view to start at.
        end: int, end position of serialized data
        message: Message object to store unknown fields in
        field_dict: Map[Descriptor, Any] to store decoded values in.

      Returns:
        int, new position in serialized data.
      """
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        (element, new_pos) = _DecodeSignedVarint32(buffer, pos)
        # pylint: disable=protected-access
        if element in enum_type.values_by_number:
          value.append(element)
        else:
          if not message._unknown_fields:
            message._unknown_fields = []
          message._unknown_fields.append(
              (tag_bytes, buffer[pos:new_pos].tobytes()))
          if message._unknown_field_set is None:
            message._unknown_field_set = containers.UnknownFieldSet()
          message._unknown_field_set._add(
              field_number, wire_format.WIRETYPE_VARINT, element)
        # pylint: enable=protected-access
        # Predict that the next tag is another copy of the same repeated
        # field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos >= end:
          # Prediction failed.  Return.
          if new_pos > end:
            raise _DecodeError('Truncated message.')
          return new_pos
    return DecodeRepeatedField
  else:
    def DecodeField(buffer, pos, end, message, field_dict):
      """Decode serialized enum to its value and a new position.

      Args:
        buffer: memoryview of the serialized bytes.
        pos: int, position in the memory view to start at.
        end: int, end position of serialized data
        message: Message object to store unknown fields in
        field_dict: Map[Descriptor, Any] to store decoded values in.

      Returns:
        int, new position in serialized data.
      """
      value_start_pos = pos
      (enum_value, pos) = _DecodeSignedVarint32(buffer, pos)
      if pos > end:
        raise _DecodeError('Truncated message.')
      if clear_if_default and not enum_value:
        field_dict.pop(key, None)
        return pos
      # pylint: disable=protected-access
      if enum_value in enum_type.values_by_number:
        field_dict[key] = enum_value
      else:
        if not message._unknown_fields:
          message._unknown_fields = []
        tag_bytes = encoder.TagBytes(field_number,
                                     wire_format.WIRETYPE_VARINT)
        message._unknown_fields.append(
            (tag_bytes, buffer[value_start_pos:pos].tobytes()))
        if message._unknown_field_set is None:
          message._unknown_field_set = containers.UnknownFieldSet()
        message._unknown_field_set._add(
            field_number, wire_format.WIRETYPE_VARINT, enum_value)
      # pylint: enable=protected-access
      return pos
    return DecodeField
+
+
+# --------------------------------------------------------------------
+
+
# Concrete decoder constructors for every scalar field type, built from the
# generic factories above.

Int32Decoder = _SimpleDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeSignedVarint32)

Int64Decoder = _SimpleDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeSignedVarint)

UInt32Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint32)
UInt64Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint)

SInt32Decoder = _ModifiedDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeVarint32, wire_format.ZigZagDecode)
SInt64Decoder = _ModifiedDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeVarint, wire_format.ZigZagDecode)

# Note that Python conveniently guarantees that when using the '<' prefix on
# formats, they will also have the same size across all platforms (as opposed
# to without the prefix, where their sizes depend on the C compiler's basic
# type sizes).
Fixed32Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, '<I')
Fixed64Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED64, '<Q')
SFixed32Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, '<i')
SFixed64Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED64, '<q')
FloatDecoder = _FloatDecoder()
DoubleDecoder = _DoubleDecoder()

BoolDecoder = _ModifiedDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeVarint, bool)
+
+
def StringDecoder(field_number, is_repeated, is_packed, key, new_default,
                  clear_if_default=False):
  """Returns a decoder for a string field."""

  local_DecodeVarint = _DecodeVarint

  def _ConvertToUnicode(memview):
    """Convert bytes (a memoryview slice) to a unicode string."""
    byte_str = memview.tobytes()
    try:
      value = str(byte_str, 'utf-8')
    except UnicodeDecodeError as e:
      # add more information to the error message and re-raise it.
      e.reason = '%s in field: %s' % (e, key.full_name)
      raise

    return value

  # Strings are length-delimited and never packed.
  assert not is_packed
  if is_repeated:
    tag_bytes = encoder.TagBytes(field_number,
                                 wire_format.WIRETYPE_LENGTH_DELIMITED)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        (size, pos) = local_DecodeVarint(buffer, pos)
        new_pos = pos + size
        if new_pos > end:
          raise _DecodeError('Truncated string.')
        value.append(_ConvertToUnicode(buffer[pos:new_pos]))
        # Predict that the next tag is another copy of the same repeated field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
          # Prediction failed.  Return.
          return new_pos
    return DecodeRepeatedField
  else:
    def DecodeField(buffer, pos, end, message, field_dict):
      (size, pos) = local_DecodeVarint(buffer, pos)
      new_pos = pos + size
      if new_pos > end:
        raise _DecodeError('Truncated string.')
      # Proto3 implicit presence: an empty string clears the field.
      if clear_if_default and not size:
        field_dict.pop(key, None)
      else:
        field_dict[key] = _ConvertToUnicode(buffer[pos:new_pos])
      return new_pos
    return DecodeField
+
+
def BytesDecoder(field_number, is_repeated, is_packed, key, new_default,
                 clear_if_default=False):
  """Returns a decoder for a bytes field."""

  local_DecodeVarint = _DecodeVarint

  # Bytes fields are length-delimited and never packed.
  assert not is_packed
  if is_repeated:
    tag_bytes = encoder.TagBytes(field_number,
                                 wire_format.WIRETYPE_LENGTH_DELIMITED)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        (size, pos) = local_DecodeVarint(buffer, pos)
        new_pos = pos + size
        if new_pos > end:
          raise _DecodeError('Truncated string.')
        value.append(buffer[pos:new_pos].tobytes())
        # Predict that the next tag is another copy of the same repeated field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
          # Prediction failed.  Return.
          return new_pos
    return DecodeRepeatedField
  else:
    def DecodeField(buffer, pos, end, message, field_dict):
      (size, pos) = local_DecodeVarint(buffer, pos)
      new_pos = pos + size
      if new_pos > end:
        raise _DecodeError('Truncated string.')
      # Proto3 implicit presence: empty bytes clear the field.
      if clear_if_default and not size:
        field_dict.pop(key, None)
      else:
        field_dict[key] = buffer[pos:new_pos].tobytes()
      return new_pos
    return DecodeField
+
+
def GroupDecoder(field_number, is_repeated, is_packed, key, new_default):
  """Returns a decoder for a group field."""

  end_tag_bytes = encoder.TagBytes(field_number,
                                   wire_format.WIRETYPE_END_GROUP)
  end_tag_len = len(end_tag_bytes)

  # Groups are delimited by start/end tags, never packed.
  assert not is_packed
  if is_repeated:
    tag_bytes = encoder.TagBytes(field_number,
                                 wire_format.WIRETYPE_START_GROUP)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        # NOTE(review): this re-fetch duplicates the lookup above on every
        # iteration; it looks redundant but is preserved byte-for-byte.
        value = field_dict.get(key)
        if value is None:
          value = field_dict.setdefault(key, new_default(message))
        # Read sub-message.
        pos = value.add()._InternalParse(buffer, pos, end)
        # Read end tag.
        new_pos = pos+end_tag_len
        if buffer[pos:new_pos] != end_tag_bytes or new_pos > end:
          raise _DecodeError('Missing group end tag.')
        # Predict that the next tag is another copy of the same repeated field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
          # Prediction failed.  Return.
          return new_pos
    return DecodeRepeatedField
  else:
    def DecodeField(buffer, pos, end, message, field_dict):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      # Read sub-message.
      pos = value._InternalParse(buffer, pos, end)
      # Read end tag.
      new_pos = pos+end_tag_len
      if buffer[pos:new_pos] != end_tag_bytes or new_pos > end:
        raise _DecodeError('Missing group end tag.')
      return new_pos
    return DecodeField
+
+
def MessageDecoder(field_number, is_repeated, is_packed, key, new_default):
  """Returns a decoder for a message field."""

  local_DecodeVarint = _DecodeVarint

  # Sub-messages are length-delimited, never packed.
  assert not is_packed
  if is_repeated:
    tag_bytes = encoder.TagBytes(field_number,
                                 wire_format.WIRETYPE_LENGTH_DELIMITED)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        # Read length.
        (size, pos) = local_DecodeVarint(buffer, pos)
        new_pos = pos + size
        if new_pos > end:
          raise _DecodeError('Truncated message.')
        # Read sub-message.
        if value.add()._InternalParse(buffer, pos, new_pos) != new_pos:
          # The only reason _InternalParse would return early is if it
          # encountered an end-group tag.
          raise _DecodeError('Unexpected end-group tag.')
        # Predict that the next tag is another copy of the same repeated field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
          # Prediction failed.  Return.
          return new_pos
    return DecodeRepeatedField
  else:
    def DecodeField(buffer, pos, end, message, field_dict):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      # Read length.
      (size, pos) = local_DecodeVarint(buffer, pos)
      new_pos = pos + size
      if new_pos > end:
        raise _DecodeError('Truncated message.')
      # Read sub-message.
      if value._InternalParse(buffer, pos, new_pos) != new_pos:
        # The only reason _InternalParse would return early is if it encountered
        # an end-group tag.
        raise _DecodeError('Unexpected end-group tag.')
      return new_pos
    return DecodeField
+
+
+# --------------------------------------------------------------------
+
# Tag bytes for the "Item" group (field 1) of the MessageSet wire format.
MESSAGE_SET_ITEM_TAG = encoder.TagBytes(1, wire_format.WIRETYPE_START_GROUP)
+
def MessageSetItemDecoder(descriptor):
  """Returns a decoder for a MessageSet item.

  The parameter is the message Descriptor.  (It is currently unused here but
  kept for interface compatibility with the other decoder constructors.)

  The message set message looks like this:
    message MessageSet {
      repeated group Item = 1 {
        required int32 type_id = 2;
        required string message = 3;
      }
    }
  """

  type_id_tag_bytes = encoder.TagBytes(2, wire_format.WIRETYPE_VARINT)
  message_tag_bytes = encoder.TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)
  item_end_tag_bytes = encoder.TagBytes(1, wire_format.WIRETYPE_END_GROUP)

  local_ReadTag = ReadTag
  local_DecodeVarint = _DecodeVarint
  local_SkipField = SkipField

  def DecodeItem(buffer, pos, end, message, field_dict):
    """Decode serialized message set to its value and new position.

    Args:
      buffer: memoryview of the serialized bytes.
      pos: int, position in the memory view to start at.
      end: int, end position of serialized data
      message: Message object to store unknown fields in
      field_dict: Map[Descriptor, Any] to store decoded values in.

    Returns:
      int, new position in serialized data.
    """
    message_set_item_start = pos
    type_id = -1
    message_start = -1
    message_end = -1

    # Technically, type_id and message can appear in any order, so we need
    # a little loop here.
    while 1:
      (tag_bytes, pos) = local_ReadTag(buffer, pos)
      if tag_bytes == type_id_tag_bytes:
        (type_id, pos) = local_DecodeVarint(buffer, pos)
      elif tag_bytes == message_tag_bytes:
        (size, message_start) = local_DecodeVarint(buffer, pos)
        pos = message_end = message_start + size
      elif tag_bytes == item_end_tag_bytes:
        break
      else:
        # Consistency fix: use the pre-bound local alias.  The original code
        # bound local_SkipField above but then called the global SkipField
        # here, leaving the local dead and forfeiting the intended
        # local-lookup speedup.
        pos = local_SkipField(buffer, pos, end, tag_bytes)
        if pos == -1:
          raise _DecodeError('Missing group end tag.')

    if pos > end:
      raise _DecodeError('Truncated message.')

    if type_id == -1:
      raise _DecodeError('MessageSet item missing type_id.')
    if message_start == -1:
      raise _DecodeError('MessageSet item missing message.')

    extension = message.Extensions._FindExtensionByNumber(type_id)
    # pylint: disable=protected-access
    if extension is not None:
      value = field_dict.get(extension)
      if value is None:
        message_type = extension.message_type
        if not hasattr(message_type, '_concrete_class'):
          # pylint: disable=protected-access
          message._FACTORY.GetPrototype(message_type)
        value = field_dict.setdefault(
            extension, message_type._concrete_class())
      if value._InternalParse(buffer, message_start, message_end) != message_end:
        # The only reason _InternalParse would return early is if it encountered
        # an end-group tag.
        raise _DecodeError('Unexpected end-group tag.')
    else:
      # Unknown extension: keep the whole raw item as an unknown field.
      if not message._unknown_fields:
        message._unknown_fields = []
      message._unknown_fields.append(
          (MESSAGE_SET_ITEM_TAG, buffer[message_set_item_start:pos].tobytes()))
      if message._unknown_field_set is None:
        message._unknown_field_set = containers.UnknownFieldSet()
      message._unknown_field_set._add(
          type_id,
          wire_format.WIRETYPE_LENGTH_DELIMITED,
          buffer[message_start:message_end].tobytes())
    # pylint: enable=protected-access

    return pos

  return DecodeItem
+
+# --------------------------------------------------------------------
+
def MapDecoder(field_descriptor, new_default, is_message_map):
  """Returns a decoder for a map field.

  Each map entry arrives on the wire as a length-delimited sub-message with
  `key` and `value` fields, which is decoded into a scratch entry message
  and then copied into the map container.
  """

  key = field_descriptor
  tag_bytes = encoder.TagBytes(field_descriptor.number,
                               wire_format.WIRETYPE_LENGTH_DELIMITED)
  tag_len = len(tag_bytes)
  local_DecodeVarint = _DecodeVarint
  # Can't read _concrete_class yet; might not be initialized.
  message_type = field_descriptor.message_type

  def DecodeMap(buffer, pos, end, message, field_dict):
    # Single scratch entry message, cleared and reused for every entry.
    submsg = message_type._concrete_class()
    value = field_dict.get(key)
    if value is None:
      value = field_dict.setdefault(key, new_default(message))
    while 1:
      # Read length.
      (size, pos) = local_DecodeVarint(buffer, pos)
      new_pos = pos + size
      if new_pos > end:
        raise _DecodeError('Truncated message.')
      # Read sub-message.
      submsg.Clear()
      if submsg._InternalParse(buffer, pos, new_pos) != new_pos:
        # The only reason _InternalParse would return early is if it
        # encountered an end-group tag.
        raise _DecodeError('Unexpected end-group tag.')

      if is_message_map:
        value[submsg.key].CopyFrom(submsg.value)
      else:
        value[submsg.key] = submsg.value

      # Predict that the next tag is another copy of the same repeated field.
      pos = new_pos + tag_len
      if buffer[new_pos:pos] != tag_bytes or new_pos == end:
        # Prediction failed.  Return.
        return new_pos

  return DecodeMap
+
+# --------------------------------------------------------------------
+# Optimization is not as heavy here because calls to SkipField() are rare,
+# except for handling end-group tags.
+
+def _SkipVarint(buffer, pos, end):
+ """Skip a varint value. Returns the new position."""
+ # Previously ord(buffer[pos]) raised IndexError when pos is out of range.
+ # With this code, ord(b'') raises TypeError. Both are handled in
+ # python_message.py to generate a 'Truncated message' error.
+ while ord(buffer[pos:pos+1].tobytes()) & 0x80:
+ pos += 1
+ pos += 1
+ if pos > end:
+ raise _DecodeError('Truncated message.')
+ return pos
+
+def _SkipFixed64(buffer, pos, end):
+ """Skip a fixed64 value. Returns the new position."""
+
+ pos += 8
+ if pos > end:
+ raise _DecodeError('Truncated message.')
+ return pos
+
+
+def _DecodeFixed64(buffer, pos):
+ """Decode a fixed64."""
+ new_pos = pos + 8
+ return (struct.unpack('<Q', buffer[pos:new_pos])[0], new_pos)
+
+
def _SkipLengthDelimited(buffer, pos, end):
  """Skip a length-delimited value.  Returns the new position."""
  (size, pos) = _DecodeVarint(buffer, pos)
  new_pos = pos + size
  if new_pos > end:
    raise _DecodeError('Truncated message.')
  return new_pos
+
+
def _SkipGroup(buffer, pos, end):
  """Skip sub-group.  Returns the new position."""

  while 1:
    (tag_bytes, pos) = ReadTag(buffer, pos)
    new_pos = SkipField(buffer, pos, end, tag_bytes)
    if new_pos == -1:
      # SkipField returns -1 for an end-group tag; the group ends right
      # after that tag, which ReadTag has already consumed.
      return pos
    pos = new_pos
+
+
def _DecodeUnknownFieldSet(buffer, pos, end_pos=None):
  """Decode UnknownFieldSet.  Returns the UnknownFieldSet and new position.

  With end_pos=None (the nested-group case) decoding continues until an
  END_GROUP tag is read.
  """

  unknown_field_set = containers.UnknownFieldSet()
  while end_pos is None or pos < end_pos:
    (tag_bytes, pos) = ReadTag(buffer, pos)
    (tag, _) = _DecodeVarint(tag_bytes, 0)
    field_number, wire_type = wire_format.UnpackTag(tag)
    if wire_type == wire_format.WIRETYPE_END_GROUP:
      break
    (data, pos) = _DecodeUnknownField(buffer, pos, wire_type)
    # pylint: disable=protected-access
    unknown_field_set._add(field_number, wire_type, data)

  return (unknown_field_set, pos)
+
+
def _DecodeUnknownField(buffer, pos, wire_type):
  """Decode a unknown field.  Returns the UnknownField and new position.

  The decoded `data` is an int (varint/fixed), bytes (length-delimited), or
  a nested UnknownFieldSet (group).  An END_GROUP tag returns (0, -1) so the
  caller can detect the group end.
  """

  if wire_type == wire_format.WIRETYPE_VARINT:
    (data, pos) = _DecodeVarint(buffer, pos)
  elif wire_type == wire_format.WIRETYPE_FIXED64:
    (data, pos) = _DecodeFixed64(buffer, pos)
  elif wire_type == wire_format.WIRETYPE_FIXED32:
    (data, pos) = _DecodeFixed32(buffer, pos)
  elif wire_type == wire_format.WIRETYPE_LENGTH_DELIMITED:
    (size, pos) = _DecodeVarint(buffer, pos)
    data = buffer[pos:pos+size].tobytes()
    pos += size
  elif wire_type == wire_format.WIRETYPE_START_GROUP:
    (data, pos) = _DecodeUnknownFieldSet(buffer, pos)
  elif wire_type == wire_format.WIRETYPE_END_GROUP:
    return (0, -1)
  else:
    raise _DecodeError('Wrong wire type in tag.')

  return (data, pos)
+
+
def _EndGroup(buffer, pos, end):
  """Skipping an END_GROUP tag returns -1 to tell the parent loop to break."""
  # All three parameters are unused; the signature matches the other
  # skippers so it can live in _FieldSkipper's dispatch table.
  return -1
+
+
+def _SkipFixed32(buffer, pos, end):
+ """Skip a fixed32 value. Returns the new position."""
+
+ pos += 4
+ if pos > end:
+ raise _DecodeError('Truncated message.')
+ return pos
+
+
+def _DecodeFixed32(buffer, pos):
+ """Decode a fixed32."""
+
+ new_pos = pos + 4
+ return (struct.unpack('<I', buffer[pos:new_pos])[0], new_pos)
+
+
def _RaiseInvalidWireType(buffer, pos, end):
  """Skip function for unknown wire types.  Raises an exception."""
  # Entries 6 and 7 in _FieldSkipper's table; those wire types are unused
  # by the protobuf wire format.
  raise _DecodeError('Tag had invalid wire type.')
+
def _FieldSkipper():
  """Constructs the SkipField function."""

  # Indexed by wire type (0-7); unused wire types 6 and 7 raise.
  WIRETYPE_TO_SKIPPER = [
      _SkipVarint,
      _SkipFixed64,
      _SkipLengthDelimited,
      _SkipGroup,
      _EndGroup,
      _SkipFixed32,
      _RaiseInvalidWireType,
      _RaiseInvalidWireType,
  ]

  wiretype_mask = wire_format.TAG_TYPE_MASK

  def SkipField(buffer, pos, end, tag_bytes):
    """Skips a field with the specified tag.

    |pos| should point to the byte immediately after the tag.

    Returns:
      The new position (after the tag value), or -1 if the tag is an end-group
      tag (in which case the calling loop should break).
    """

    # The wire type is always in the first byte since varints are little-endian.
    wire_type = ord(tag_bytes[0:1]) & wiretype_mask
    return WIRETYPE_TO_SKIPPER[wire_type](buffer, pos, end)

  return SkipField
+
# Module-level skip dispatcher, built once at import time.
SkipField = _FieldSkipper()
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/encoder.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/encoder.py
new file mode 100644
index 0000000000000000000000000000000000000000..4b4f652f2516e86ad8a9eb7d6cb420e355f1900f
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/encoder.py
@@ -0,0 +1,829 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Code for encoding protocol message primitives.
+
+Contains the logic for encoding every logical protocol field type
+into one of the 5 physical wire types.
+
+This code is designed to push the Python interpreter's performance to the
+limits.
+
+The basic idea is that at startup time, for every field (i.e. every
+FieldDescriptor) we construct two functions: a "sizer" and an "encoder". The
+sizer takes a value of this field's type and computes its byte size. The
+encoder takes a writer function and a value. It encodes the value into byte
+strings and invokes the writer function to write those strings. Typically the
+writer function is the write() method of a BytesIO.
+
+We try to do as much work as possible when constructing the writer and the
+sizer rather than when calling them. In particular:
+* We copy any needed global functions to local variables, so that we do not need
+ to do costly global table lookups at runtime.
+* Similarly, we try to do any attribute lookups at startup time if possible.
+* Every field's tag is encoded to bytes at startup, since it can't change at
+ runtime.
+* Whatever component of the field size we can compute at startup, we do.
+* We *avoid* sharing code if doing so would make the code slower and not sharing
+ does not burden us too much. For example, encoders for repeated fields do
+ not just call the encoders for singular fields in a loop because this would
+ add an extra function call overhead for every loop iteration; instead, we
+ manually inline the single-value encoder into the loop.
+* If a Python function lacks a return statement, Python actually generates
+ instructions to pop the result of the last statement off the stack, push
+ None onto the stack, and then return that. If we really don't care what
+ value is returned, then we can save two instructions by returning the
+ result of the last statement. It looks funny but it helps.
+* We assume that type and bounds checking has happened at a higher level.
+"""
+
+__author__ = 'kenton@google.com (Kenton Varda)'
+
+import struct
+
+from google.protobuf.internal import wire_format
+
+
# This will overflow and thus become IEEE-754 "infinity". We would use
# "float('inf')" but it doesn't work on Windows pre-Python-2.6.
# These sentinels are compared against float field values when encoding
# non-finite numbers (see _FloatingPointEncoder below).
_POS_INF = 1e10000
_NEG_INF = -_POS_INF
+
+
+def _VarintSize(value):
+ """Compute the size of a varint value."""
+ if value <= 0x7f: return 1
+ if value <= 0x3fff: return 2
+ if value <= 0x1fffff: return 3
+ if value <= 0xfffffff: return 4
+ if value <= 0x7ffffffff: return 5
+ if value <= 0x3ffffffffff: return 6
+ if value <= 0x1ffffffffffff: return 7
+ if value <= 0xffffffffffffff: return 8
+ if value <= 0x7fffffffffffffff: return 9
+ return 10
+
+
+def _SignedVarintSize(value):
+ """Compute the size of a signed varint value."""
+ if value < 0: return 10
+ if value <= 0x7f: return 1
+ if value <= 0x3fff: return 2
+ if value <= 0x1fffff: return 3
+ if value <= 0xfffffff: return 4
+ if value <= 0x7ffffffff: return 5
+ if value <= 0x3ffffffffff: return 6
+ if value <= 0x1ffffffffffff: return 7
+ if value <= 0xffffffffffffff: return 8
+ if value <= 0x7fffffffffffffff: return 9
+ return 10
+
+
def _TagSize(field_number):
  """Returns the number of bytes required to serialize a tag with this field
  number.

  Args:
    field_number: The proto field number whose tag is being sized.

  Returns:
    Byte length of the varint-encoded (field_number, wire_type) tag.
  """
  # Just pass in type 0, since the type won't affect the tag+type size.
  return _VarintSize(wire_format.PackTag(field_number, 0))
+
+
+# --------------------------------------------------------------------
+# In this section we define some generic sizers. Each of these functions
+# takes parameters specific to a particular field type, e.g. int32 or fixed64.
+# It returns another function which in turn takes parameters specific to a
+# particular field, e.g. the field number and whether it is repeated or packed.
+# Look at the next section to see how these are used.
+
+
def _SimpleSizer(compute_value_size):
  """A sizer which uses the function compute_value_size to compute the size of
  each value. Typically compute_value_size is _VarintSize.

  Args:
    compute_value_size: Function mapping one field value to its encoded byte
      size.

  Returns:
    A function (field_number, is_repeated, is_packed) -> sizer, where the
    sizer maps a field value to its total encoded byte size.
  """

  def SpecificSizer(field_number, is_repeated, is_packed):
    tag_size = _TagSize(field_number)
    if is_packed:
      # Bind the global to a local once, per the module's perf conventions.
      local_VarintSize = _VarintSize
      def PackedFieldSize(value):
        result = 0
        for element in value:
          result += compute_value_size(element)
        # Packed fields are length-delimited: one tag, a varint byte count,
        # then the concatenated element payloads.
        return result + local_VarintSize(result) + tag_size
      return PackedFieldSize
    elif is_repeated:
      def RepeatedFieldSize(value):
        # Non-packed repeated fields re-emit the tag before every element.
        result = tag_size * len(value)
        for element in value:
          result += compute_value_size(element)
        return result
      return RepeatedFieldSize
    else:
      def FieldSize(value):
        return tag_size + compute_value_size(value)
      return FieldSize

  return SpecificSizer
+
+
def _ModifiedSizer(compute_value_size, modify_value):
  """Like SimpleSizer, but modify_value is invoked on each value before it is
  passed to compute_value_size. modify_value is typically ZigZagEncode.

  Args:
    compute_value_size: Function mapping one (already modified) value to its
      encoded byte size.
    modify_value: Transformation applied to every value before sizing.

  Returns:
    A function (field_number, is_repeated, is_packed) -> sizer.
  """

  def SpecificSizer(field_number, is_repeated, is_packed):
    tag_size = _TagSize(field_number)
    if is_packed:
      local_VarintSize = _VarintSize
      def PackedFieldSize(value):
        result = 0
        for element in value:
          result += compute_value_size(modify_value(element))
        # Tag + varint payload length + payload (see _SimpleSizer).
        return result + local_VarintSize(result) + tag_size
      return PackedFieldSize
    elif is_repeated:
      def RepeatedFieldSize(value):
        result = tag_size * len(value)
        for element in value:
          result += compute_value_size(modify_value(element))
        return result
      return RepeatedFieldSize
    else:
      def FieldSize(value):
        return tag_size + compute_value_size(modify_value(value))
      return FieldSize

  return SpecificSizer
+
+
def _FixedSizer(value_size):
  """Like _SimpleSizer except for a fixed-size field. The input is the size
  of one value."""

  def SpecificSizer(field_number, is_repeated, is_packed):
    tag_size = _TagSize(field_number)
    if is_packed:
      def PackedFieldSize(value):
        # One tag, a varint byte count, then len(value) fixed-width payloads.
        payload = len(value) * value_size
        return tag_size + _VarintSize(payload) + payload
      return PackedFieldSize
    if is_repeated:
      # Every element carries its own tag.
      per_element = value_size + tag_size
      def RepeatedFieldSize(value):
        return per_element * len(value)
      return RepeatedFieldSize
    # Singular field: size is a compile-time constant.
    total = value_size + tag_size
    def FieldSize(value):
      return total
    return FieldSize

  return SpecificSizer
+
+
+# ====================================================================
+# Here we declare a sizer constructor for each field type. Each "sizer
+# constructor" is a function that takes (field_number, is_repeated, is_packed)
+# as parameters and returns a sizer, which in turn takes a field value as
+# a parameter and returns its encoded size.
+
+
# int32/int64/enum use the *signed* varint sizer: negative values are
# sign-extended to ten bytes on the wire.
Int32Sizer = Int64Sizer = EnumSizer = _SimpleSizer(_SignedVarintSize)

UInt32Sizer = UInt64Sizer = _SimpleSizer(_VarintSize)

# sint32/sint64 are ZigZag-transformed before sizing.
SInt32Sizer = SInt64Sizer = _ModifiedSizer(
    _SignedVarintSize, wire_format.ZigZagEncode)

Fixed32Sizer = SFixed32Sizer = FloatSizer = _FixedSizer(4)
Fixed64Sizer = SFixed64Sizer = DoubleSizer = _FixedSizer(8)

# Bools are varints that always fit in one byte.
BoolSizer = _FixedSizer(1)
+
+
def StringSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a string field.

  Strings are length-delimited and can never be packed, hence the assert.
  """

  tag_size = _TagSize(field_number)
  local_VarintSize = _VarintSize
  local_len = len
  assert not is_packed
  if is_repeated:
    def RepeatedFieldSize(value):
      result = tag_size * len(value)
      for element in value:
        # l is the UTF-8 *byte* length (not character count); it is charged
        # once for the varint length prefix and once for the payload.
        l = local_len(element.encode('utf-8'))
        result += local_VarintSize(l) + l
      return result
    return RepeatedFieldSize
  else:
    def FieldSize(value):
      l = local_len(value.encode('utf-8'))
      return tag_size + local_VarintSize(l) + l
    return FieldSize
+
+
def BytesSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a bytes field.

  Identical to StringSizer except no UTF-8 encoding step is needed.
  """

  tag_size = _TagSize(field_number)
  local_VarintSize = _VarintSize
  local_len = len
  assert not is_packed
  if is_repeated:
    def RepeatedFieldSize(value):
      result = tag_size * len(value)
      for element in value:
        # Length prefix plus payload for each element.
        l = local_len(element)
        result += local_VarintSize(l) + l
      return result
    return RepeatedFieldSize
  else:
    def FieldSize(value):
      l = local_len(value)
      return tag_size + local_VarintSize(l) + l
    return FieldSize
+
+
def GroupSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a group field.

  Groups are bracketed by START_GROUP and END_GROUP tags rather than a
  length prefix, hence the doubled tag size.
  """

  tag_size = _TagSize(field_number) * 2
  assert not is_packed
  if is_repeated:
    def RepeatedFieldSize(value):
      result = tag_size * len(value)
      for element in value:
        result += element.ByteSize()
      return result
    return RepeatedFieldSize
  else:
    def FieldSize(value):
      return tag_size + value.ByteSize()
    return FieldSize
+
+
def MessageSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a message field.

  Submessages are length-delimited: tag, varint byte count, then the
  serialized body.
  """

  tag_size = _TagSize(field_number)
  local_VarintSize = _VarintSize
  assert not is_packed
  if is_repeated:
    def RepeatedFieldSize(value):
      result = tag_size * len(value)
      for element in value:
        l = element.ByteSize()
        result += local_VarintSize(l) + l
      return result
    return RepeatedFieldSize
  else:
    def FieldSize(value):
      l = value.ByteSize()
      return tag_size + local_VarintSize(l) + l
    return FieldSize
+
+
+# --------------------------------------------------------------------
+# MessageSet is special: it needs custom logic to compute its size properly.
+
+
def MessageSetItemSizer(field_number):
  """Returns a sizer for extensions of MessageSet.

  The message set message looks like this:
    message MessageSet {
      repeated group Item = 1 {
        required int32 type_id = 2;
        required string message = 3;
      }
    }
  """
  # Fixed overhead: the Item start/end group tags (field 1), the type_id tag
  # plus its varint value (the extension's field number), and the message tag.
  static_size = (_TagSize(1) * 2 + _TagSize(2) + _VarintSize(field_number) +
                 _TagSize(3))
  local_VarintSize = _VarintSize

  def FieldSize(value):
    # Variable part: varint length prefix plus the serialized payload.
    l = value.ByteSize()
    return static_size + local_VarintSize(l) + l

  return FieldSize
+
+
+# --------------------------------------------------------------------
+# Map is special: it needs custom logic to compute its size properly.
+
+
def MapSizer(field_descriptor, is_message_map):
  """Returns a sizer for a map field.

  Each (key, value) pair is sized as if it were a MapEntry submessage.

  Args:
    field_descriptor: Descriptor of the map field.
    is_message_map: True when the map's values are messages, whose cached
      sizes must be refreshed via ByteSize().
  """

  # Can't look at field_descriptor.message_type._concrete_class because it may
  # not have been initialized yet.
  message_type = field_descriptor.message_type
  message_sizer = MessageSizer(field_descriptor.number, False, False)

  def FieldSize(map_value):
    total = 0
    for key in map_value:
      value = map_value[key]
      # It's wasteful to create the messages and throw them away one second
      # later since we'll do the same for the actual encode. But there's not an
      # obvious way to avoid this within the current design without tons of code
      # duplication. For message map, value.ByteSize() should be called to
      # update the status.
      entry_msg = message_type._concrete_class(key=key, value=value)
      total += message_sizer(entry_msg)
      if is_message_map:
        value.ByteSize()
    return total

  return FieldSize
+
+# ====================================================================
+# Encoders!
+
+
+def _VarintEncoder():
+ """Return an encoder for a basic varint value (does not include tag)."""
+
+ local_int2byte = struct.Struct('>B').pack
+
+ def EncodeVarint(write, value, unused_deterministic=None):
+ bits = value & 0x7f
+ value >>= 7
+ while value:
+ write(local_int2byte(0x80|bits))
+ bits = value & 0x7f
+ value >>= 7
+ return write(local_int2byte(bits))
+
+ return EncodeVarint
+
+
+def _SignedVarintEncoder():
+ """Return an encoder for a basic signed varint value (does not include
+ tag)."""
+
+ local_int2byte = struct.Struct('>B').pack
+
+ def EncodeSignedVarint(write, value, unused_deterministic=None):
+ if value < 0:
+ value += (1 << 64)
+ bits = value & 0x7f
+ value >>= 7
+ while value:
+ write(local_int2byte(0x80|bits))
+ bits = value & 0x7f
+ value >>= 7
+ return write(local_int2byte(bits))
+
+ return EncodeSignedVarint
+
+
# Shared encoder instances, built once at import time and re-bound to locals
# inside the hot closures below.
_EncodeVarint = _VarintEncoder()
_EncodeSignedVarint = _SignedVarintEncoder()
+
+
def _VarintBytes(value):
  """Encode the given integer as a varint and return the bytes. This is only
  called at startup time so it doesn't need to be fast."""

  buf = bytearray()
  _EncodeVarint(buf.extend, value, True)
  return bytes(buf)
+
+
def TagBytes(field_number, wire_type):
  """Encode the given tag and return the bytes. Only called at startup.

  Args:
    field_number: The proto field number.
    wire_type: One of the wire_format.WIRETYPE_* constants.

  Returns:
    The varint-encoded (field_number << 3 | wire_type) tag as bytes.
  """

  return bytes(_VarintBytes(wire_format.PackTag(field_number, wire_type)))
+
+# --------------------------------------------------------------------
+# As with sizers (see above), we have a number of common encoder
+# implementations.
+
+
def _SimpleEncoder(wire_type, encode_value, compute_value_size):
  """Return a constructor for an encoder for fields of a particular type.

  Args:
    wire_type: The field's wire type, for encoding tags.
    encode_value: A function which encodes an individual value, e.g.
      _EncodeVarint().
    compute_value_size: A function which computes the size of an individual
      value, e.g. _VarintSize().
  """

  def SpecificEncoder(field_number, is_repeated, is_packed):
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value, deterministic):
        write(tag_bytes)
        # Two passes: first total the payload size for the varint length
        # prefix, then encode the elements back to back.
        size = 0
        for element in value:
          size += compute_value_size(element)
        local_EncodeVarint(write, size, deterministic)
        for element in value:
          encode_value(write, element, deterministic)
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value, deterministic):
        # Non-packed repeated fields re-emit the tag before every element.
        for element in value:
          write(tag_bytes)
          encode_value(write, element, deterministic)
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value, deterministic):
        write(tag_bytes)
        # Returning the last call's result saves two instructions (see
        # module docstring).
        return encode_value(write, value, deterministic)
      return EncodeField

  return SpecificEncoder
+
+
def _ModifiedEncoder(wire_type, encode_value, compute_value_size, modify_value):
  """Like SimpleEncoder but additionally invokes modify_value on every value
  before passing it to encode_value. Usually modify_value is ZigZagEncode.

  Args:
    wire_type: The field's wire type, for encoding tags.
    encode_value: Encodes one (already modified) value.
    compute_value_size: Sizes one (already modified) value.
    modify_value: Transformation applied to each value before encoding.
  """

  def SpecificEncoder(field_number, is_repeated, is_packed):
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value, deterministic):
        write(tag_bytes)
        # Size pass then encode pass; modify_value is applied in both.
        size = 0
        for element in value:
          size += compute_value_size(modify_value(element))
        local_EncodeVarint(write, size, deterministic)
        for element in value:
          encode_value(write, modify_value(element), deterministic)
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value, deterministic):
        for element in value:
          write(tag_bytes)
          encode_value(write, modify_value(element), deterministic)
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value, deterministic):
        write(tag_bytes)
        return encode_value(write, modify_value(value), deterministic)
      return EncodeField

  return SpecificEncoder
+
+
def _StructPackEncoder(wire_type, format):
  """Return a constructor for an encoder for a fixed-width field.

  Args:
    wire_type: The field's wire type, for encoding tags.
    format: The format string to pass to struct.pack().
  """

  value_size = struct.calcsize(format)

  def SpecificEncoder(field_number, is_repeated, is_packed):
    local_struct_pack = struct.pack
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value, deterministic):
        write(tag_bytes)
        # Fixed-width elements: the payload length is known without a
        # separate sizing pass.
        local_EncodeVarint(write, len(value) * value_size, deterministic)
        for element in value:
          write(local_struct_pack(format, element))
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value, unused_deterministic=None):
        for element in value:
          write(tag_bytes)
          write(local_struct_pack(format, element))
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value, unused_deterministic=None):
        write(tag_bytes)
        return write(local_struct_pack(format, value))
      return EncodeField

  return SpecificEncoder
+
+
def _FloatingPointEncoder(wire_type, format):
  """Return a constructor for an encoder for float fields.

  This is like StructPackEncoder, but catches errors that may be due to
  passing non-finite floating-point values to struct.pack, and makes a
  second attempt to encode those values.

  Args:
    wire_type: The field's wire type, for encoding tags.
    format: The format string to pass to struct.pack().
  """

  value_size = struct.calcsize(format)
  if value_size == 4:
    def EncodeNonFiniteOrRaise(write, value):
      # Remember that the serialized form uses little-endian byte order.
      # Hard-coded IEEE-754 single-precision bit patterns.
      if value == _POS_INF:
        write(b'\x00\x00\x80\x7F')
      elif value == _NEG_INF:
        write(b'\x00\x00\x80\xFF')
      elif value != value:  # NaN is the only value unequal to itself.
        write(b'\x00\x00\xC0\x7F')
      else:
        # Finite value: bare raise re-raises the original pack error from
        # the enclosing except block.
        raise
  elif value_size == 8:
    def EncodeNonFiniteOrRaise(write, value):
      # IEEE-754 double-precision bit patterns, little-endian.
      if value == _POS_INF:
        write(b'\x00\x00\x00\x00\x00\x00\xF0\x7F')
      elif value == _NEG_INF:
        write(b'\x00\x00\x00\x00\x00\x00\xF0\xFF')
      elif value != value:  # NaN
        write(b'\x00\x00\x00\x00\x00\x00\xF8\x7F')
      else:
        raise
  else:
    raise ValueError('Can\'t encode floating-point values that are '
                     '%d bytes long (only 4 or 8)' % value_size)

  def SpecificEncoder(field_number, is_repeated, is_packed):
    local_struct_pack = struct.pack
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value, deterministic):
        write(tag_bytes)
        local_EncodeVarint(write, len(value) * value_size, deterministic)
        for element in value:
          # This try/except block is going to be faster than any code that
          # we could write to check whether element is finite.
          # NOTE(review): on current CPython, struct.pack handles inf/NaN
          # fine and bad values raise struct.error, not SystemError — this
          # fallback looks py2-era; confirm it is still reachable.
          try:
            write(local_struct_pack(format, element))
          except SystemError:
            EncodeNonFiniteOrRaise(write, element)
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value, unused_deterministic=None):
        for element in value:
          write(tag_bytes)
          try:
            write(local_struct_pack(format, element))
          except SystemError:
            EncodeNonFiniteOrRaise(write, element)
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value, unused_deterministic=None):
        write(tag_bytes)
        try:
          write(local_struct_pack(format, value))
        except SystemError:
          EncodeNonFiniteOrRaise(write, value)
      return EncodeField

  return SpecificEncoder
+
+
+# ====================================================================
+# Here we declare an encoder constructor for each field type. These work
+# very similarly to sizer constructors, described earlier.
+
+
# int32/int64/enum need the signed encoder so negatives sign-extend to ten
# bytes on the wire.
Int32Encoder = Int64Encoder = EnumEncoder = _SimpleEncoder(
    wire_format.WIRETYPE_VARINT, _EncodeSignedVarint, _SignedVarintSize)

UInt32Encoder = UInt64Encoder = _SimpleEncoder(
    wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize)

# ZigZag output is non-negative, so the unsigned varint encoder suffices.
SInt32Encoder = SInt64Encoder = _ModifiedEncoder(
    wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize,
    wire_format.ZigZagEncode)

# Note that Python conveniently guarantees that when using the '<' prefix on
# formats, they will also have the same size across all platforms (as opposed
# to without the prefix, where their sizes depend on the C compiler's basic
# type sizes).
Fixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<I')
Fixed64Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<Q')
SFixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<i')
SFixed64Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<q')
FloatEncoder = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED32, '<f')
DoubleEncoder = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED64, '<d')
+
+
def BoolEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a boolean field."""

  false_byte = b'\x00'
  true_byte = b'\x01'
  if is_packed:
    tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
    local_EncodeVarint = _EncodeVarint
    def EncodePackedField(write, value, deterministic):
      write(tag_bytes)
      # Each packed bool is exactly one byte, so the length prefix is simply
      # the element count.
      local_EncodeVarint(write, len(value), deterministic)
      for element in value:
        if element:
          write(true_byte)
        else:
          write(false_byte)
    return EncodePackedField
  elif is_repeated:
    tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT)
    def EncodeRepeatedField(write, value, unused_deterministic=None):
      for element in value:
        write(tag_bytes)
        if element:
          write(true_byte)
        else:
          write(false_byte)
    return EncodeRepeatedField
  else:
    tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT)
    def EncodeField(write, value, unused_deterministic=None):
      write(tag_bytes)
      if value:
        return write(true_byte)
      return write(false_byte)
    return EncodeField
+
+
def StringEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a string field.

  Strings are length-delimited and can never be packed, hence the assert.
  """

  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
  local_EncodeVarint = _EncodeVarint
  local_len = len
  assert not is_packed
  if is_repeated:
    def EncodeRepeatedField(write, value, deterministic):
      for element in value:
        # Encode to UTF-8 first: the length prefix is the byte length, not
        # the character count.
        encoded = element.encode('utf-8')
        write(tag)
        local_EncodeVarint(write, local_len(encoded), deterministic)
        write(encoded)
    return EncodeRepeatedField
  else:
    def EncodeField(write, value, deterministic):
      encoded = value.encode('utf-8')
      write(tag)
      local_EncodeVarint(write, local_len(encoded), deterministic)
      return write(encoded)
    return EncodeField
+
+
def BytesEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a bytes field.

  Like StringEncoder but without the UTF-8 encoding step.
  """

  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
  local_EncodeVarint = _EncodeVarint
  local_len = len
  assert not is_packed
  if is_repeated:
    def EncodeRepeatedField(write, value, deterministic):
      for element in value:
        write(tag)
        local_EncodeVarint(write, local_len(element), deterministic)
        write(element)
    return EncodeRepeatedField
  else:
    def EncodeField(write, value, deterministic):
      write(tag)
      local_EncodeVarint(write, local_len(value), deterministic)
      return write(value)
    return EncodeField
+
+
def GroupEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a group field.

  Groups are bracketed by START_GROUP/END_GROUP tags instead of a length
  prefix, so no sizing pass is needed.
  """

  start_tag = TagBytes(field_number, wire_format.WIRETYPE_START_GROUP)
  end_tag = TagBytes(field_number, wire_format.WIRETYPE_END_GROUP)
  assert not is_packed
  if is_repeated:
    def EncodeRepeatedField(write, value, deterministic):
      for element in value:
        write(start_tag)
        element._InternalSerialize(write, deterministic)
        write(end_tag)
    return EncodeRepeatedField
  else:
    def EncodeField(write, value, deterministic):
      write(start_tag)
      value._InternalSerialize(write, deterministic)
      return write(end_tag)
    return EncodeField
+
+
def MessageEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a message field.

  Submessages are length-delimited: tag, varint ByteSize prefix, then the
  serialized body.
  """

  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
  local_EncodeVarint = _EncodeVarint
  assert not is_packed
  if is_repeated:
    def EncodeRepeatedField(write, value, deterministic):
      for element in value:
        write(tag)
        # ByteSize() also refreshes the element's cached size before the
        # recursive serialize call.
        local_EncodeVarint(write, element.ByteSize(), deterministic)
        element._InternalSerialize(write, deterministic)
    return EncodeRepeatedField
  else:
    def EncodeField(write, value, deterministic):
      write(tag)
      local_EncodeVarint(write, value.ByteSize(), deterministic)
      return value._InternalSerialize(write, deterministic)
    return EncodeField
+
+
+# --------------------------------------------------------------------
+# As before, MessageSet is special.
+
+
def MessageSetItemEncoder(field_number):
  """Encoder for extensions of MessageSet.

  The message set message looks like this:
    message MessageSet {
      repeated group Item = 1 {
        required int32 type_id = 2;
        required string message = 3;
      }
    }
  """
  # Precomputed constant prefix: Item start-group tag, type_id tag and its
  # varint value (the extension's field number), then the message field tag.
  start_bytes = b"".join([
      TagBytes(1, wire_format.WIRETYPE_START_GROUP),
      TagBytes(2, wire_format.WIRETYPE_VARINT),
      _VarintBytes(field_number),
      TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)])
  end_bytes = TagBytes(1, wire_format.WIRETYPE_END_GROUP)
  local_EncodeVarint = _EncodeVarint

  def FieldSize(value):
    pass  # placeholder removed

  def EncodeField(write, value, deterministic):
    write(start_bytes)
    # Length prefix for the 'message' field, then its payload, then the
    # closing group tag.
    local_EncodeVarint(write, value.ByteSize(), deterministic)
    value._InternalSerialize(write, deterministic)
    return write(end_bytes)

  return EncodeField
+
+
+# --------------------------------------------------------------------
+# As before, Map is special.
+
+
def MapEncoder(field_descriptor):
  """Encoder for map fields.

  (The original docstring said "extensions of MessageSet" — a copy-paste
  error from MessageSetItemEncoder.)

  Maps always have a wire format like this:
    message MapEntry {
      key_type key = 1;
      value_type value = 2;
    }
    repeated MapEntry map = N;
  """
  # Can't look at field_descriptor.message_type._concrete_class because it may
  # not have been initialized yet.
  message_type = field_descriptor.message_type
  encode_message = MessageEncoder(field_descriptor.number, False, False)

  def EncodeField(write, value, deterministic):
    # Deterministic serialization iterates keys in sorted order; otherwise
    # the map's native iteration order is used.
    value_keys = sorted(value.keys()) if deterministic else value
    for key in value_keys:
      entry_msg = message_type._concrete_class(key=key, value=value[key])
      encode_message(write, entry_msg, deterministic)

  return EncodeField
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/enum_type_wrapper.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/enum_type_wrapper.py
new file mode 100644
index 0000000000000000000000000000000000000000..9a53999a430885d55a785361a51944b00901f4bf
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/enum_type_wrapper.py
@@ -0,0 +1,115 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""A simple wrapper around enum types to expose utility functions.
+
+Instances are created as properties with the same name as the enum they wrap
+on proto classes. For usage, see:
+ reflection_test.py
+"""
+
+__author__ = 'rabsatt@google.com (Kevin Rabsatt)'
+
+
class EnumTypeWrapper(object):
  """A utility for finding the names of enum values.

  Wraps an EnumDescriptor and exposes name/number lookups plus dict-like
  keys()/values()/items() views over the enum's declared values.
  """

  # Shadowed per-instance in __init__; declared here so the attribute is
  # always present on the class.
  DESCRIPTOR = None

  def __init__(self, enum_type):
    """Inits EnumTypeWrapper with an EnumDescriptor."""
    self._enum_type = enum_type
    self.DESCRIPTOR = enum_type  # pylint: disable=invalid-name

  def Name(self, number):  # pylint: disable=invalid-name
    """Returns a string containing the name of an enum value."""
    by_number = self._enum_type.values_by_number
    if number in by_number:
      return by_number[number].name

    if not isinstance(number, int):
      raise TypeError(
          'Enum value for {} must be an int, but got {} {!r}.'.format(
              self._enum_type.name, type(number), number))
    # repr here to handle the odd case when you pass in a boolean.
    raise ValueError('Enum {} has no name defined for value {!r}'.format(
        self._enum_type.name, number))

  def Value(self, name):  # pylint: disable=invalid-name
    """Returns the value corresponding to the given enum name."""
    by_name = self._enum_type.values_by_name
    if name in by_name:
      return by_name[name].number
    raise ValueError('Enum {} has no value defined for name {!r}'.format(
        self._enum_type.name, name))

  def keys(self):
    """Return a list of the string names in the enum.

    Returns:
      A list of strs, in the order they were defined in the .proto file.
    """
    return [descriptor.name for descriptor in self._enum_type.values]

  def values(self):
    """Return a list of the integer values in the enum.

    Returns:
      A list of ints, in the order they were defined in the .proto file.
    """
    return [descriptor.number for descriptor in self._enum_type.values]

  def items(self):
    """Return a list of the (name, value) pairs of the enum.

    Returns:
      A list of (str, int) pairs, in the order they were defined
      in the .proto file.
    """
    return [(descriptor.name, descriptor.number)
            for descriptor in self._enum_type.values]

  def __getattr__(self, name):
    """Returns the value corresponding to the given enum name."""
    # __getattribute__ is used directly so this hook cannot recurse while
    # '_enum_type' itself is being resolved.
    enum_type = super(EnumTypeWrapper, self).__getattribute__('_enum_type')
    try:
      return enum_type.values_by_name[name].number
    except KeyError:
      pass  # fall out to break exception chaining
    raise AttributeError('Enum {} has no value defined for name {!r}'.format(
        self._enum_type.name, name))
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/extension_dict.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/extension_dict.py
new file mode 100644
index 0000000000000000000000000000000000000000..b346cf283e2ca7d76306d99ecf153d61709fd1ea
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/extension_dict.py
@@ -0,0 +1,213 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Contains _ExtensionDict class to represent extensions.
+"""
+
+from google.protobuf.internal import type_checkers
+from google.protobuf.descriptor import FieldDescriptor
+
+
+def _VerifyExtensionHandle(message, extension_handle):
+ """Verify that the given extension handle is valid."""
+
+ if not isinstance(extension_handle, FieldDescriptor):
+ raise KeyError('HasExtension() expects an extension handle, got: %s' %
+ extension_handle)
+
+ if not extension_handle.is_extension:
+ raise KeyError('"%s" is not an extension.' % extension_handle.full_name)
+
+ if not extension_handle.containing_type:
+ raise KeyError('"%s" is missing a containing_type.'
+ % extension_handle.full_name)
+
+ if extension_handle.containing_type is not message.DESCRIPTOR:
+ raise KeyError('Extension "%s" extends message type "%s", but this '
+ 'message is of type "%s".' %
+ (extension_handle.full_name,
+ extension_handle.containing_type.full_name,
+ message.DESCRIPTOR.full_name))
+
+
+# TODO(robinson): Unify error handling of "unknown extension" crap.
+# TODO(robinson): Support iteritems()-style iteration over all
+# extensions with the "has" bits turned on?
+class _ExtensionDict(object):
+
+ """Dict-like container for Extension fields on proto instances.
+
+ Note that in all cases we expect extension handles to be
+ FieldDescriptors.
+ """
+
+ def __init__(self, extended_message):
+ """
+ Args:
+ extended_message: Message instance for which we are the Extensions dict.
+ """
+ self._extended_message = extended_message
+
+ def __getitem__(self, extension_handle):
+ """Returns the current value of the given extension handle."""
+
+ _VerifyExtensionHandle(self._extended_message, extension_handle)
+
+ result = self._extended_message._fields.get(extension_handle)
+ if result is not None:
+ return result
+
+ if extension_handle.label == FieldDescriptor.LABEL_REPEATED:
+ result = extension_handle._default_constructor(self._extended_message)
+ elif extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
+ message_type = extension_handle.message_type
+ if not hasattr(message_type, '_concrete_class'):
+ # pylint: disable=protected-access
+ self._extended_message._FACTORY.GetPrototype(message_type)
+ assert getattr(extension_handle.message_type, '_concrete_class', None), (
+ 'Uninitialized concrete class found for field %r (message type %r)'
+ % (extension_handle.full_name,
+ extension_handle.message_type.full_name))
+ result = extension_handle.message_type._concrete_class()
+ try:
+ result._SetListener(self._extended_message._listener_for_children)
+ except ReferenceError:
+ pass
+ else:
+ # Singular scalar -- just return the default without inserting into the
+ # dict.
+ return extension_handle.default_value
+
+ # Atomically check if another thread has preempted us and, if not, swap
+ # in the new object we just created. If someone has preempted us, we
+ # take that object and discard ours.
+ # WARNING: We are relying on setdefault() being atomic. This is true
+ # in CPython but we haven't investigated others. This warning appears
+ # in several other locations in this file.
+ result = self._extended_message._fields.setdefault(
+ extension_handle, result)
+
+ return result
+
+ def __eq__(self, other):
+ if not isinstance(other, self.__class__):
+ return False
+
+ my_fields = self._extended_message.ListFields()
+ other_fields = other._extended_message.ListFields()
+
+ # Get rid of non-extension fields.
+ my_fields = [field for field in my_fields if field.is_extension]
+ other_fields = [field for field in other_fields if field.is_extension]
+
+ return my_fields == other_fields
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __len__(self):
+ fields = self._extended_message.ListFields()
+ # Get rid of non-extension fields.
+ extension_fields = [field for field in fields if field[0].is_extension]
+ return len(extension_fields)
+
+ def __hash__(self):
+ raise TypeError('unhashable object')
+
+ # Note that this is only meaningful for non-repeated, scalar extension
+ # fields. Note also that we may have to call _Modified() when we do
+ # successfully set a field this way, to set any necessary "has" bits in the
+ # ancestors of the extended message.
+ def __setitem__(self, extension_handle, value):
+ """If extension_handle specifies a non-repeated, scalar extension
+ field, sets the value of that field.
+ """
+
+ _VerifyExtensionHandle(self._extended_message, extension_handle)
+
+ if (extension_handle.label == FieldDescriptor.LABEL_REPEATED or
+ extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE):
+ raise TypeError(
+ 'Cannot assign to extension "%s" because it is a repeated or '
+ 'composite type.' % extension_handle.full_name)
+
+ # It's slightly wasteful to lookup the type checker each time,
+ # but we expect this to be a vanishingly uncommon case anyway.
+ type_checker = type_checkers.GetTypeChecker(extension_handle)
+ # pylint: disable=protected-access
+ self._extended_message._fields[extension_handle] = (
+ type_checker.CheckValue(value))
+ self._extended_message._Modified()
+
+ def __delitem__(self, extension_handle):
+ self._extended_message.ClearExtension(extension_handle)
+
+ def _FindExtensionByName(self, name):
+ """Tries to find a known extension with the specified name.
+
+ Args:
+ name: Extension full name.
+
+ Returns:
+ Extension field descriptor.
+ """
+ return self._extended_message._extensions_by_name.get(name, None)
+
+ def _FindExtensionByNumber(self, number):
+ """Tries to find a known extension with the field number.
+
+ Args:
+ number: Extension field number.
+
+ Returns:
+ Extension field descriptor.
+ """
+ return self._extended_message._extensions_by_number.get(number, None)
+
+ def __iter__(self):
+ # Return a generator over the populated extension fields
+ return (f[0] for f in self._extended_message.ListFields()
+ if f[0].is_extension)
+
+ def __contains__(self, extension_handle):
+ _VerifyExtensionHandle(self._extended_message, extension_handle)
+
+ if extension_handle not in self._extended_message._fields:
+ return False
+
+ if extension_handle.label == FieldDescriptor.LABEL_REPEATED:
+ return bool(self._extended_message._fields.get(extension_handle))
+
+ if extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
+ value = self._extended_message._fields.get(extension_handle)
+ # pylint: disable=protected-access
+ return value is not None and value._is_present_in_parent
+
+ return True
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/message_listener.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/message_listener.py
new file mode 100644
index 0000000000000000000000000000000000000000..0fc255a774563473edb7c0c4315d5a60a98b419a
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/message_listener.py
@@ -0,0 +1,78 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Defines a listener interface for observing certain
+state transitions on Message objects.
+
+Also defines a null implementation of this interface.
+"""
+
+__author__ = 'robinson@google.com (Will Robinson)'
+
+
+class MessageListener(object):
+
+ """Listens for modifications made to a message. Meant to be registered via
+ Message._SetListener().
+
+ Attributes:
+ dirty: If True, then calling Modified() would be a no-op. This can be
+ used to avoid these calls entirely in the common case.
+ """
+
+ def Modified(self):
+ """Called every time the message is modified in such a way that the parent
+ message may need to be updated. This currently means either:
+ (a) The message was modified for the first time, so the parent message
+ should henceforth mark the message as present.
+ (b) The message's cached byte size became dirty -- i.e. the message was
+ modified for the first time after a previous call to ByteSize().
+ Therefore the parent should also mark its byte size as dirty.
+ Note that (a) implies (b), since new objects start out with a client cached
+ size (zero). However, we document (a) explicitly because it is important.
+
+ Modified() will *only* be called in response to one of these two events --
+ not every time the sub-message is modified.
+
+ Note that if the listener's |dirty| attribute is true, then calling
+ Modified at the moment would be a no-op, so it can be skipped. Performance-
+ sensitive callers should check this attribute directly before calling since
+ it will be true most of the time.
+ """
+
+ raise NotImplementedError
+
+
+class NullMessageListener(object):
+
+ """No-op MessageListener implementation."""
+
+ def Modified(self):
+ pass
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/python_message.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/python_message.py
new file mode 100644
index 0000000000000000000000000000000000000000..2921d5cb6ef197bb89f8aa81ab9368e04762a29b
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/python_message.py
@@ -0,0 +1,1539 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# This code is meant to work on Python 2.4 and above only.
+#
+# TODO(robinson): Helpers for verbose, common checks like seeing if a
+# descriptor's cpp_type is CPPTYPE_MESSAGE.
+
+"""Contains a metaclass and helper functions used to create
+protocol message classes from Descriptor objects at runtime.
+
+Recall that a metaclass is the "type" of a class.
+(A class is to a metaclass what an instance is to a class.)
+
+In this case, we use the GeneratedProtocolMessageType metaclass
+to inject all the useful functionality into the classes
+output by the protocol compiler at compile-time.
+
+The upshot of all this is that the real implementation
+details for ALL pure-Python protocol buffers are *here in
+this file*.
+"""
+
+__author__ = 'robinson@google.com (Will Robinson)'
+
+from io import BytesIO
+import struct
+import sys
+import weakref
+
+# We use "as" to avoid name collisions with variables.
+from google.protobuf.internal import api_implementation
+from google.protobuf.internal import containers
+from google.protobuf.internal import decoder
+from google.protobuf.internal import encoder
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf.internal import extension_dict
+from google.protobuf.internal import message_listener as message_listener_mod
+from google.protobuf.internal import type_checkers
+from google.protobuf.internal import well_known_types
+from google.protobuf.internal import wire_format
+from google.protobuf import descriptor as descriptor_mod
+from google.protobuf import message as message_mod
+from google.protobuf import text_format
+
+_FieldDescriptor = descriptor_mod.FieldDescriptor
+_AnyFullTypeName = 'google.protobuf.Any'
+_ExtensionDict = extension_dict._ExtensionDict
+
+class GeneratedProtocolMessageType(type):
+
+ """Metaclass for protocol message classes created at runtime from Descriptors.
+
+ We add implementations for all methods described in the Message class. We
+ also create properties to allow getting/setting all fields in the protocol
+ message. Finally, we create slots to prevent users from accidentally
+ "setting" nonexistent fields in the protocol message, which then wouldn't get
+ serialized / deserialized properly.
+
+ The protocol compiler currently uses this metaclass to create protocol
+ message classes at runtime. Clients can also manually create their own
+ classes at runtime, as in this example:
+
+ mydescriptor = Descriptor(.....)
+ factory = symbol_database.Default()
+ factory.pool.AddDescriptor(mydescriptor)
+ MyProtoClass = factory.GetPrototype(mydescriptor)
+ myproto_instance = MyProtoClass()
+ myproto.foo_field = 23
+ ...
+ """
+
+ # Must be consistent with the protocol-compiler code in
+ # proto2/compiler/internal/generator.*.
+ _DESCRIPTOR_KEY = 'DESCRIPTOR'
+
+ def __new__(cls, name, bases, dictionary):
+ """Custom allocation for runtime-generated class types.
+
+ We override __new__ because this is apparently the only place
+ where we can meaningfully set __slots__ on the class we're creating(?).
+ (The interplay between metaclasses and slots is not very well-documented).
+
+ Args:
+ name: Name of the class (ignored, but required by the
+ metaclass protocol).
+ bases: Base classes of the class we're constructing.
+ (Should be message.Message). We ignore this field, but
+ it's required by the metaclass protocol
+ dictionary: The class dictionary of the class we're
+ constructing. dictionary[_DESCRIPTOR_KEY] must contain
+ a Descriptor object describing this protocol message
+ type.
+
+ Returns:
+ Newly-allocated class.
+
+ Raises:
+ RuntimeError: Generated code only work with python cpp extension.
+ """
+ descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY]
+
+ if isinstance(descriptor, str):
+ raise RuntimeError('The generated code only work with python cpp '
+ 'extension, but it is using pure python runtime.')
+
+ # If a concrete class already exists for this descriptor, don't try to
+ # create another. Doing so will break any messages that already exist with
+ # the existing class.
+ #
+ # The C++ implementation appears to have its own internal `PyMessageFactory`
+ # to achieve similar results.
+ #
+ # This most commonly happens in `text_format.py` when using descriptors from
+ # a custom pool; it calls symbol_database.Global().getPrototype() on a
+ # descriptor which already has an existing concrete class.
+ new_class = getattr(descriptor, '_concrete_class', None)
+ if new_class:
+ return new_class
+
+ if descriptor.full_name in well_known_types.WKTBASES:
+ bases += (well_known_types.WKTBASES[descriptor.full_name],)
+ _AddClassAttributesForNestedExtensions(descriptor, dictionary)
+ _AddSlots(descriptor, dictionary)
+
+ superclass = super(GeneratedProtocolMessageType, cls)
+ new_class = superclass.__new__(cls, name, bases, dictionary)
+ return new_class
+
+ def __init__(cls, name, bases, dictionary):
+ """Here we perform the majority of our work on the class.
+ We add enum getters, an __init__ method, implementations
+ of all Message methods, and properties for all fields
+ in the protocol type.
+
+ Args:
+ name: Name of the class (ignored, but required by the
+ metaclass protocol).
+ bases: Base classes of the class we're constructing.
+ (Should be message.Message). We ignore this field, but
+ it's required by the metaclass protocol
+ dictionary: The class dictionary of the class we're
+ constructing. dictionary[_DESCRIPTOR_KEY] must contain
+ a Descriptor object describing this protocol message
+ type.
+ """
+ descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY]
+
+ # If this is an _existing_ class looked up via `_concrete_class` in the
+ # __new__ method above, then we don't need to re-initialize anything.
+ existing_class = getattr(descriptor, '_concrete_class', None)
+ if existing_class:
+ assert existing_class is cls, (
+ 'Duplicate `GeneratedProtocolMessageType` created for descriptor %r'
+ % (descriptor.full_name))
+ return
+
+ cls._decoders_by_tag = {}
+ if (descriptor.has_options and
+ descriptor.GetOptions().message_set_wire_format):
+ cls._decoders_by_tag[decoder.MESSAGE_SET_ITEM_TAG] = (
+ decoder.MessageSetItemDecoder(descriptor), None)
+
+ # Attach stuff to each FieldDescriptor for quick lookup later on.
+ for field in descriptor.fields:
+ _AttachFieldHelpers(cls, field)
+
+ descriptor._concrete_class = cls # pylint: disable=protected-access
+ _AddEnumValues(descriptor, cls)
+ _AddInitMethod(descriptor, cls)
+ _AddPropertiesForFields(descriptor, cls)
+ _AddPropertiesForExtensions(descriptor, cls)
+ _AddStaticMethods(cls)
+ _AddMessageMethods(descriptor, cls)
+ _AddPrivateHelperMethods(descriptor, cls)
+
+ superclass = super(GeneratedProtocolMessageType, cls)
+ superclass.__init__(name, bases, dictionary)
+
+
+# Stateless helpers for GeneratedProtocolMessageType below.
+# Outside clients should not access these directly.
+#
+# I opted not to make any of these methods on the metaclass, to make it more
+# clear that I'm not really using any state there and to keep clients from
+# thinking that they have direct access to these construction helpers.
+
+
+def _PropertyName(proto_field_name):
+ """Returns the name of the public property attribute which
+ clients can use to get and (in some cases) set the value
+ of a protocol message field.
+
+ Args:
+ proto_field_name: The protocol message field name, exactly
+ as it appears (or would appear) in a .proto file.
+ """
+ # TODO(robinson): Escape Python keywords (e.g., yield), and test this support.
+ # nnorwitz makes my day by writing:
+ # """
+ # FYI. See the keyword module in the stdlib. This could be as simple as:
+ #
+ # if keyword.iskeyword(proto_field_name):
+ # return proto_field_name + "_"
+ # return proto_field_name
+ # """
+ # Kenton says: The above is a BAD IDEA. People rely on being able to use
+ # getattr() and setattr() to reflectively manipulate field values. If we
+ # rename the properties, then every such user has to also make sure to apply
+ # the same transformation. Note that currently if you name a field "yield",
+ # you can still access it just fine using getattr/setattr -- it's not even
+ # that cumbersome to do so.
+ # TODO(kenton): Remove this method entirely if/when everyone agrees with my
+ # position.
+ return proto_field_name
+
+
+def _AddSlots(message_descriptor, dictionary):
+ """Adds a __slots__ entry to dictionary, containing the names of all valid
+ attributes for this message type.
+
+ Args:
+ message_descriptor: A Descriptor instance describing this message type.
+ dictionary: Class dictionary to which we'll add a '__slots__' entry.
+ """
+ dictionary['__slots__'] = ['_cached_byte_size',
+ '_cached_byte_size_dirty',
+ '_fields',
+ '_unknown_fields',
+ '_unknown_field_set',
+ '_is_present_in_parent',
+ '_listener',
+ '_listener_for_children',
+ '__weakref__',
+ '_oneofs']
+
+
+def _IsMessageSetExtension(field):
+ return (field.is_extension and
+ field.containing_type.has_options and
+ field.containing_type.GetOptions().message_set_wire_format and
+ field.type == _FieldDescriptor.TYPE_MESSAGE and
+ field.label == _FieldDescriptor.LABEL_OPTIONAL)
+
+
+def _IsMapField(field):
+ return (field.type == _FieldDescriptor.TYPE_MESSAGE and
+ field.message_type.has_options and
+ field.message_type.GetOptions().map_entry)
+
+
+def _IsMessageMapField(field):
+ value_type = field.message_type.fields_by_name['value']
+ return value_type.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE
+
+
+def _AttachFieldHelpers(cls, field_descriptor):
+ is_repeated = (field_descriptor.label == _FieldDescriptor.LABEL_REPEATED)
+ is_packable = (is_repeated and
+ wire_format.IsTypePackable(field_descriptor.type))
+ is_proto3 = field_descriptor.containing_type.syntax == 'proto3'
+ if not is_packable:
+ is_packed = False
+ elif field_descriptor.containing_type.syntax == 'proto2':
+ is_packed = (field_descriptor.has_options and
+ field_descriptor.GetOptions().packed)
+ else:
+ has_packed_false = (field_descriptor.has_options and
+ field_descriptor.GetOptions().HasField('packed') and
+ field_descriptor.GetOptions().packed == False)
+ is_packed = not has_packed_false
+ is_map_entry = _IsMapField(field_descriptor)
+
+ if is_map_entry:
+ field_encoder = encoder.MapEncoder(field_descriptor)
+ sizer = encoder.MapSizer(field_descriptor,
+ _IsMessageMapField(field_descriptor))
+ elif _IsMessageSetExtension(field_descriptor):
+ field_encoder = encoder.MessageSetItemEncoder(field_descriptor.number)
+ sizer = encoder.MessageSetItemSizer(field_descriptor.number)
+ else:
+ field_encoder = type_checkers.TYPE_TO_ENCODER[field_descriptor.type](
+ field_descriptor.number, is_repeated, is_packed)
+ sizer = type_checkers.TYPE_TO_SIZER[field_descriptor.type](
+ field_descriptor.number, is_repeated, is_packed)
+
+ field_descriptor._encoder = field_encoder
+ field_descriptor._sizer = sizer
+ field_descriptor._default_constructor = _DefaultValueConstructorForField(
+ field_descriptor)
+
+ def AddDecoder(wiretype, is_packed):
+ tag_bytes = encoder.TagBytes(field_descriptor.number, wiretype)
+ decode_type = field_descriptor.type
+ if (decode_type == _FieldDescriptor.TYPE_ENUM and
+ type_checkers.SupportsOpenEnums(field_descriptor)):
+ decode_type = _FieldDescriptor.TYPE_INT32
+
+ oneof_descriptor = None
+ clear_if_default = False
+ if field_descriptor.containing_oneof is not None:
+ oneof_descriptor = field_descriptor
+ elif (is_proto3 and not is_repeated and
+ field_descriptor.cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE):
+ clear_if_default = True
+
+ if is_map_entry:
+ is_message_map = _IsMessageMapField(field_descriptor)
+
+ field_decoder = decoder.MapDecoder(
+ field_descriptor, _GetInitializeDefaultForMap(field_descriptor),
+ is_message_map)
+ elif decode_type == _FieldDescriptor.TYPE_STRING:
+ field_decoder = decoder.StringDecoder(
+ field_descriptor.number, is_repeated, is_packed,
+ field_descriptor, field_descriptor._default_constructor,
+ clear_if_default)
+ elif field_descriptor.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+ field_decoder = type_checkers.TYPE_TO_DECODER[decode_type](
+ field_descriptor.number, is_repeated, is_packed,
+ field_descriptor, field_descriptor._default_constructor)
+ else:
+ field_decoder = type_checkers.TYPE_TO_DECODER[decode_type](
+ field_descriptor.number, is_repeated, is_packed,
+ # pylint: disable=protected-access
+ field_descriptor, field_descriptor._default_constructor,
+ clear_if_default)
+
+ cls._decoders_by_tag[tag_bytes] = (field_decoder, oneof_descriptor)
+
+ AddDecoder(type_checkers.FIELD_TYPE_TO_WIRE_TYPE[field_descriptor.type],
+ False)
+
+ if is_repeated and wire_format.IsTypePackable(field_descriptor.type):
+ # To support wire compatibility of adding packed = true, add a decoder for
+ # packed values regardless of the field's options.
+ AddDecoder(wire_format.WIRETYPE_LENGTH_DELIMITED, True)
+
+
+def _AddClassAttributesForNestedExtensions(descriptor, dictionary):
+ extensions = descriptor.extensions_by_name
+ for extension_name, extension_field in extensions.items():
+ assert extension_name not in dictionary
+ dictionary[extension_name] = extension_field
+
+
+def _AddEnumValues(descriptor, cls):
+ """Sets class-level attributes for all enum fields defined in this message.
+
+ Also exporting a class-level object that can name enum values.
+
+ Args:
+ descriptor: Descriptor object for this message type.
+ cls: Class we're constructing for this message type.
+ """
+ for enum_type in descriptor.enum_types:
+ setattr(cls, enum_type.name, enum_type_wrapper.EnumTypeWrapper(enum_type))
+ for enum_value in enum_type.values:
+ setattr(cls, enum_value.name, enum_value.number)
+
+
+def _GetInitializeDefaultForMap(field):
+ if field.label != _FieldDescriptor.LABEL_REPEATED:
+ raise ValueError('map_entry set on non-repeated field %s' % (
+ field.name))
+ fields_by_name = field.message_type.fields_by_name
+ key_checker = type_checkers.GetTypeChecker(fields_by_name['key'])
+
+ value_field = fields_by_name['value']
+ if _IsMessageMapField(field):
+ def MakeMessageMapDefault(message):
+ return containers.MessageMap(
+ message._listener_for_children, value_field.message_type, key_checker,
+ field.message_type)
+ return MakeMessageMapDefault
+ else:
+ value_checker = type_checkers.GetTypeChecker(value_field)
+ def MakePrimitiveMapDefault(message):
+ return containers.ScalarMap(
+ message._listener_for_children, key_checker, value_checker,
+ field.message_type)
+ return MakePrimitiveMapDefault
+
+def _DefaultValueConstructorForField(field):
+ """Returns a function which returns a default value for a field.
+
+ Args:
+ field: FieldDescriptor object for this field.
+
+ The returned function has one argument:
+ message: Message instance containing this field, or a weakref proxy
+ of same.
+
+ That function in turn returns a default value for this field. The default
+ value may refer back to |message| via a weak reference.
+ """
+
+ if _IsMapField(field):
+ return _GetInitializeDefaultForMap(field)
+
+ if field.label == _FieldDescriptor.LABEL_REPEATED:
+ if field.has_default_value and field.default_value != []:
+ raise ValueError('Repeated field default value not empty list: %s' % (
+ field.default_value))
+ if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+ # We can't look at _concrete_class yet since it might not have
+ # been set. (Depends on order in which we initialize the classes).
+ message_type = field.message_type
+ def MakeRepeatedMessageDefault(message):
+ return containers.RepeatedCompositeFieldContainer(
+ message._listener_for_children, field.message_type)
+ return MakeRepeatedMessageDefault
+ else:
+ type_checker = type_checkers.GetTypeChecker(field)
+ def MakeRepeatedScalarDefault(message):
+ return containers.RepeatedScalarFieldContainer(
+ message._listener_for_children, type_checker)
+ return MakeRepeatedScalarDefault
+
+ if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+ # _concrete_class may not yet be initialized.
+ message_type = field.message_type
+ def MakeSubMessageDefault(message):
+ assert getattr(message_type, '_concrete_class', None), (
+ 'Uninitialized concrete class found for field %r (message type %r)'
+ % (field.full_name, message_type.full_name))
+ result = message_type._concrete_class()
+ result._SetListener(
+ _OneofListener(message, field)
+ if field.containing_oneof is not None
+ else message._listener_for_children)
+ return result
+ return MakeSubMessageDefault
+
+ def MakeScalarDefault(message):
+ # TODO(protobuf-team): This may be broken since there may not be
+ # default_value. Combine with has_default_value somehow.
+ return field.default_value
+ return MakeScalarDefault
+
+
+def _ReraiseTypeErrorWithFieldName(message_name, field_name):
+ """Re-raise the currently-handled TypeError with the field name added."""
+ exc = sys.exc_info()[1]
+ if len(exc.args) == 1 and type(exc) is TypeError:
+ # simple TypeError; add field name to exception message
+ exc = TypeError('%s for field %s.%s' % (str(exc), message_name, field_name))
+
+ # re-raise possibly-amended exception with original traceback:
+ raise exc.with_traceback(sys.exc_info()[2])
+
+
+def _AddInitMethod(message_descriptor, cls):
+ """Adds an __init__ method to cls."""
+
+ def _GetIntegerEnumValue(enum_type, value):
+ """Convert a string or integer enum value to an integer.
+
+ If the value is a string, it is converted to the enum value in
+ enum_type with the same name. If the value is not a string, it's
+ returned as-is. (No conversion or bounds-checking is done.)
+ """
+ if isinstance(value, str):
+ try:
+ return enum_type.values_by_name[value].number
+ except KeyError:
+ raise ValueError('Enum type %s: unknown label "%s"' % (
+ enum_type.full_name, value))
+ return value
+
+ def init(self, **kwargs):
+ self._cached_byte_size = 0
+ self._cached_byte_size_dirty = len(kwargs) > 0
+ self._fields = {}
+ # Contains a mapping from oneof field descriptors to the descriptor
+ # of the currently set field in that oneof field.
+ self._oneofs = {}
+
+ # _unknown_fields is () when empty for efficiency, and will be turned into
+ # a list if fields are added.
+ self._unknown_fields = ()
+ # _unknown_field_set is None when empty for efficiency, and will be
+ # turned into UnknownFieldSet struct if fields are added.
+ self._unknown_field_set = None # pylint: disable=protected-access
+ self._is_present_in_parent = False
+ self._listener = message_listener_mod.NullMessageListener()
+ self._listener_for_children = _Listener(self)
+ for field_name, field_value in kwargs.items():
+ field = _GetFieldByName(message_descriptor, field_name)
+ if field is None:
+ raise TypeError('%s() got an unexpected keyword argument "%s"' %
+ (message_descriptor.name, field_name))
+ if field_value is None:
+ # field=None is the same as no field at all.
+ continue
+ if field.label == _FieldDescriptor.LABEL_REPEATED:
+ copy = field._default_constructor(self)
+ if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: # Composite
+ if _IsMapField(field):
+ if _IsMessageMapField(field):
+ for key in field_value:
+ copy[key].MergeFrom(field_value[key])
+ else:
+ copy.update(field_value)
+ else:
+ for val in field_value:
+ if isinstance(val, dict):
+ copy.add(**val)
+ else:
+ copy.add().MergeFrom(val)
+ else: # Scalar
+ if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
+ field_value = [_GetIntegerEnumValue(field.enum_type, val)
+ for val in field_value]
+ copy.extend(field_value)
+ self._fields[field] = copy
+ elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+ copy = field._default_constructor(self)
+ new_val = field_value
+ if isinstance(field_value, dict):
+ new_val = field.message_type._concrete_class(**field_value)
+ try:
+ copy.MergeFrom(new_val)
+ except TypeError:
+ _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name)
+ self._fields[field] = copy
+ else:
+ if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
+ field_value = _GetIntegerEnumValue(field.enum_type, field_value)
+ try:
+ setattr(self, field_name, field_value)
+ except TypeError:
+ _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name)
+
+ init.__module__ = None
+ init.__doc__ = None
+ cls.__init__ = init
+
+
+def _GetFieldByName(message_descriptor, field_name):
+ """Returns a field descriptor by field name.
+
+ Args:
+ message_descriptor: A Descriptor describing all fields in message.
+ field_name: The name of the field to retrieve.
+ Returns:
+ The field descriptor associated with the field name.
+ """
+ try:
+ return message_descriptor.fields_by_name[field_name]
+ except KeyError:
+ raise ValueError('Protocol message %s has no "%s" field.' %
+ (message_descriptor.name, field_name))
+
+
+def _AddPropertiesForFields(descriptor, cls):
+ """Adds properties for all fields in this protocol message type."""
+ for field in descriptor.fields:
+ _AddPropertiesForField(field, cls)
+
+ if descriptor.is_extendable:
+ # _ExtensionDict is just an adaptor with no state so we allocate a new one
+ # every time it is accessed.
+ cls.Extensions = property(lambda self: _ExtensionDict(self))
+
+
+def _AddPropertiesForField(field, cls):
+ """Adds a public property for a protocol message field.
+ Clients can use this property to get and (in the case
+ of non-repeated scalar fields) directly set the value
+ of a protocol message field.
+
+ Args:
+ field: A FieldDescriptor for this field.
+ cls: The class we're constructing.
+ """
+ # Catch it if we add other types that we should
+ # handle specially here.
+ assert _FieldDescriptor.MAX_CPPTYPE == 10
+
+ constant_name = field.name.upper() + '_FIELD_NUMBER'
+ setattr(cls, constant_name, field.number)
+
+ if field.label == _FieldDescriptor.LABEL_REPEATED:
+ _AddPropertiesForRepeatedField(field, cls)
+ elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+ _AddPropertiesForNonRepeatedCompositeField(field, cls)
+ else:
+ _AddPropertiesForNonRepeatedScalarField(field, cls)
+
+
+class _FieldProperty(property):
+ __slots__ = ('DESCRIPTOR',)
+
+ def __init__(self, descriptor, getter, setter, doc):
+ property.__init__(self, getter, setter, doc=doc)
+ self.DESCRIPTOR = descriptor
+
+
+def _AddPropertiesForRepeatedField(field, cls):
+ """Adds a public property for a "repeated" protocol message field. Clients
+ can use this property to get the value of the field, which will be either a
+ RepeatedScalarFieldContainer or RepeatedCompositeFieldContainer (see
+ below).
+
+ Note that when clients add values to these containers, we perform
+ type-checking in the case of repeated scalar fields, and we also set any
+ necessary "has" bits as a side-effect.
+
+ Args:
+ field: A FieldDescriptor for this field.
+ cls: The class we're constructing.
+ """
+ proto_field_name = field.name
+ property_name = _PropertyName(proto_field_name)
+
+ def getter(self):
+ field_value = self._fields.get(field)
+ if field_value is None:
+ # Construct a new object to represent this field.
+ field_value = field._default_constructor(self)
+
+ # Atomically check if another thread has preempted us and, if not, swap
+ # in the new object we just created. If someone has preempted us, we
+ # take that object and discard ours.
+ # WARNING: We are relying on setdefault() being atomic. This is true
+ # in CPython but we haven't investigated others. This warning appears
+ # in several other locations in this file.
+ field_value = self._fields.setdefault(field, field_value)
+ return field_value
+ getter.__module__ = None
+ getter.__doc__ = 'Getter for %s.' % proto_field_name
+
+ # We define a setter just so we can throw an exception with a more
+ # helpful error message.
+ def setter(self, new_value):
+ raise AttributeError('Assignment not allowed to repeated field '
+ '"%s" in protocol message object.' % proto_field_name)
+
+ doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name
+ setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc))
+
+
+def _AddPropertiesForNonRepeatedScalarField(field, cls):
+ """Adds a public property for a nonrepeated, scalar protocol message field.
+ Clients can use this property to get and directly set the value of the field.
+ Note that when the client sets the value of a field by using this property,
+ all necessary "has" bits are set as a side-effect, and we also perform
+ type-checking.
+
+ Args:
+ field: A FieldDescriptor for this field.
+ cls: The class we're constructing.
+ """
+ proto_field_name = field.name
+ property_name = _PropertyName(proto_field_name)
+ type_checker = type_checkers.GetTypeChecker(field)
+ default_value = field.default_value
+ is_proto3 = field.containing_type.syntax == 'proto3'
+
+ def getter(self):
+ # TODO(protobuf-team): This may be broken since there may not be
+ # default_value. Combine with has_default_value somehow.
+ return self._fields.get(field, default_value)
+ getter.__module__ = None
+ getter.__doc__ = 'Getter for %s.' % proto_field_name
+
+ clear_when_set_to_default = is_proto3 and not field.containing_oneof
+
+ def field_setter(self, new_value):
+ # pylint: disable=protected-access
+ # Testing the value for truthiness captures all of the proto3 defaults
+ # (0, 0.0, enum 0, and False).
+ try:
+ new_value = type_checker.CheckValue(new_value)
+ except TypeError as e:
+ raise TypeError(
+ 'Cannot set %s to %.1024r: %s' % (field.full_name, new_value, e))
+ if clear_when_set_to_default and not new_value:
+ self._fields.pop(field, None)
+ else:
+ self._fields[field] = new_value
+ # Check _cached_byte_size_dirty inline to improve performance, since scalar
+ # setters are called frequently.
+ if not self._cached_byte_size_dirty:
+ self._Modified()
+
+ if field.containing_oneof:
+ def setter(self, new_value):
+ field_setter(self, new_value)
+ self._UpdateOneofState(field)
+ else:
+ setter = field_setter
+
+ setter.__module__ = None
+ setter.__doc__ = 'Setter for %s.' % proto_field_name
+
+ # Add a property to encapsulate the getter/setter.
+ doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name
+ setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc))
+
+
+def _AddPropertiesForNonRepeatedCompositeField(field, cls):
+ """Adds a public property for a nonrepeated, composite protocol message field.
+ A composite field is a "group" or "message" field.
+
+ Clients can use this property to get the value of the field, but cannot
+ assign to the property directly.
+
+ Args:
+ field: A FieldDescriptor for this field.
+ cls: The class we're constructing.
+ """
+ # TODO(robinson): Remove duplication with similar method
+ # for non-repeated scalars.
+ proto_field_name = field.name
+ property_name = _PropertyName(proto_field_name)
+
+ def getter(self):
+ field_value = self._fields.get(field)
+ if field_value is None:
+ # Construct a new object to represent this field.
+ field_value = field._default_constructor(self)
+
+ # Atomically check if another thread has preempted us and, if not, swap
+ # in the new object we just created. If someone has preempted us, we
+ # take that object and discard ours.
+ # WARNING: We are relying on setdefault() being atomic. This is true
+ # in CPython but we haven't investigated others. This warning appears
+ # in several other locations in this file.
+ field_value = self._fields.setdefault(field, field_value)
+ return field_value
+ getter.__module__ = None
+ getter.__doc__ = 'Getter for %s.' % proto_field_name
+
+ # We define a setter just so we can throw an exception with a more
+ # helpful error message.
+ def setter(self, new_value):
+ raise AttributeError('Assignment not allowed to composite field '
+ '"%s" in protocol message object.' % proto_field_name)
+
+ # Add a property to encapsulate the getter.
+ doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name
+ setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc))
+
+
+def _AddPropertiesForExtensions(descriptor, cls):
+ """Adds properties for all fields in this protocol message type."""
+ extensions = descriptor.extensions_by_name
+ for extension_name, extension_field in extensions.items():
+ constant_name = extension_name.upper() + '_FIELD_NUMBER'
+ setattr(cls, constant_name, extension_field.number)
+
+ # TODO(amauryfa): Migrate all users of these attributes to functions like
+ # pool.FindExtensionByNumber(descriptor).
+ if descriptor.file is not None:
+ # TODO(amauryfa): Use cls.MESSAGE_FACTORY.pool when available.
+ pool = descriptor.file.pool
+ cls._extensions_by_number = pool._extensions_by_number[descriptor]
+ cls._extensions_by_name = pool._extensions_by_name[descriptor]
+
+def _AddStaticMethods(cls):
+ # TODO(robinson): This probably needs to be thread-safe(?)
+ def RegisterExtension(extension_handle):
+ extension_handle.containing_type = cls.DESCRIPTOR
+ # TODO(amauryfa): Use cls.MESSAGE_FACTORY.pool when available.
+ # pylint: disable=protected-access
+ cls.DESCRIPTOR.file.pool._AddExtensionDescriptor(extension_handle)
+ _AttachFieldHelpers(cls, extension_handle)
+ cls.RegisterExtension = staticmethod(RegisterExtension)
+
+ def FromString(s):
+ message = cls()
+ message.MergeFromString(s)
+ return message
+ cls.FromString = staticmethod(FromString)
+
+
+def _IsPresent(item):
+ """Given a (FieldDescriptor, value) tuple from _fields, return true if the
+ value should be included in the list returned by ListFields()."""
+
+ if item[0].label == _FieldDescriptor.LABEL_REPEATED:
+ return bool(item[1])
+ elif item[0].cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+ return item[1]._is_present_in_parent
+ else:
+ return True
+
+
+def _AddListFieldsMethod(message_descriptor, cls):
+ """Helper for _AddMessageMethods()."""
+
+ def ListFields(self):
+ all_fields = [item for item in self._fields.items() if _IsPresent(item)]
+ all_fields.sort(key = lambda item: item[0].number)
+ return all_fields
+
+ cls.ListFields = ListFields
+
+_PROTO3_ERROR_TEMPLATE = \
+ ('Protocol message %s has no non-repeated submessage field "%s" '
+ 'nor marked as optional')
+_PROTO2_ERROR_TEMPLATE = 'Protocol message %s has no non-repeated field "%s"'
+
+def _AddHasFieldMethod(message_descriptor, cls):
+ """Helper for _AddMessageMethods()."""
+
+ is_proto3 = (message_descriptor.syntax == "proto3")
+ error_msg = _PROTO3_ERROR_TEMPLATE if is_proto3 else _PROTO2_ERROR_TEMPLATE
+
+ hassable_fields = {}
+ for field in message_descriptor.fields:
+ if field.label == _FieldDescriptor.LABEL_REPEATED:
+ continue
+ # For proto3, only submessages and fields inside a oneof have presence.
+ if (is_proto3 and field.cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE and
+ not field.containing_oneof):
+ continue
+ hassable_fields[field.name] = field
+
+ # Has methods are supported for oneof descriptors.
+ for oneof in message_descriptor.oneofs:
+ hassable_fields[oneof.name] = oneof
+
+ def HasField(self, field_name):
+ try:
+ field = hassable_fields[field_name]
+ except KeyError:
+ raise ValueError(error_msg % (message_descriptor.full_name, field_name))
+
+ if isinstance(field, descriptor_mod.OneofDescriptor):
+ try:
+ return HasField(self, self._oneofs[field].name)
+ except KeyError:
+ return False
+ else:
+ if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+ value = self._fields.get(field)
+ return value is not None and value._is_present_in_parent
+ else:
+ return field in self._fields
+
+ cls.HasField = HasField
+
+
+def _AddClearFieldMethod(message_descriptor, cls):
+ """Helper for _AddMessageMethods()."""
+ def ClearField(self, field_name):
+ try:
+ field = message_descriptor.fields_by_name[field_name]
+ except KeyError:
+ try:
+ field = message_descriptor.oneofs_by_name[field_name]
+ if field in self._oneofs:
+ field = self._oneofs[field]
+ else:
+ return
+ except KeyError:
+ raise ValueError('Protocol message %s has no "%s" field.' %
+ (message_descriptor.name, field_name))
+
+ if field in self._fields:
+ # To match the C++ implementation, we need to invalidate iterators
+ # for map fields when ClearField() happens.
+ if hasattr(self._fields[field], 'InvalidateIterators'):
+ self._fields[field].InvalidateIterators()
+
+ # Note: If the field is a sub-message, its listener will still point
+ # at us. That's fine, because the worst than can happen is that it
+ # will call _Modified() and invalidate our byte size. Big deal.
+ del self._fields[field]
+
+ if self._oneofs.get(field.containing_oneof, None) is field:
+ del self._oneofs[field.containing_oneof]
+
+ # Always call _Modified() -- even if nothing was changed, this is
+ # a mutating method, and thus calling it should cause the field to become
+ # present in the parent message.
+ self._Modified()
+
+ cls.ClearField = ClearField
+
+
+def _AddClearExtensionMethod(cls):
+ """Helper for _AddMessageMethods()."""
+ def ClearExtension(self, extension_handle):
+ extension_dict._VerifyExtensionHandle(self, extension_handle)
+
+ # Similar to ClearField(), above.
+ if extension_handle in self._fields:
+ del self._fields[extension_handle]
+ self._Modified()
+ cls.ClearExtension = ClearExtension
+
+
+def _AddHasExtensionMethod(cls):
+ """Helper for _AddMessageMethods()."""
+ def HasExtension(self, extension_handle):
+ extension_dict._VerifyExtensionHandle(self, extension_handle)
+ if extension_handle.label == _FieldDescriptor.LABEL_REPEATED:
+ raise KeyError('"%s" is repeated.' % extension_handle.full_name)
+
+ if extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+ value = self._fields.get(extension_handle)
+ return value is not None and value._is_present_in_parent
+ else:
+ return extension_handle in self._fields
+ cls.HasExtension = HasExtension
+
+def _InternalUnpackAny(msg):
+ """Unpacks Any message and returns the unpacked message.
+
+ This internal method is different from public Any Unpack method which takes
+ the target message as argument. _InternalUnpackAny method does not have
+ target message type and need to find the message type in descriptor pool.
+
+ Args:
+ msg: An Any message to be unpacked.
+
+ Returns:
+ The unpacked message.
+ """
+ # TODO(amauryfa): Don't use the factory of generated messages.
+ # To make Any work with custom factories, use the message factory of the
+ # parent message.
+ # pylint: disable=g-import-not-at-top
+ from google.protobuf import symbol_database
+ factory = symbol_database.Default()
+
+ type_url = msg.type_url
+
+ if not type_url:
+ return None
+
+ # TODO(haberman): For now we just strip the hostname. Better logic will be
+ # required.
+ type_name = type_url.split('/')[-1]
+ descriptor = factory.pool.FindMessageTypeByName(type_name)
+
+ if descriptor is None:
+ return None
+
+ message_class = factory.GetPrototype(descriptor)
+ message = message_class()
+
+ message.ParseFromString(msg.value)
+ return message
+
+
+def _AddEqualsMethod(message_descriptor, cls):
+ """Helper for _AddMessageMethods()."""
+ def __eq__(self, other):
+ if (not isinstance(other, message_mod.Message) or
+ other.DESCRIPTOR != self.DESCRIPTOR):
+ return False
+
+ if self is other:
+ return True
+
+ if self.DESCRIPTOR.full_name == _AnyFullTypeName:
+ any_a = _InternalUnpackAny(self)
+ any_b = _InternalUnpackAny(other)
+ if any_a and any_b:
+ return any_a == any_b
+
+ if not self.ListFields() == other.ListFields():
+ return False
+
+ # TODO(jieluo): Fix UnknownFieldSet to consider MessageSet extensions,
+ # then use it for the comparison.
+ unknown_fields = list(self._unknown_fields)
+ unknown_fields.sort()
+ other_unknown_fields = list(other._unknown_fields)
+ other_unknown_fields.sort()
+ return unknown_fields == other_unknown_fields
+
+ cls.__eq__ = __eq__
+
+
+def _AddStrMethod(message_descriptor, cls):
+ """Helper for _AddMessageMethods()."""
+ def __str__(self):
+ return text_format.MessageToString(self)
+ cls.__str__ = __str__
+
+
+def _AddReprMethod(message_descriptor, cls):
+ """Helper for _AddMessageMethods()."""
+ def __repr__(self):
+ return text_format.MessageToString(self)
+ cls.__repr__ = __repr__
+
+
+def _AddUnicodeMethod(unused_message_descriptor, cls):
+ """Helper for _AddMessageMethods()."""
+
+ def __unicode__(self):
+ return text_format.MessageToString(self, as_utf8=True).decode('utf-8')
+ cls.__unicode__ = __unicode__
+
+
+def _BytesForNonRepeatedElement(value, field_number, field_type):
+ """Returns the number of bytes needed to serialize a non-repeated element.
+ The returned byte count includes space for tag information and any
+ other additional space associated with serializing value.
+
+ Args:
+ value: Value we're serializing.
+ field_number: Field number of this value. (Since the field number
+ is stored as part of a varint-encoded tag, this has an impact
+ on the total bytes required to serialize the value).
+ field_type: The type of the field. One of the TYPE_* constants
+ within FieldDescriptor.
+ """
+ try:
+ fn = type_checkers.TYPE_TO_BYTE_SIZE_FN[field_type]
+ return fn(field_number, value)
+ except KeyError:
+ raise message_mod.EncodeError('Unrecognized field type: %d' % field_type)
+
+
+def _AddByteSizeMethod(message_descriptor, cls):
+ """Helper for _AddMessageMethods()."""
+
+ def ByteSize(self):
+ if not self._cached_byte_size_dirty:
+ return self._cached_byte_size
+
+ size = 0
+ descriptor = self.DESCRIPTOR
+ if descriptor.GetOptions().map_entry:
+ # Fields of map entry should always be serialized.
+ size = descriptor.fields_by_name['key']._sizer(self.key)
+ size += descriptor.fields_by_name['value']._sizer(self.value)
+ else:
+ for field_descriptor, field_value in self.ListFields():
+ size += field_descriptor._sizer(field_value)
+ for tag_bytes, value_bytes in self._unknown_fields:
+ size += len(tag_bytes) + len(value_bytes)
+
+ self._cached_byte_size = size
+ self._cached_byte_size_dirty = False
+ self._listener_for_children.dirty = False
+ return size
+
+ cls.ByteSize = ByteSize
+
+
+def _AddSerializeToStringMethod(message_descriptor, cls):
+ """Helper for _AddMessageMethods()."""
+
+ def SerializeToString(self, **kwargs):
+ # Check if the message has all of its required fields set.
+ if not self.IsInitialized():
+ raise message_mod.EncodeError(
+ 'Message %s is missing required fields: %s' % (
+ self.DESCRIPTOR.full_name, ','.join(self.FindInitializationErrors())))
+ return self.SerializePartialToString(**kwargs)
+ cls.SerializeToString = SerializeToString
+
+
+def _AddSerializePartialToStringMethod(message_descriptor, cls):
+ """Helper for _AddMessageMethods()."""
+
+ def SerializePartialToString(self, **kwargs):
+ out = BytesIO()
+ self._InternalSerialize(out.write, **kwargs)
+ return out.getvalue()
+ cls.SerializePartialToString = SerializePartialToString
+
+ def InternalSerialize(self, write_bytes, deterministic=None):
+ if deterministic is None:
+ deterministic = (
+ api_implementation.IsPythonDefaultSerializationDeterministic())
+ else:
+ deterministic = bool(deterministic)
+
+ descriptor = self.DESCRIPTOR
+ if descriptor.GetOptions().map_entry:
+ # Fields of map entry should always be serialized.
+ descriptor.fields_by_name['key']._encoder(
+ write_bytes, self.key, deterministic)
+ descriptor.fields_by_name['value']._encoder(
+ write_bytes, self.value, deterministic)
+ else:
+ for field_descriptor, field_value in self.ListFields():
+ field_descriptor._encoder(write_bytes, field_value, deterministic)
+ for tag_bytes, value_bytes in self._unknown_fields:
+ write_bytes(tag_bytes)
+ write_bytes(value_bytes)
+ cls._InternalSerialize = InternalSerialize
+
+
+def _AddMergeFromStringMethod(message_descriptor, cls):
+ """Helper for _AddMessageMethods()."""
+ def MergeFromString(self, serialized):
+ serialized = memoryview(serialized)
+ length = len(serialized)
+ try:
+ if self._InternalParse(serialized, 0, length) != length:
+ # The only reason _InternalParse would return early is if it
+ # encountered an end-group tag.
+ raise message_mod.DecodeError('Unexpected end-group tag.')
+ except (IndexError, TypeError):
+ # Now ord(buf[p:p+1]) == ord('') gets TypeError.
+ raise message_mod.DecodeError('Truncated message.')
+ except struct.error as e:
+ raise message_mod.DecodeError(e)
+ return length # Return this for legacy reasons.
+ cls.MergeFromString = MergeFromString
+
+ local_ReadTag = decoder.ReadTag
+ local_SkipField = decoder.SkipField
+ decoders_by_tag = cls._decoders_by_tag
+
+ def InternalParse(self, buffer, pos, end):
+ """Create a message from serialized bytes.
+
+ Args:
+ self: Message, instance of the proto message object.
+ buffer: memoryview of the serialized data.
+ pos: int, position to start in the serialized data.
+ end: int, end position of the serialized data.
+
+ Returns:
+ Message object.
+ """
+ # Guard against internal misuse, since this function is called internally
+ # quite extensively, and its easy to accidentally pass bytes.
+ assert isinstance(buffer, memoryview)
+ self._Modified()
+ field_dict = self._fields
+ # pylint: disable=protected-access
+ unknown_field_set = self._unknown_field_set
+ while pos != end:
+ (tag_bytes, new_pos) = local_ReadTag(buffer, pos)
+ field_decoder, field_desc = decoders_by_tag.get(tag_bytes, (None, None))
+ if field_decoder is None:
+ if not self._unknown_fields: # pylint: disable=protected-access
+ self._unknown_fields = [] # pylint: disable=protected-access
+ if unknown_field_set is None:
+ # pylint: disable=protected-access
+ self._unknown_field_set = containers.UnknownFieldSet()
+ # pylint: disable=protected-access
+ unknown_field_set = self._unknown_field_set
+ # pylint: disable=protected-access
+ (tag, _) = decoder._DecodeVarint(tag_bytes, 0)
+ field_number, wire_type = wire_format.UnpackTag(tag)
+ if field_number == 0:
+ raise message_mod.DecodeError('Field number 0 is illegal.')
+ # TODO(jieluo): remove old_pos.
+ old_pos = new_pos
+ (data, new_pos) = decoder._DecodeUnknownField(
+ buffer, new_pos, wire_type) # pylint: disable=protected-access
+ if new_pos == -1:
+ return pos
+ # pylint: disable=protected-access
+ unknown_field_set._add(field_number, wire_type, data)
+ # TODO(jieluo): remove _unknown_fields.
+ new_pos = local_SkipField(buffer, old_pos, end, tag_bytes)
+ if new_pos == -1:
+ return pos
+ self._unknown_fields.append(
+ (tag_bytes, buffer[old_pos:new_pos].tobytes()))
+ pos = new_pos
+ else:
+ pos = field_decoder(buffer, new_pos, end, self, field_dict)
+ if field_desc:
+ self._UpdateOneofState(field_desc)
+ return pos
+ cls._InternalParse = InternalParse
+
+
+def _AddIsInitializedMethod(message_descriptor, cls):
+ """Adds the IsInitialized and FindInitializationError methods to the
+ protocol message class."""
+
+ required_fields = [field for field in message_descriptor.fields
+ if field.label == _FieldDescriptor.LABEL_REQUIRED]
+
+ def IsInitialized(self, errors=None):
+ """Checks if all required fields of a message are set.
+
+ Args:
+ errors: A list which, if provided, will be populated with the field
+ paths of all missing required fields.
+
+ Returns:
+ True iff the specified message has all required fields set.
+ """
+
+ # Performance is critical so we avoid HasField() and ListFields().
+
+ for field in required_fields:
+ if (field not in self._fields or
+ (field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE and
+ not self._fields[field]._is_present_in_parent)):
+ if errors is not None:
+ errors.extend(self.FindInitializationErrors())
+ return False
+
+ for field, value in list(self._fields.items()): # dict can change size!
+ if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+ if field.label == _FieldDescriptor.LABEL_REPEATED:
+ if (field.message_type.has_options and
+ field.message_type.GetOptions().map_entry):
+ continue
+ for element in value:
+ if not element.IsInitialized():
+ if errors is not None:
+ errors.extend(self.FindInitializationErrors())
+ return False
+ elif value._is_present_in_parent and not value.IsInitialized():
+ if errors is not None:
+ errors.extend(self.FindInitializationErrors())
+ return False
+
+ return True
+
+ cls.IsInitialized = IsInitialized
+
+ def FindInitializationErrors(self):
+ """Finds required fields which are not initialized.
+
+ Returns:
+ A list of strings. Each string is a path to an uninitialized field from
+ the top-level message, e.g. "foo.bar[5].baz".
+ """
+
+ errors = [] # simplify things
+
+ for field in required_fields:
+ if not self.HasField(field.name):
+ errors.append(field.name)
+
+ for field, value in self.ListFields():
+ if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+ if field.is_extension:
+ name = '(%s)' % field.full_name
+ else:
+ name = field.name
+
+ if _IsMapField(field):
+ if _IsMessageMapField(field):
+ for key in value:
+ element = value[key]
+ prefix = '%s[%s].' % (name, key)
+ sub_errors = element.FindInitializationErrors()
+ errors += [prefix + error for error in sub_errors]
+ else:
+ # ScalarMaps can't have any initialization errors.
+ pass
+ elif field.label == _FieldDescriptor.LABEL_REPEATED:
+ for i in range(len(value)):
+ element = value[i]
+ prefix = '%s[%d].' % (name, i)
+ sub_errors = element.FindInitializationErrors()
+ errors += [prefix + error for error in sub_errors]
+ else:
+ prefix = name + '.'
+ sub_errors = value.FindInitializationErrors()
+ errors += [prefix + error for error in sub_errors]
+
+ return errors
+
+ cls.FindInitializationErrors = FindInitializationErrors
+
+
+def _FullyQualifiedClassName(klass):
+ module = klass.__module__
+ name = getattr(klass, '__qualname__', klass.__name__)
+ if module in (None, 'builtins', '__builtin__'):
+ return name
+ return module + '.' + name
+
+
+def _AddMergeFromMethod(cls):
+ LABEL_REPEATED = _FieldDescriptor.LABEL_REPEATED
+ CPPTYPE_MESSAGE = _FieldDescriptor.CPPTYPE_MESSAGE
+
+ def MergeFrom(self, msg):
+ if not isinstance(msg, cls):
+ raise TypeError(
+ 'Parameter to MergeFrom() must be instance of same class: '
+ 'expected %s got %s.' % (_FullyQualifiedClassName(cls),
+ _FullyQualifiedClassName(msg.__class__)))
+
+ assert msg is not self
+ self._Modified()
+
+ fields = self._fields
+
+ for field, value in msg._fields.items():
+ if field.label == LABEL_REPEATED:
+ field_value = fields.get(field)
+ if field_value is None:
+ # Construct a new object to represent this field.
+ field_value = field._default_constructor(self)
+ fields[field] = field_value
+ field_value.MergeFrom(value)
+ elif field.cpp_type == CPPTYPE_MESSAGE:
+ if value._is_present_in_parent:
+ field_value = fields.get(field)
+ if field_value is None:
+ # Construct a new object to represent this field.
+ field_value = field._default_constructor(self)
+ fields[field] = field_value
+ field_value.MergeFrom(value)
+ else:
+ self._fields[field] = value
+ if field.containing_oneof:
+ self._UpdateOneofState(field)
+
+ if msg._unknown_fields:
+ if not self._unknown_fields:
+ self._unknown_fields = []
+ self._unknown_fields.extend(msg._unknown_fields)
+ # pylint: disable=protected-access
+ if self._unknown_field_set is None:
+ self._unknown_field_set = containers.UnknownFieldSet()
+ self._unknown_field_set._extend(msg._unknown_field_set)
+
+ cls.MergeFrom = MergeFrom
+
+
+def _AddWhichOneofMethod(message_descriptor, cls):
+ def WhichOneof(self, oneof_name):
+ """Returns the name of the currently set field inside a oneof, or None."""
+ try:
+ field = message_descriptor.oneofs_by_name[oneof_name]
+ except KeyError:
+ raise ValueError(
+ 'Protocol message has no oneof "%s" field.' % oneof_name)
+
+ nested_field = self._oneofs.get(field, None)
+ if nested_field is not None and self.HasField(nested_field.name):
+ return nested_field.name
+ else:
+ return None
+
+ cls.WhichOneof = WhichOneof
+
+
+def _Clear(self):
+ # Clear fields.
+ self._fields = {}
+ self._unknown_fields = ()
+ # pylint: disable=protected-access
+ if self._unknown_field_set is not None:
+ self._unknown_field_set._clear()
+ self._unknown_field_set = None
+
+ self._oneofs = {}
+ self._Modified()
+
+
+def _UnknownFields(self):
+ if self._unknown_field_set is None: # pylint: disable=protected-access
+ # pylint: disable=protected-access
+ self._unknown_field_set = containers.UnknownFieldSet()
+ return self._unknown_field_set # pylint: disable=protected-access
+
+
+def _DiscardUnknownFields(self):
+ self._unknown_fields = []
+ self._unknown_field_set = None # pylint: disable=protected-access
+ for field, value in self.ListFields():
+ if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+ if _IsMapField(field):
+ if _IsMessageMapField(field):
+ for key in value:
+ value[key].DiscardUnknownFields()
+ elif field.label == _FieldDescriptor.LABEL_REPEATED:
+ for sub_message in value:
+ sub_message.DiscardUnknownFields()
+ else:
+ value.DiscardUnknownFields()
+
+
+def _SetListener(self, listener):
+ if listener is None:
+ self._listener = message_listener_mod.NullMessageListener()
+ else:
+ self._listener = listener
+
+
+def _AddMessageMethods(message_descriptor, cls):
+ """Adds implementations of all Message methods to cls."""
+ _AddListFieldsMethod(message_descriptor, cls)
+ _AddHasFieldMethod(message_descriptor, cls)
+ _AddClearFieldMethod(message_descriptor, cls)
+ if message_descriptor.is_extendable:
+ _AddClearExtensionMethod(cls)
+ _AddHasExtensionMethod(cls)
+ _AddEqualsMethod(message_descriptor, cls)
+ _AddStrMethod(message_descriptor, cls)
+ _AddReprMethod(message_descriptor, cls)
+ _AddUnicodeMethod(message_descriptor, cls)
+ _AddByteSizeMethod(message_descriptor, cls)
+ _AddSerializeToStringMethod(message_descriptor, cls)
+ _AddSerializePartialToStringMethod(message_descriptor, cls)
+ _AddMergeFromStringMethod(message_descriptor, cls)
+ _AddIsInitializedMethod(message_descriptor, cls)
+ _AddMergeFromMethod(cls)
+ _AddWhichOneofMethod(message_descriptor, cls)
+ # Adds methods which do not depend on cls.
+ cls.Clear = _Clear
+ cls.UnknownFields = _UnknownFields
+ cls.DiscardUnknownFields = _DiscardUnknownFields
+ cls._SetListener = _SetListener
+
+
+def _AddPrivateHelperMethods(message_descriptor, cls):
+ """Adds implementation of private helper methods to cls."""
+
+ def Modified(self):
+ """Sets the _cached_byte_size_dirty bit to true,
+ and propagates this to our listener iff this was a state change.
+ """
+
+ # Note: Some callers check _cached_byte_size_dirty before calling
+ # _Modified() as an extra optimization. So, if this method is ever
+ # changed such that it does stuff even when _cached_byte_size_dirty is
+ # already true, the callers need to be updated.
+ if not self._cached_byte_size_dirty:
+ self._cached_byte_size_dirty = True
+ self._listener_for_children.dirty = True
+ self._is_present_in_parent = True
+ self._listener.Modified()
+
+ def _UpdateOneofState(self, field):
+ """Sets field as the active field in its containing oneof.
+
+ Will also delete currently active field in the oneof, if it is different
+ from the argument. Does not mark the message as modified.
+ """
+ other_field = self._oneofs.setdefault(field.containing_oneof, field)
+ if other_field is not field:
+ del self._fields[other_field]
+ self._oneofs[field.containing_oneof] = field
+
+ cls._Modified = Modified
+ cls.SetInParent = Modified
+ cls._UpdateOneofState = _UpdateOneofState
+
+
+class _Listener(object):
+
+ """MessageListener implementation that a parent message registers with its
+ child message.
+
+ In order to support semantics like:
+
+ foo.bar.baz.qux = 23
+ assert foo.HasField('bar')
+
+ ...child objects must have back references to their parents.
+ This helper class is at the heart of this support.
+ """
+
+ def __init__(self, parent_message):
+ """Args:
+ parent_message: The message whose _Modified() method we should call when
+ we receive Modified() messages.
+ """
+ # This listener establishes a back reference from a child (contained) object
+ # to its parent (containing) object. We make this a weak reference to avoid
+ # creating cyclic garbage when the client finishes with the 'parent' object
+ # in the tree.
+ if isinstance(parent_message, weakref.ProxyType):
+ self._parent_message_weakref = parent_message
+ else:
+ self._parent_message_weakref = weakref.proxy(parent_message)
+
+ # As an optimization, we also indicate directly on the listener whether
+ # or not the parent message is dirty. This way we can avoid traversing
+ # up the tree in the common case.
+ self.dirty = False
+
+ def Modified(self):
+ if self.dirty:
+ return
+ try:
+ # Propagate the signal to our parents iff this is the first field set.
+ self._parent_message_weakref._Modified()
+ except ReferenceError:
+ # We can get here if a client has kept a reference to a child object,
+ # and is now setting a field on it, but the child's parent has been
+ # garbage-collected. This is not an error.
+ pass
+
+
+class _OneofListener(_Listener):
+ """Special listener implementation for setting composite oneof fields."""
+
+ def __init__(self, parent_message, field):
+ """Args:
+ parent_message: The message whose _Modified() method we should call when
+ we receive Modified() messages.
+ field: The descriptor of the field being set in the parent message.
+ """
+ super(_OneofListener, self).__init__(parent_message)
+ self._field = field
+
+ def Modified(self):
+ """Also updates the state of the containing oneof in the parent message."""
+ try:
+ self._parent_message_weakref._UpdateOneofState(self._field)
+ super(_OneofListener, self).Modified()
+ except ReferenceError:
+ pass
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/type_checkers.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/type_checkers.py
new file mode 100644
index 0000000000000000000000000000000000000000..9b9b859e1ef265b829884dfc8e6b26043956cd45
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/type_checkers.py
@@ -0,0 +1,410 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Provides type checking routines.
+
+This module defines type checking utilities in the forms of dictionaries:
+
+VALUE_CHECKERS: A dictionary of field types and a value validation object.
+TYPE_TO_BYTE_SIZE_FN: A dictionary with field types and a size computing
+ function.
+TYPE_TO_SERIALIZE_METHOD: A dictionary with field types and serialization
+ function.
+FIELD_TYPE_TO_WIRE_TYPE: A dictionary with field types and their
+ corresponding wire types.
+TYPE_TO_DESERIALIZE_METHOD: A dictionary with field types and deserialization
+ function.
+"""
+
+__author__ = 'robinson@google.com (Will Robinson)'
+
+import ctypes
+import numbers
+
+from google.protobuf.internal import api_implementation
+from google.protobuf.internal import decoder
+from google.protobuf.internal import encoder
+from google.protobuf.internal import wire_format
+from google.protobuf import descriptor
+
+_FieldDescriptor = descriptor.FieldDescriptor
+
+
+def TruncateToFourByteFloat(original):
+ return ctypes.c_float(original).value
+
+
+def ToShortestFloat(original):
+ """Returns the shortest float that has same value in wire."""
+ # All 4 byte floats have between 6 and 9 significant digits, so we
+ # start with 6 as the lower bound.
+  # It has to be iterative because using '.9g' directly cannot get rid
+  # of the noise for most values. For example, if float_field is set to
+  # 0.9, '.9g' will print 0.899999976.
+ precision = 6
+ rounded = float('{0:.{1}g}'.format(original, precision))
+ while TruncateToFourByteFloat(rounded) != original:
+ precision += 1
+ rounded = float('{0:.{1}g}'.format(original, precision))
+ return rounded
+
+
+def SupportsOpenEnums(field_descriptor):
+ return field_descriptor.containing_type.syntax == "proto3"
+
+def GetTypeChecker(field):
+ """Returns a type checker for a message field of the specified types.
+
+ Args:
+ field: FieldDescriptor object for this field.
+
+ Returns:
+ An instance of TypeChecker which can be used to verify the types
+ of values assigned to a field of the specified type.
+ """
+ if (field.cpp_type == _FieldDescriptor.CPPTYPE_STRING and
+ field.type == _FieldDescriptor.TYPE_STRING):
+ return UnicodeValueChecker()
+ if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
+ if SupportsOpenEnums(field):
+ # When open enums are supported, any int32 can be assigned.
+ return _VALUE_CHECKERS[_FieldDescriptor.CPPTYPE_INT32]
+ else:
+ return EnumValueChecker(field.enum_type)
+ return _VALUE_CHECKERS[field.cpp_type]
+
+
+# None of the typecheckers below make any attempt to guard against people
+# subclassing builtin types and doing weird things. We're not trying to
+# protect against malicious clients here, just people accidentally shooting
+# themselves in the foot in obvious ways.
+
+class TypeChecker(object):
+
+ """Type checker used to catch type errors as early as possible
+ when the client is setting scalar fields in protocol messages.
+ """
+
+ def __init__(self, *acceptable_types):
+ self._acceptable_types = acceptable_types
+
+ def CheckValue(self, proposed_value):
+ """Type check the provided value and return it.
+
+ The returned value might have been normalized to another type.
+ """
+ if not isinstance(proposed_value, self._acceptable_types):
+ message = ('%.1024r has type %s, but expected one of: %s' %
+ (proposed_value, type(proposed_value), self._acceptable_types))
+ raise TypeError(message)
+    # Some field types (float, double and bool) accept other types; we must
+ # convert to the correct type in such cases.
+ if self._acceptable_types:
+ if self._acceptable_types[0] in (bool, float):
+ return self._acceptable_types[0](proposed_value)
+ return proposed_value
+
+
+class TypeCheckerWithDefault(TypeChecker):
+
+ def __init__(self, default_value, *acceptable_types):
+ TypeChecker.__init__(self, *acceptable_types)
+ self._default_value = default_value
+
+ def DefaultValue(self):
+ return self._default_value
+
+
+# IntValueChecker and its subclasses perform integer type-checks
+# and bounds-checks.
+class IntValueChecker(object):
+
+ """Checker used for integer fields. Performs type-check and range check."""
+
+ def CheckValue(self, proposed_value):
+ if not isinstance(proposed_value, numbers.Integral):
+ message = ('%.1024r has type %s, but expected one of: %s' %
+ (proposed_value, type(proposed_value), (int,)))
+ raise TypeError(message)
+ if not self._MIN <= int(proposed_value) <= self._MAX:
+ raise ValueError('Value out of range: %d' % proposed_value)
+ # We force all values to int to make alternate implementations where the
+ # distinction is more significant (e.g. the C++ implementation) simpler.
+ proposed_value = int(proposed_value)
+ return proposed_value
+
+ def DefaultValue(self):
+ return 0
+
+
+class EnumValueChecker(object):
+
+ """Checker used for enum fields. Performs type-check and range check."""
+
+ def __init__(self, enum_type):
+ self._enum_type = enum_type
+
+ def CheckValue(self, proposed_value):
+ if not isinstance(proposed_value, numbers.Integral):
+ message = ('%.1024r has type %s, but expected one of: %s' %
+ (proposed_value, type(proposed_value), (int,)))
+ raise TypeError(message)
+ if int(proposed_value) not in self._enum_type.values_by_number:
+ raise ValueError('Unknown enum value: %d' % proposed_value)
+ return proposed_value
+
+ def DefaultValue(self):
+ return self._enum_type.values[0].number
+
+
+class UnicodeValueChecker(object):
+
+ """Checker used for string fields.
+
+ Always returns a unicode value, even if the input is of type str.
+ """
+
+ def CheckValue(self, proposed_value):
+ if not isinstance(proposed_value, (bytes, str)):
+ message = ('%.1024r has type %s, but expected one of: %s' %
+ (proposed_value, type(proposed_value), (bytes, str)))
+ raise TypeError(message)
+
+ # If the value is of type 'bytes' make sure that it is valid UTF-8 data.
+ if isinstance(proposed_value, bytes):
+ try:
+ proposed_value = proposed_value.decode('utf-8')
+ except UnicodeDecodeError:
+ raise ValueError('%.1024r has type bytes, but isn\'t valid UTF-8 '
+ 'encoding. Non-UTF-8 strings must be converted to '
+ 'unicode objects before being added.' %
+ (proposed_value))
+ else:
+ try:
+ proposed_value.encode('utf8')
+ except UnicodeEncodeError:
+ raise ValueError('%.1024r isn\'t a valid unicode string and '
+ 'can\'t be encoded in UTF-8.'%
+ (proposed_value))
+
+ return proposed_value
+
+ def DefaultValue(self):
+ return u""
+
+
+class Int32ValueChecker(IntValueChecker):
+ # We're sure to use ints instead of longs here since comparison may be more
+ # efficient.
+ _MIN = -2147483648
+ _MAX = 2147483647
+
+
+class Uint32ValueChecker(IntValueChecker):
+ _MIN = 0
+ _MAX = (1 << 32) - 1
+
+
+class Int64ValueChecker(IntValueChecker):
+ _MIN = -(1 << 63)
+ _MAX = (1 << 63) - 1
+
+
+class Uint64ValueChecker(IntValueChecker):
+ _MIN = 0
+ _MAX = (1 << 64) - 1
+
+
+# The max 4 bytes float is about 3.4028234663852886e+38
+_FLOAT_MAX = float.fromhex('0x1.fffffep+127')
+_FLOAT_MIN = -_FLOAT_MAX
+_INF = float('inf')
+_NEG_INF = float('-inf')
+
+
+class FloatValueChecker(object):
+
+ """Checker used for float fields. Performs type-check and range check.
+
+ Values exceeding a 32-bit float will be converted to inf/-inf.
+ """
+
+ def CheckValue(self, proposed_value):
+ """Check and convert proposed_value to float."""
+ if not isinstance(proposed_value, numbers.Real):
+ message = ('%.1024r has type %s, but expected one of: numbers.Real' %
+ (proposed_value, type(proposed_value)))
+ raise TypeError(message)
+ converted_value = float(proposed_value)
+ # This inf rounding matches the C++ proto SafeDoubleToFloat logic.
+ if converted_value > _FLOAT_MAX:
+ return _INF
+ if converted_value < _FLOAT_MIN:
+ return _NEG_INF
+
+ return TruncateToFourByteFloat(converted_value)
+
+ def DefaultValue(self):
+ return 0.0
+
+
+# Type-checkers for all scalar CPPTYPEs.
+_VALUE_CHECKERS = {
+ _FieldDescriptor.CPPTYPE_INT32: Int32ValueChecker(),
+ _FieldDescriptor.CPPTYPE_INT64: Int64ValueChecker(),
+ _FieldDescriptor.CPPTYPE_UINT32: Uint32ValueChecker(),
+ _FieldDescriptor.CPPTYPE_UINT64: Uint64ValueChecker(),
+ _FieldDescriptor.CPPTYPE_DOUBLE: TypeCheckerWithDefault(
+ 0.0, float, numbers.Real),
+ _FieldDescriptor.CPPTYPE_FLOAT: FloatValueChecker(),
+ _FieldDescriptor.CPPTYPE_BOOL: TypeCheckerWithDefault(
+ False, bool, numbers.Integral),
+ _FieldDescriptor.CPPTYPE_STRING: TypeCheckerWithDefault(b'', bytes),
+ }
+
+
+# Map from field type to a function F, such that F(field_num, value)
+# gives the total byte size for a value of the given type. This
+# byte size includes tag information and any other additional space
+# associated with serializing "value".
+TYPE_TO_BYTE_SIZE_FN = {
+ _FieldDescriptor.TYPE_DOUBLE: wire_format.DoubleByteSize,
+ _FieldDescriptor.TYPE_FLOAT: wire_format.FloatByteSize,
+ _FieldDescriptor.TYPE_INT64: wire_format.Int64ByteSize,
+ _FieldDescriptor.TYPE_UINT64: wire_format.UInt64ByteSize,
+ _FieldDescriptor.TYPE_INT32: wire_format.Int32ByteSize,
+ _FieldDescriptor.TYPE_FIXED64: wire_format.Fixed64ByteSize,
+ _FieldDescriptor.TYPE_FIXED32: wire_format.Fixed32ByteSize,
+ _FieldDescriptor.TYPE_BOOL: wire_format.BoolByteSize,
+ _FieldDescriptor.TYPE_STRING: wire_format.StringByteSize,
+ _FieldDescriptor.TYPE_GROUP: wire_format.GroupByteSize,
+ _FieldDescriptor.TYPE_MESSAGE: wire_format.MessageByteSize,
+ _FieldDescriptor.TYPE_BYTES: wire_format.BytesByteSize,
+ _FieldDescriptor.TYPE_UINT32: wire_format.UInt32ByteSize,
+ _FieldDescriptor.TYPE_ENUM: wire_format.EnumByteSize,
+ _FieldDescriptor.TYPE_SFIXED32: wire_format.SFixed32ByteSize,
+ _FieldDescriptor.TYPE_SFIXED64: wire_format.SFixed64ByteSize,
+ _FieldDescriptor.TYPE_SINT32: wire_format.SInt32ByteSize,
+ _FieldDescriptor.TYPE_SINT64: wire_format.SInt64ByteSize
+ }
+
+
+# Maps from field types to encoder constructors.
+TYPE_TO_ENCODER = {
+ _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleEncoder,
+ _FieldDescriptor.TYPE_FLOAT: encoder.FloatEncoder,
+ _FieldDescriptor.TYPE_INT64: encoder.Int64Encoder,
+ _FieldDescriptor.TYPE_UINT64: encoder.UInt64Encoder,
+ _FieldDescriptor.TYPE_INT32: encoder.Int32Encoder,
+ _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Encoder,
+ _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Encoder,
+ _FieldDescriptor.TYPE_BOOL: encoder.BoolEncoder,
+ _FieldDescriptor.TYPE_STRING: encoder.StringEncoder,
+ _FieldDescriptor.TYPE_GROUP: encoder.GroupEncoder,
+ _FieldDescriptor.TYPE_MESSAGE: encoder.MessageEncoder,
+ _FieldDescriptor.TYPE_BYTES: encoder.BytesEncoder,
+ _FieldDescriptor.TYPE_UINT32: encoder.UInt32Encoder,
+ _FieldDescriptor.TYPE_ENUM: encoder.EnumEncoder,
+ _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Encoder,
+ _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Encoder,
+ _FieldDescriptor.TYPE_SINT32: encoder.SInt32Encoder,
+ _FieldDescriptor.TYPE_SINT64: encoder.SInt64Encoder,
+ }
+
+
+# Maps from field types to sizer constructors.
+TYPE_TO_SIZER = {
+ _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleSizer,
+ _FieldDescriptor.TYPE_FLOAT: encoder.FloatSizer,
+ _FieldDescriptor.TYPE_INT64: encoder.Int64Sizer,
+ _FieldDescriptor.TYPE_UINT64: encoder.UInt64Sizer,
+ _FieldDescriptor.TYPE_INT32: encoder.Int32Sizer,
+ _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Sizer,
+ _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Sizer,
+ _FieldDescriptor.TYPE_BOOL: encoder.BoolSizer,
+ _FieldDescriptor.TYPE_STRING: encoder.StringSizer,
+ _FieldDescriptor.TYPE_GROUP: encoder.GroupSizer,
+ _FieldDescriptor.TYPE_MESSAGE: encoder.MessageSizer,
+ _FieldDescriptor.TYPE_BYTES: encoder.BytesSizer,
+ _FieldDescriptor.TYPE_UINT32: encoder.UInt32Sizer,
+ _FieldDescriptor.TYPE_ENUM: encoder.EnumSizer,
+ _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Sizer,
+ _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Sizer,
+ _FieldDescriptor.TYPE_SINT32: encoder.SInt32Sizer,
+ _FieldDescriptor.TYPE_SINT64: encoder.SInt64Sizer,
+ }
+
+
+# Maps from field type to a decoder constructor.
+TYPE_TO_DECODER = {
+ _FieldDescriptor.TYPE_DOUBLE: decoder.DoubleDecoder,
+ _FieldDescriptor.TYPE_FLOAT: decoder.FloatDecoder,
+ _FieldDescriptor.TYPE_INT64: decoder.Int64Decoder,
+ _FieldDescriptor.TYPE_UINT64: decoder.UInt64Decoder,
+ _FieldDescriptor.TYPE_INT32: decoder.Int32Decoder,
+ _FieldDescriptor.TYPE_FIXED64: decoder.Fixed64Decoder,
+ _FieldDescriptor.TYPE_FIXED32: decoder.Fixed32Decoder,
+ _FieldDescriptor.TYPE_BOOL: decoder.BoolDecoder,
+ _FieldDescriptor.TYPE_STRING: decoder.StringDecoder,
+ _FieldDescriptor.TYPE_GROUP: decoder.GroupDecoder,
+ _FieldDescriptor.TYPE_MESSAGE: decoder.MessageDecoder,
+ _FieldDescriptor.TYPE_BYTES: decoder.BytesDecoder,
+ _FieldDescriptor.TYPE_UINT32: decoder.UInt32Decoder,
+ _FieldDescriptor.TYPE_ENUM: decoder.EnumDecoder,
+ _FieldDescriptor.TYPE_SFIXED32: decoder.SFixed32Decoder,
+ _FieldDescriptor.TYPE_SFIXED64: decoder.SFixed64Decoder,
+ _FieldDescriptor.TYPE_SINT32: decoder.SInt32Decoder,
+ _FieldDescriptor.TYPE_SINT64: decoder.SInt64Decoder,
+ }
+
+# Maps from field type to expected wiretype.
+FIELD_TYPE_TO_WIRE_TYPE = {
+ _FieldDescriptor.TYPE_DOUBLE: wire_format.WIRETYPE_FIXED64,
+ _FieldDescriptor.TYPE_FLOAT: wire_format.WIRETYPE_FIXED32,
+ _FieldDescriptor.TYPE_INT64: wire_format.WIRETYPE_VARINT,
+ _FieldDescriptor.TYPE_UINT64: wire_format.WIRETYPE_VARINT,
+ _FieldDescriptor.TYPE_INT32: wire_format.WIRETYPE_VARINT,
+ _FieldDescriptor.TYPE_FIXED64: wire_format.WIRETYPE_FIXED64,
+ _FieldDescriptor.TYPE_FIXED32: wire_format.WIRETYPE_FIXED32,
+ _FieldDescriptor.TYPE_BOOL: wire_format.WIRETYPE_VARINT,
+ _FieldDescriptor.TYPE_STRING:
+ wire_format.WIRETYPE_LENGTH_DELIMITED,
+ _FieldDescriptor.TYPE_GROUP: wire_format.WIRETYPE_START_GROUP,
+ _FieldDescriptor.TYPE_MESSAGE:
+ wire_format.WIRETYPE_LENGTH_DELIMITED,
+ _FieldDescriptor.TYPE_BYTES:
+ wire_format.WIRETYPE_LENGTH_DELIMITED,
+ _FieldDescriptor.TYPE_UINT32: wire_format.WIRETYPE_VARINT,
+ _FieldDescriptor.TYPE_ENUM: wire_format.WIRETYPE_VARINT,
+ _FieldDescriptor.TYPE_SFIXED32: wire_format.WIRETYPE_FIXED32,
+ _FieldDescriptor.TYPE_SFIXED64: wire_format.WIRETYPE_FIXED64,
+ _FieldDescriptor.TYPE_SINT32: wire_format.WIRETYPE_VARINT,
+ _FieldDescriptor.TYPE_SINT64: wire_format.WIRETYPE_VARINT,
+ }
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/well_known_types.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/well_known_types.py
new file mode 100644
index 0000000000000000000000000000000000000000..30ff12588f4dfdcbfbba90bab552a30295a736a8
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/well_known_types.py
@@ -0,0 +1,859 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Contains well known classes.
+
+This file defines well known classes which need extra maintenance including:
+ - Any
+ - Duration
+ - FieldMask
+ - Struct
+ - Timestamp
+"""
+
+__author__ = 'jieluo@google.com (Jie Luo)'
+
+import calendar
+import collections.abc
+from datetime import datetime
+from datetime import timedelta
+
+from google.protobuf.descriptor import FieldDescriptor
+
+_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S'
+_NANOS_PER_SECOND = 1000000000
+_NANOS_PER_MILLISECOND = 1000000
+_NANOS_PER_MICROSECOND = 1000
+_MILLIS_PER_SECOND = 1000
+_MICROS_PER_SECOND = 1000000
+_SECONDS_PER_DAY = 24 * 3600
+_DURATION_SECONDS_MAX = 315576000000
+
+
+class Any(object):
+ """Class for Any Message type."""
+
+ __slots__ = ()
+
+ def Pack(self, msg, type_url_prefix='type.googleapis.com/',
+ deterministic=None):
+ """Packs the specified message into current Any message."""
+ if len(type_url_prefix) < 1 or type_url_prefix[-1] != '/':
+ self.type_url = '%s/%s' % (type_url_prefix, msg.DESCRIPTOR.full_name)
+ else:
+ self.type_url = '%s%s' % (type_url_prefix, msg.DESCRIPTOR.full_name)
+ self.value = msg.SerializeToString(deterministic=deterministic)
+
+ def Unpack(self, msg):
+ """Unpacks the current Any message into specified message."""
+ descriptor = msg.DESCRIPTOR
+ if not self.Is(descriptor):
+ return False
+ msg.ParseFromString(self.value)
+ return True
+
+ def TypeName(self):
+ """Returns the protobuf type name of the inner message."""
+ # Only last part is to be used: b/25630112
+ return self.type_url.split('/')[-1]
+
+ def Is(self, descriptor):
+ """Checks if this Any represents the given protobuf type."""
+ return '/' in self.type_url and self.TypeName() == descriptor.full_name
+
+
+_EPOCH_DATETIME = datetime.utcfromtimestamp(0)
+
+
+class Timestamp(object):
+ """Class for Timestamp message type."""
+
+ __slots__ = ()
+
+ def ToJsonString(self):
+ """Converts Timestamp to RFC 3339 date string format.
+
+ Returns:
+ A string converted from timestamp. The string is always Z-normalized
+ and uses 3, 6 or 9 fractional digits as required to represent the
+ exact time. Example of the return format: '1972-01-01T10:00:20.021Z'
+ """
+ nanos = self.nanos % _NANOS_PER_SECOND
+ total_sec = self.seconds + (self.nanos - nanos) // _NANOS_PER_SECOND
+ seconds = total_sec % _SECONDS_PER_DAY
+ days = (total_sec - seconds) // _SECONDS_PER_DAY
+ dt = datetime(1970, 1, 1) + timedelta(days, seconds)
+
+ result = dt.isoformat()
+ if (nanos % 1e9) == 0:
+ # If there are 0 fractional digits, the fractional
+ # point '.' should be omitted when serializing.
+ return result + 'Z'
+ if (nanos % 1e6) == 0:
+ # Serialize 3 fractional digits.
+ return result + '.%03dZ' % (nanos / 1e6)
+ if (nanos % 1e3) == 0:
+ # Serialize 6 fractional digits.
+ return result + '.%06dZ' % (nanos / 1e3)
+ # Serialize 9 fractional digits.
+ return result + '.%09dZ' % nanos
+
+ def FromJsonString(self, value):
+ """Parse a RFC 3339 date string format to Timestamp.
+
+ Args:
+ value: A date string. Any fractional digits (or none) and any offset are
+ accepted as long as they fit into nano-seconds precision.
+ Example of accepted format: '1972-01-01T10:00:20.021-05:00'
+
+ Raises:
+ ValueError: On parsing problems.
+ """
+ if not isinstance(value, str):
+ raise ValueError('Timestamp JSON value not a string: {!r}'.format(value))
+ timezone_offset = value.find('Z')
+ if timezone_offset == -1:
+ timezone_offset = value.find('+')
+ if timezone_offset == -1:
+ timezone_offset = value.rfind('-')
+ if timezone_offset == -1:
+ raise ValueError(
+ 'Failed to parse timestamp: missing valid timezone offset.')
+ time_value = value[0:timezone_offset]
+ # Parse datetime and nanos.
+ point_position = time_value.find('.')
+ if point_position == -1:
+ second_value = time_value
+ nano_value = ''
+ else:
+ second_value = time_value[:point_position]
+ nano_value = time_value[point_position + 1:]
+ if 't' in second_value:
+ raise ValueError(
+ 'time data \'{0}\' does not match format \'%Y-%m-%dT%H:%M:%S\', '
+ 'lowercase \'t\' is not accepted'.format(second_value))
+ date_object = datetime.strptime(second_value, _TIMESTAMPFOMAT)
+ td = date_object - datetime(1970, 1, 1)
+ seconds = td.seconds + td.days * _SECONDS_PER_DAY
+ if len(nano_value) > 9:
+ raise ValueError(
+ 'Failed to parse Timestamp: nanos {0} more than '
+ '9 fractional digits.'.format(nano_value))
+ if nano_value:
+ nanos = round(float('0.' + nano_value) * 1e9)
+ else:
+ nanos = 0
+ # Parse timezone offsets.
+ if value[timezone_offset] == 'Z':
+ if len(value) != timezone_offset + 1:
+ raise ValueError('Failed to parse timestamp: invalid trailing'
+ ' data {0}.'.format(value))
+ else:
+ timezone = value[timezone_offset:]
+ pos = timezone.find(':')
+ if pos == -1:
+ raise ValueError(
+ 'Invalid timezone offset value: {0}.'.format(timezone))
+ if timezone[0] == '+':
+ seconds -= (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60
+ else:
+ seconds += (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60
+ # Set seconds and nanos
+ self.seconds = int(seconds)
+ self.nanos = int(nanos)
+
+ def GetCurrentTime(self):
+ """Get the current UTC into Timestamp."""
+ self.FromDatetime(datetime.utcnow())
+
+ def ToNanoseconds(self):
+ """Converts Timestamp to nanoseconds since epoch."""
+ return self.seconds * _NANOS_PER_SECOND + self.nanos
+
+ def ToMicroseconds(self):
+ """Converts Timestamp to microseconds since epoch."""
+ return (self.seconds * _MICROS_PER_SECOND +
+ self.nanos // _NANOS_PER_MICROSECOND)
+
+ def ToMilliseconds(self):
+ """Converts Timestamp to milliseconds since epoch."""
+ return (self.seconds * _MILLIS_PER_SECOND +
+ self.nanos // _NANOS_PER_MILLISECOND)
+
+ def ToSeconds(self):
+ """Converts Timestamp to seconds since epoch."""
+ return self.seconds
+
+ def FromNanoseconds(self, nanos):
+ """Converts nanoseconds since epoch to Timestamp."""
+ self.seconds = nanos // _NANOS_PER_SECOND
+ self.nanos = nanos % _NANOS_PER_SECOND
+
+ def FromMicroseconds(self, micros):
+ """Converts microseconds since epoch to Timestamp."""
+ self.seconds = micros // _MICROS_PER_SECOND
+ self.nanos = (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND
+
+ def FromMilliseconds(self, millis):
+ """Converts milliseconds since epoch to Timestamp."""
+ self.seconds = millis // _MILLIS_PER_SECOND
+ self.nanos = (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND
+
+ def FromSeconds(self, seconds):
+ """Converts seconds since epoch to Timestamp."""
+ self.seconds = seconds
+ self.nanos = 0
+
+ def ToDatetime(self):
+ """Converts Timestamp to datetime."""
+ return _EPOCH_DATETIME + timedelta(
+ seconds=self.seconds, microseconds=_RoundTowardZero(
+ self.nanos, _NANOS_PER_MICROSECOND))
+
+ def FromDatetime(self, dt):
+ """Converts datetime to Timestamp."""
+ # Using this guide: http://wiki.python.org/moin/WorkingWithTime
+ # And this conversion guide: http://docs.python.org/library/time.html
+
+ # Turn the date parameter into a tuple (struct_time) that can then be
+ # manipulated into a long value of seconds. During the conversion from
+    # struct_time to long, the source date is in UTC, and so it follows that the
+ # correct transformation is calendar.timegm()
+ self.seconds = calendar.timegm(dt.utctimetuple())
+ self.nanos = dt.microsecond * _NANOS_PER_MICROSECOND
+
+
+class Duration(object):
+ """Class for Duration message type."""
+
+ __slots__ = ()
+
+ def ToJsonString(self):
+ """Converts Duration to string format.
+
+ Returns:
+      A string converted from self. The string format will contain
+ 3, 6, or 9 fractional digits depending on the precision required to
+ represent the exact Duration value. For example: "1s", "1.010s",
+ "1.000000100s", "-3.100s"
+ """
+ _CheckDurationValid(self.seconds, self.nanos)
+ if self.seconds < 0 or self.nanos < 0:
+ result = '-'
+ seconds = - self.seconds + int((0 - self.nanos) // 1e9)
+ nanos = (0 - self.nanos) % 1e9
+ else:
+ result = ''
+ seconds = self.seconds + int(self.nanos // 1e9)
+ nanos = self.nanos % 1e9
+ result += '%d' % seconds
+ if (nanos % 1e9) == 0:
+ # If there are 0 fractional digits, the fractional
+ # point '.' should be omitted when serializing.
+ return result + 's'
+ if (nanos % 1e6) == 0:
+ # Serialize 3 fractional digits.
+ return result + '.%03ds' % (nanos / 1e6)
+ if (nanos % 1e3) == 0:
+ # Serialize 6 fractional digits.
+ return result + '.%06ds' % (nanos / 1e3)
+ # Serialize 9 fractional digits.
+ return result + '.%09ds' % nanos
+
+ def FromJsonString(self, value):
+ """Converts a string to Duration.
+
+ Args:
+ value: A string to be converted. The string must end with 's'. Any
+ fractional digits (or none) are accepted as long as they fit into
+        precision. For example: "1s", "1.01s", "1.0000001s", "-3.100s".
+
+ Raises:
+ ValueError: On parsing problems.
+ """
+ if not isinstance(value, str):
+ raise ValueError('Duration JSON value not a string: {!r}'.format(value))
+ if len(value) < 1 or value[-1] != 's':
+ raise ValueError(
+ 'Duration must end with letter "s": {0}.'.format(value))
+ try:
+ pos = value.find('.')
+ if pos == -1:
+ seconds = int(value[:-1])
+ nanos = 0
+ else:
+ seconds = int(value[:pos])
+ if value[0] == '-':
+ nanos = int(round(float('-0{0}'.format(value[pos: -1])) *1e9))
+ else:
+ nanos = int(round(float('0{0}'.format(value[pos: -1])) *1e9))
+ _CheckDurationValid(seconds, nanos)
+ self.seconds = seconds
+ self.nanos = nanos
+ except ValueError as e:
+ raise ValueError(
+ 'Couldn\'t parse duration: {0} : {1}.'.format(value, e))
+
+ def ToNanoseconds(self):
+ """Converts a Duration to nanoseconds."""
+ return self.seconds * _NANOS_PER_SECOND + self.nanos
+
+ def ToMicroseconds(self):
+ """Converts a Duration to microseconds."""
+ micros = _RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND)
+ return self.seconds * _MICROS_PER_SECOND + micros
+
+ def ToMilliseconds(self):
+ """Converts a Duration to milliseconds."""
+ millis = _RoundTowardZero(self.nanos, _NANOS_PER_MILLISECOND)
+ return self.seconds * _MILLIS_PER_SECOND + millis
+
+ def ToSeconds(self):
+ """Converts a Duration to seconds."""
+ return self.seconds
+
+ def FromNanoseconds(self, nanos):
+ """Converts nanoseconds to Duration."""
+ self._NormalizeDuration(nanos // _NANOS_PER_SECOND,
+ nanos % _NANOS_PER_SECOND)
+
+ def FromMicroseconds(self, micros):
+ """Converts microseconds to Duration."""
+ self._NormalizeDuration(
+ micros // _MICROS_PER_SECOND,
+ (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND)
+
+ def FromMilliseconds(self, millis):
+ """Converts milliseconds to Duration."""
+ self._NormalizeDuration(
+ millis // _MILLIS_PER_SECOND,
+ (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND)
+
+ def FromSeconds(self, seconds):
+ """Converts seconds to Duration."""
+ self.seconds = seconds
+ self.nanos = 0
+
+ def ToTimedelta(self):
+ """Converts Duration to timedelta."""
+ return timedelta(
+ seconds=self.seconds, microseconds=_RoundTowardZero(
+ self.nanos, _NANOS_PER_MICROSECOND))
+
+ def FromTimedelta(self, td):
+ """Converts timedelta to Duration."""
+ self._NormalizeDuration(td.seconds + td.days * _SECONDS_PER_DAY,
+ td.microseconds * _NANOS_PER_MICROSECOND)
+
+ def _NormalizeDuration(self, seconds, nanos):
+ """Set Duration by seconds and nanos."""
+ # Force nanos to be negative if the duration is negative.
+ if seconds < 0 and nanos > 0:
+ seconds += 1
+ nanos -= _NANOS_PER_SECOND
+ self.seconds = seconds
+ self.nanos = nanos
+
+
+def _CheckDurationValid(seconds, nanos):
+ if seconds < -_DURATION_SECONDS_MAX or seconds > _DURATION_SECONDS_MAX:
+ raise ValueError(
+ 'Duration is not valid: Seconds {0} must be in range '
+ '[-315576000000, 315576000000].'.format(seconds))
+ if nanos <= -_NANOS_PER_SECOND or nanos >= _NANOS_PER_SECOND:
+ raise ValueError(
+ 'Duration is not valid: Nanos {0} must be in range '
+ '[-999999999, 999999999].'.format(nanos))
+ if (nanos < 0 and seconds > 0) or (nanos > 0 and seconds < 0):
+ raise ValueError(
+ 'Duration is not valid: Sign mismatch.')
+
+
+def _RoundTowardZero(value, divider):
+ """Truncates the remainder part after division."""
+ # For some languages, the sign of the remainder is implementation
+ # dependent if any of the operands is negative. Here we enforce
+ # "rounded toward zero" semantics. For example, for (-5) / 2 an
+ # implementation may give -3 as the result with the remainder being
+ # 1. This function ensures we always return -2 (closer to zero).
+ result = value // divider
+ remainder = value % divider
+ if result < 0 and remainder > 0:
+ return result + 1
+ else:
+ return result
+
+
+class FieldMask(object):
+ """Class for FieldMask message type."""
+
+ __slots__ = ()
+
+ def ToJsonString(self):
+ """Converts FieldMask to string according to proto3 JSON spec."""
+ camelcase_paths = []
+ for path in self.paths:
+ camelcase_paths.append(_SnakeCaseToCamelCase(path))
+ return ','.join(camelcase_paths)
+
+ def FromJsonString(self, value):
+ """Converts string to FieldMask according to proto3 JSON spec."""
+ if not isinstance(value, str):
+ raise ValueError('FieldMask JSON value not a string: {!r}'.format(value))
+ self.Clear()
+ if value:
+ for path in value.split(','):
+ self.paths.append(_CamelCaseToSnakeCase(path))
+
+ def IsValidForDescriptor(self, message_descriptor):
+ """Checks whether the FieldMask is valid for Message Descriptor."""
+ for path in self.paths:
+ if not _IsValidPath(message_descriptor, path):
+ return False
+ return True
+
+ def AllFieldsFromDescriptor(self, message_descriptor):
+ """Gets all direct fields of Message Descriptor to FieldMask."""
+ self.Clear()
+ for field in message_descriptor.fields:
+ self.paths.append(field.name)
+
+ def CanonicalFormFromMask(self, mask):
+ """Converts a FieldMask to the canonical form.
+
+ Removes paths that are covered by another path. For example,
+ "foo.bar" is covered by "foo" and will be removed if "foo"
+ is also in the FieldMask. Then sorts all paths in alphabetical order.
+
+ Args:
+ mask: The original FieldMask to be converted.
+ """
+ tree = _FieldMaskTree(mask)
+ tree.ToFieldMask(self)
+
+ def Union(self, mask1, mask2):
+ """Merges mask1 and mask2 into this FieldMask."""
+ _CheckFieldMaskMessage(mask1)
+ _CheckFieldMaskMessage(mask2)
+ tree = _FieldMaskTree(mask1)
+ tree.MergeFromFieldMask(mask2)
+ tree.ToFieldMask(self)
+
+ def Intersect(self, mask1, mask2):
+ """Intersects mask1 and mask2 into this FieldMask."""
+ _CheckFieldMaskMessage(mask1)
+ _CheckFieldMaskMessage(mask2)
+ tree = _FieldMaskTree(mask1)
+ intersection = _FieldMaskTree()
+ for path in mask2.paths:
+ tree.IntersectPath(path, intersection)
+ intersection.ToFieldMask(self)
+
+ def MergeMessage(
+ self, source, destination,
+ replace_message_field=False, replace_repeated_field=False):
+ """Merges fields specified in FieldMask from source to destination.
+
+ Args:
+ source: Source message.
+ destination: The destination message to be merged into.
+ replace_message_field: Replace message field if True. Merge message
+ field if False.
+ replace_repeated_field: Replace repeated field if True. Append
+ elements of repeated field if False.
+ """
+ tree = _FieldMaskTree(self)
+ tree.MergeMessage(
+ source, destination, replace_message_field, replace_repeated_field)
+
+
+def _IsValidPath(message_descriptor, path):
+ """Checks whether the path is valid for Message Descriptor."""
+ parts = path.split('.')
+ last = parts.pop()
+ for name in parts:
+ field = message_descriptor.fields_by_name.get(name)
+ if (field is None or
+ field.label == FieldDescriptor.LABEL_REPEATED or
+ field.type != FieldDescriptor.TYPE_MESSAGE):
+ return False
+ message_descriptor = field.message_type
+ return last in message_descriptor.fields_by_name
+
+
+def _CheckFieldMaskMessage(message):
+ """Raises ValueError if message is not a FieldMask."""
+ message_descriptor = message.DESCRIPTOR
+ if (message_descriptor.name != 'FieldMask' or
+ message_descriptor.file.name != 'google/protobuf/field_mask.proto'):
+ raise ValueError('Message {0} is not a FieldMask.'.format(
+ message_descriptor.full_name))
+
+
+def _SnakeCaseToCamelCase(path_name):
+ """Converts a path name from snake_case to camelCase."""
+ result = []
+ after_underscore = False
+ for c in path_name:
+ if c.isupper():
+ raise ValueError(
+ 'Fail to print FieldMask to Json string: Path name '
+ '{0} must not contain uppercase letters.'.format(path_name))
+ if after_underscore:
+ if c.islower():
+ result.append(c.upper())
+ after_underscore = False
+ else:
+ raise ValueError(
+ 'Fail to print FieldMask to Json string: The '
+ 'character after a "_" must be a lowercase letter '
+ 'in path name {0}.'.format(path_name))
+ elif c == '_':
+ after_underscore = True
+ else:
+ result += c
+
+ if after_underscore:
+ raise ValueError('Fail to print FieldMask to Json string: Trailing "_" '
+ 'in path name {0}.'.format(path_name))
+ return ''.join(result)
+
+
+def _CamelCaseToSnakeCase(path_name):
+ """Converts a field name from camelCase to snake_case."""
+ result = []
+ for c in path_name:
+ if c == '_':
+ raise ValueError('Fail to parse FieldMask: Path name '
+ '{0} must not contain "_"s.'.format(path_name))
+ if c.isupper():
+ result += '_'
+ result += c.lower()
+ else:
+ result += c
+ return ''.join(result)
+
+
+class _FieldMaskTree(object):
+ """Represents a FieldMask in a tree structure.
+
+ For example, given a FieldMask "foo.bar,foo.baz,bar.baz",
+ the FieldMaskTree will be:
+ [_root] -+- foo -+- bar
+ | |
+ | +- baz
+ |
+ +- bar --- baz
+ In the tree, each leaf node represents a field path.
+ """
+
+ __slots__ = ('_root',)
+
+ def __init__(self, field_mask=None):
+ """Initializes the tree by FieldMask."""
+ self._root = {}
+ if field_mask:
+ self.MergeFromFieldMask(field_mask)
+
+ def MergeFromFieldMask(self, field_mask):
+ """Merges a FieldMask to the tree."""
+ for path in field_mask.paths:
+ self.AddPath(path)
+
+ def AddPath(self, path):
+ """Adds a field path into the tree.
+
+ If the field path to add is a sub-path of an existing field path
+ in the tree (i.e., a leaf node), it means the tree already matches
+ the given path so nothing will be added to the tree. If the path
+ matches an existing non-leaf node in the tree, that non-leaf node
+ will be turned into a leaf node with all its children removed because
+ the path matches all the node's children. Otherwise, a new path will
+ be added.
+
+ Args:
+ path: The field path to add.
+ """
+ node = self._root
+ for name in path.split('.'):
+ if name not in node:
+ node[name] = {}
+ elif not node[name]:
+ # Pre-existing empty node implies we already have this entire tree.
+ return
+ node = node[name]
+ # Remove any sub-trees we might have had.
+ node.clear()
+
+ def ToFieldMask(self, field_mask):
+ """Converts the tree to a FieldMask."""
+ field_mask.Clear()
+ _AddFieldPaths(self._root, '', field_mask)
+
+ def IntersectPath(self, path, intersection):
+ """Calculates the intersection part of a field path with this tree.
+
+ Args:
+ path: The field path to calculates.
+ intersection: The out tree to record the intersection part.
+ """
+ node = self._root
+ for name in path.split('.'):
+ if name not in node:
+ return
+ elif not node[name]:
+ intersection.AddPath(path)
+ return
+ node = node[name]
+ intersection.AddLeafNodes(path, node)
+
+ def AddLeafNodes(self, prefix, node):
+ """Adds leaf nodes begin with prefix to this tree."""
+ if not node:
+ self.AddPath(prefix)
+ for name in node:
+ child_path = prefix + '.' + name
+ self.AddLeafNodes(child_path, node[name])
+
+ def MergeMessage(
+ self, source, destination,
+ replace_message, replace_repeated):
+ """Merge all fields specified by this tree from source to destination."""
+ _MergeMessage(
+ self._root, source, destination, replace_message, replace_repeated)
+
+
+def _StrConvert(value):
+ """Converts value to str if it is not."""
+ # This file is imported by c extension and some methods like ClearField
+ # requires string for the field name. py2/py3 has different text
+ # type and may use unicode.
+ if not isinstance(value, str):
+ return value.encode('utf-8')
+ return value
+
+
+def _MergeMessage(
+ node, source, destination, replace_message, replace_repeated):
+ """Merge all fields specified by a sub-tree from source to destination."""
+ source_descriptor = source.DESCRIPTOR
+ for name in node:
+ child = node[name]
+ field = source_descriptor.fields_by_name[name]
+ if field is None:
+ raise ValueError('Error: Can\'t find field {0} in message {1}.'.format(
+ name, source_descriptor.full_name))
+ if child:
+ # Sub-paths are only allowed for singular message fields.
+ if (field.label == FieldDescriptor.LABEL_REPEATED or
+ field.cpp_type != FieldDescriptor.CPPTYPE_MESSAGE):
+ raise ValueError('Error: Field {0} in message {1} is not a singular '
+ 'message field and cannot have sub-fields.'.format(
+ name, source_descriptor.full_name))
+ if source.HasField(name):
+ _MergeMessage(
+ child, getattr(source, name), getattr(destination, name),
+ replace_message, replace_repeated)
+ continue
+ if field.label == FieldDescriptor.LABEL_REPEATED:
+ if replace_repeated:
+ destination.ClearField(_StrConvert(name))
+ repeated_source = getattr(source, name)
+ repeated_destination = getattr(destination, name)
+ repeated_destination.MergeFrom(repeated_source)
+ else:
+ if field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
+ if replace_message:
+ destination.ClearField(_StrConvert(name))
+ if source.HasField(name):
+ getattr(destination, name).MergeFrom(getattr(source, name))
+ else:
+ setattr(destination, name, getattr(source, name))
+
+
+def _AddFieldPaths(node, prefix, field_mask):
+ """Adds the field paths descended from node to field_mask."""
+ if not node and prefix:
+ field_mask.paths.append(prefix)
+ return
+ for name in sorted(node):
+ if prefix:
+ child_path = prefix + '.' + name
+ else:
+ child_path = name
+ _AddFieldPaths(node[name], child_path, field_mask)
+
+
+def _SetStructValue(struct_value, value):
+ if value is None:
+ struct_value.null_value = 0
+ elif isinstance(value, bool):
+ # Note: this check must come before the number check because in Python
+ # True and False are also considered numbers.
+ struct_value.bool_value = value
+ elif isinstance(value, str):
+ struct_value.string_value = value
+ elif isinstance(value, (int, float)):
+ struct_value.number_value = value
+ elif isinstance(value, (dict, Struct)):
+ struct_value.struct_value.Clear()
+ struct_value.struct_value.update(value)
+ elif isinstance(value, (list, ListValue)):
+ struct_value.list_value.Clear()
+ struct_value.list_value.extend(value)
+ else:
+ raise ValueError('Unexpected type')
+
+
+def _GetStructValue(struct_value):
+ which = struct_value.WhichOneof('kind')
+ if which == 'struct_value':
+ return struct_value.struct_value
+ elif which == 'null_value':
+ return None
+ elif which == 'number_value':
+ return struct_value.number_value
+ elif which == 'string_value':
+ return struct_value.string_value
+ elif which == 'bool_value':
+ return struct_value.bool_value
+ elif which == 'list_value':
+ return struct_value.list_value
+ elif which is None:
+ raise ValueError('Value not set')
+
+
+class Struct(object):
+ """Class for Struct message type."""
+
+ __slots__ = ()
+
+ def __getitem__(self, key):
+ return _GetStructValue(self.fields[key])
+
+ def __contains__(self, item):
+ return item in self.fields
+
+ def __setitem__(self, key, value):
+ _SetStructValue(self.fields[key], value)
+
+ def __delitem__(self, key):
+ del self.fields[key]
+
+ def __len__(self):
+ return len(self.fields)
+
+ def __iter__(self):
+ return iter(self.fields)
+
+ def keys(self): # pylint: disable=invalid-name
+ return self.fields.keys()
+
+ def values(self): # pylint: disable=invalid-name
+ return [self[key] for key in self]
+
+ def items(self): # pylint: disable=invalid-name
+ return [(key, self[key]) for key in self]
+
+ def get_or_create_list(self, key):
+ """Returns a list for this key, creating if it didn't exist already."""
+ if not self.fields[key].HasField('list_value'):
+ # Clear will mark list_value modified which will indeed create a list.
+ self.fields[key].list_value.Clear()
+ return self.fields[key].list_value
+
+ def get_or_create_struct(self, key):
+ """Returns a struct for this key, creating if it didn't exist already."""
+ if not self.fields[key].HasField('struct_value'):
+ # Clear will mark struct_value modified which will indeed create a struct.
+ self.fields[key].struct_value.Clear()
+ return self.fields[key].struct_value
+
+ def update(self, dictionary): # pylint: disable=invalid-name
+ for key, value in dictionary.items():
+ _SetStructValue(self.fields[key], value)
+
+collections.abc.MutableMapping.register(Struct)
+
+
+class ListValue(object):
+ """Class for ListValue message type."""
+
+ __slots__ = ()
+
+ def __len__(self):
+ return len(self.values)
+
+ def append(self, value):
+ _SetStructValue(self.values.add(), value)
+
+ def extend(self, elem_seq):
+ for value in elem_seq:
+ self.append(value)
+
+ def __getitem__(self, index):
+ """Retrieves item by the specified index."""
+ return _GetStructValue(self.values.__getitem__(index))
+
+ def __setitem__(self, index, value):
+ _SetStructValue(self.values.__getitem__(index), value)
+
+ def __delitem__(self, key):
+ del self.values[key]
+
+ def items(self):
+ for i in range(len(self)):
+ yield self[i]
+
+ def add_struct(self):
+ """Appends and returns a struct value as the next value in the list."""
+ struct_value = self.values.add().struct_value
+ # Clear will mark struct_value modified which will indeed create a struct.
+ struct_value.Clear()
+ return struct_value
+
+ def add_list(self):
+ """Appends and returns a list value as the next value in the list."""
+ list_value = self.values.add().list_value
+ # Clear will mark list_value modified which will indeed create a list.
+ list_value.Clear()
+ return list_value
+
+collections.abc.MutableSequence.register(ListValue)
+
+
+WKTBASES = {
+ 'google.protobuf.Any': Any,
+ 'google.protobuf.Duration': Duration,
+ 'google.protobuf.FieldMask': FieldMask,
+ 'google.protobuf.ListValue': ListValue,
+ 'google.protobuf.Struct': Struct,
+ 'google.protobuf.Timestamp': Timestamp,
+}
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/wire_format.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/wire_format.py
new file mode 100644
index 0000000000000000000000000000000000000000..883f525585139493438c3c8922bbb82cf1b0084e
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/internal/wire_format.py
@@ -0,0 +1,268 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Constants and static functions to support protocol buffer wire format."""
+
+__author__ = 'robinson@google.com (Will Robinson)'
+
+import struct
+from google.protobuf import descriptor
+from google.protobuf import message
+
+
+TAG_TYPE_BITS = 3 # Number of bits used to hold type info in a proto tag.
+TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1 # 0x7
+
+# These numbers identify the wire type of a protocol buffer value.
+# We use the least-significant TAG_TYPE_BITS bits of the varint-encoded
+# tag-and-type to store one of these WIRETYPE_* constants.
+# These values must match WireType enum in google/protobuf/wire_format.h.
+WIRETYPE_VARINT = 0
+WIRETYPE_FIXED64 = 1
+WIRETYPE_LENGTH_DELIMITED = 2
+WIRETYPE_START_GROUP = 3
+WIRETYPE_END_GROUP = 4
+WIRETYPE_FIXED32 = 5
+_WIRETYPE_MAX = 5
+
+
+# Bounds for various integer types.
+INT32_MAX = int((1 << 31) - 1)
+INT32_MIN = int(-(1 << 31))
+UINT32_MAX = (1 << 32) - 1
+
+INT64_MAX = (1 << 63) - 1
+INT64_MIN = -(1 << 63)
+UINT64_MAX = (1 << 64) - 1
+
+# "struct" format strings that will encode/decode the specified formats.
+FORMAT_UINT32_LITTLE_ENDIAN = '<I'
+FORMAT_UINT64_LITTLE_ENDIAN = '<Q'
+FORMAT_FLOAT_LITTLE_ENDIAN = '<f'
+FORMAT_DOUBLE_LITTLE_ENDIAN = '<d'
+
+
+# We'll have to provide alternate implementations of AppendLittleEndian*() on
+# any architectures where these checks fail.
+if struct.calcsize(FORMAT_UINT32_LITTLE_ENDIAN) != 4:
+ raise AssertionError('Format "I" is not a 32-bit number.')
+if struct.calcsize(FORMAT_UINT64_LITTLE_ENDIAN) != 8:
+ raise AssertionError('Format "Q" is not a 64-bit number.')
+
+
+def PackTag(field_number, wire_type):
+ """Returns an unsigned 32-bit integer that encodes the field number and
+ wire type information in standard protocol message wire format.
+
+ Args:
+ field_number: Expected to be an integer in the range [1, 1 << 29)
+ wire_type: One of the WIRETYPE_* constants.
+ """
+ if not 0 <= wire_type <= _WIRETYPE_MAX:
+ raise message.EncodeError('Unknown wire type: %d' % wire_type)
+ return (field_number << TAG_TYPE_BITS) | wire_type
+
+
+def UnpackTag(tag):
+ """The inverse of PackTag(). Given an unsigned 32-bit number,
+ returns a (field_number, wire_type) tuple.
+ """
+ return (tag >> TAG_TYPE_BITS), (tag & TAG_TYPE_MASK)
+
+
+def ZigZagEncode(value):
+ """ZigZag Transform: Encodes signed integers so that they can be
+ effectively used with varint encoding. See wire_format.h for
+ more details.
+ """
+ if value >= 0:
+ return value << 1
+ return (value << 1) ^ (~0)
+
+
+def ZigZagDecode(value):
+ """Inverse of ZigZagEncode()."""
+ if not value & 0x1:
+ return value >> 1
+ return (value >> 1) ^ (~0)
+
+
+
+# The *ByteSize() functions below return the number of bytes required to
+# serialize "field number + type" information and then serialize the value.
+
+
+def Int32ByteSize(field_number, int32):
+ return Int64ByteSize(field_number, int32)
+
+
+def Int32ByteSizeNoTag(int32):
+ return _VarUInt64ByteSizeNoTag(0xffffffffffffffff & int32)
+
+
+def Int64ByteSize(field_number, int64):
+ # Have to convert to uint before calling UInt64ByteSize().
+ return UInt64ByteSize(field_number, 0xffffffffffffffff & int64)
+
+
+def UInt32ByteSize(field_number, uint32):
+ return UInt64ByteSize(field_number, uint32)
+
+
+def UInt64ByteSize(field_number, uint64):
+ return TagByteSize(field_number) + _VarUInt64ByteSizeNoTag(uint64)
+
+
+def SInt32ByteSize(field_number, int32):
+ return UInt32ByteSize(field_number, ZigZagEncode(int32))
+
+
+def SInt64ByteSize(field_number, int64):
+ return UInt64ByteSize(field_number, ZigZagEncode(int64))
+
+
+def Fixed32ByteSize(field_number, fixed32):
+ return TagByteSize(field_number) + 4
+
+
+def Fixed64ByteSize(field_number, fixed64):
+ return TagByteSize(field_number) + 8
+
+
+def SFixed32ByteSize(field_number, sfixed32):
+ return TagByteSize(field_number) + 4
+
+
+def SFixed64ByteSize(field_number, sfixed64):
+ return TagByteSize(field_number) + 8
+
+
+def FloatByteSize(field_number, flt):
+ return TagByteSize(field_number) + 4
+
+
+def DoubleByteSize(field_number, double):
+ return TagByteSize(field_number) + 8
+
+
+def BoolByteSize(field_number, b):
+ return TagByteSize(field_number) + 1
+
+
+def EnumByteSize(field_number, enum):
+ return UInt32ByteSize(field_number, enum)
+
+
+def StringByteSize(field_number, string):
+ return BytesByteSize(field_number, string.encode('utf-8'))
+
+
+def BytesByteSize(field_number, b):
+ return (TagByteSize(field_number)
+ + _VarUInt64ByteSizeNoTag(len(b))
+ + len(b))
+
+
+def GroupByteSize(field_number, message):
+ return (2 * TagByteSize(field_number) # START and END group.
+ + message.ByteSize())
+
+
+def MessageByteSize(field_number, message):
+ return (TagByteSize(field_number)
+ + _VarUInt64ByteSizeNoTag(message.ByteSize())
+ + message.ByteSize())
+
+
+def MessageSetItemByteSize(field_number, msg):
+ # First compute the sizes of the tags.
+ # There are 2 tags for the beginning and ending of the repeated group, that
+ # is field number 1, one with field number 2 (type_id) and one with field
+ # number 3 (message).
+ total_size = (2 * TagByteSize(1) + TagByteSize(2) + TagByteSize(3))
+
+ # Add the number of bytes for type_id.
+ total_size += _VarUInt64ByteSizeNoTag(field_number)
+
+ message_size = msg.ByteSize()
+
+ # The number of bytes for encoding the length of the message.
+ total_size += _VarUInt64ByteSizeNoTag(message_size)
+
+ # The size of the message.
+ total_size += message_size
+ return total_size
+
+
+def TagByteSize(field_number):
+ """Returns the bytes required to serialize a tag with this field number."""
+ # Just pass in type 0, since the type won't affect the tag+type size.
+ return _VarUInt64ByteSizeNoTag(PackTag(field_number, 0))
+
+
+# Private helper function for the *ByteSize() functions above.
+
+def _VarUInt64ByteSizeNoTag(uint64):
+ """Returns the number of bytes required to serialize a single varint
+ using boundary value comparisons. (unrolled loop optimization -WPierce)
+ uint64 must be unsigned.
+ """
+ if uint64 <= 0x7f: return 1
+ if uint64 <= 0x3fff: return 2
+ if uint64 <= 0x1fffff: return 3
+ if uint64 <= 0xfffffff: return 4
+ if uint64 <= 0x7ffffffff: return 5
+ if uint64 <= 0x3ffffffffff: return 6
+ if uint64 <= 0x1ffffffffffff: return 7
+ if uint64 <= 0xffffffffffffff: return 8
+ if uint64 <= 0x7fffffffffffffff: return 9
+ if uint64 > UINT64_MAX:
+ raise message.EncodeError('Value out of range: %d' % uint64)
+ return 10
+
+
+NON_PACKABLE_TYPES = (
+ descriptor.FieldDescriptor.TYPE_STRING,
+ descriptor.FieldDescriptor.TYPE_GROUP,
+ descriptor.FieldDescriptor.TYPE_MESSAGE,
+ descriptor.FieldDescriptor.TYPE_BYTES
+)
+
+
+def IsTypePackable(field_type):
+ """Return true iff packable = true is valid for fields of this type.
+
+ Args:
+ field_type: a FieldDescriptor::Type value.
+
+ Returns:
+ True iff fields of this type are packable.
+ """
+ return field_type not in NON_PACKABLE_TYPES
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/json_format.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/json_format.py
new file mode 100644
index 0000000000000000000000000000000000000000..21eb749b993a14ca3b8d929344fc47cbfa49411e
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/json_format.py
@@ -0,0 +1,856 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Contains routines for printing protocol messages in JSON format.
+
+Simple usage example:
+
+ # Create a proto object and serialize it to a json format string.
+ message = my_proto_pb2.MyMessage(foo='bar')
+ json_string = json_format.MessageToJson(message)
+
+ # Parse a json format string to proto object.
+ message = json_format.Parse(json_string, my_proto_pb2.MyMessage())
+"""
+
+__author__ = 'jieluo@google.com (Jie Luo)'
+
+
+import base64
+from collections import OrderedDict
+import json
+import math
+from operator import methodcaller
+import re
+import sys
+
+from google.protobuf.internal import type_checkers
+from google.protobuf import descriptor
+from google.protobuf import symbol_database
+
+
+_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S'
+_INT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT32,
+ descriptor.FieldDescriptor.CPPTYPE_UINT32,
+ descriptor.FieldDescriptor.CPPTYPE_INT64,
+ descriptor.FieldDescriptor.CPPTYPE_UINT64])
+_INT64_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT64,
+ descriptor.FieldDescriptor.CPPTYPE_UINT64])
+_FLOAT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_FLOAT,
+ descriptor.FieldDescriptor.CPPTYPE_DOUBLE])
+_INFINITY = 'Infinity'
+_NEG_INFINITY = '-Infinity'
+_NAN = 'NaN'
+
+_UNPAIRED_SURROGATE_PATTERN = re.compile(
+ u'[\ud800-\udbff](?![\udc00-\udfff])|(?<![\ud800-\udbff])[\udc00-\udfff]')
+
+_VALID_EXTENSION_NAME = re.compile(r'\[[a-zA-Z0-9\._]*\]$')
+
+
+class Error(Exception):
+ """Top-level module error for json_format."""
+
+
+class SerializeToJsonError(Error):
+ """Thrown if serialization to JSON fails."""
+
+
+class ParseError(Error):
+ """Thrown in case of parsing error."""
+
+
+def MessageToJson(
+ message,
+ including_default_value_fields=False,
+ preserving_proto_field_name=False,
+ indent=2,
+ sort_keys=False,
+ use_integers_for_enums=False,
+ descriptor_pool=None,
+ float_precision=None):
+ """Converts protobuf message to JSON format.
+
+ Args:
+ message: The protocol buffers message instance to serialize.
+ including_default_value_fields: If True, singular primitive fields,
+ repeated fields, and map fields will always be serialized. If
+ False, only serialize non-empty fields. Singular message fields
+ and oneof fields are not affected by this option.
+ preserving_proto_field_name: If True, use the original proto field
+ names as defined in the .proto file. If False, convert the field
+ names to lowerCamelCase.
+ indent: The JSON object will be pretty-printed with this indent level.
+ An indent level of 0 or negative will only insert newlines.
+ sort_keys: If True, then the output will be sorted by field names.
+ use_integers_for_enums: If true, print integers instead of enum names.
+ descriptor_pool: A Descriptor Pool for resolving types. If None use the
+ default.
+ float_precision: If set, use this to specify float field valid digits.
+
+ Returns:
+ A string containing the JSON formatted protocol buffer message.
+ """
+ printer = _Printer(
+ including_default_value_fields,
+ preserving_proto_field_name,
+ use_integers_for_enums,
+ descriptor_pool,
+ float_precision=float_precision)
+ return printer.ToJsonString(message, indent, sort_keys)
+
+
+def MessageToDict(
+ message,
+ including_default_value_fields=False,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ descriptor_pool=None,
+ float_precision=None):
+ """Converts protobuf message to a dictionary.
+
+ When the dictionary is encoded to JSON, it conforms to proto3 JSON spec.
+
+ Args:
+ message: The protocol buffers message instance to serialize.
+ including_default_value_fields: If True, singular primitive fields,
+ repeated fields, and map fields will always be serialized. If
+ False, only serialize non-empty fields. Singular message fields
+ and oneof fields are not affected by this option.
+ preserving_proto_field_name: If True, use the original proto field
+ names as defined in the .proto file. If False, convert the field
+ names to lowerCamelCase.
+ use_integers_for_enums: If true, print integers instead of enum names.
+ descriptor_pool: A Descriptor Pool for resolving types. If None use the
+ default.
+ float_precision: If set, use this to specify float field valid digits.
+
+ Returns:
+ A dict representation of the protocol buffer message.
+ """
+ printer = _Printer(
+ including_default_value_fields,
+ preserving_proto_field_name,
+ use_integers_for_enums,
+ descriptor_pool,
+ float_precision=float_precision)
+ # pylint: disable=protected-access
+ return printer._MessageToJsonObject(message)
+
+
+def _IsMapEntry(field):
+ return (field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and
+ field.message_type.has_options and
+ field.message_type.GetOptions().map_entry)
+
+
+class _Printer(object):
+ """JSON format printer for protocol message."""
+
+ def __init__(
+ self,
+ including_default_value_fields=False,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ descriptor_pool=None,
+ float_precision=None):
+ self.including_default_value_fields = including_default_value_fields
+ self.preserving_proto_field_name = preserving_proto_field_name
+ self.use_integers_for_enums = use_integers_for_enums
+ self.descriptor_pool = descriptor_pool
+ if float_precision:
+ self.float_format = '.{}g'.format(float_precision)
+ else:
+ self.float_format = None
+
+ def ToJsonString(self, message, indent, sort_keys):
+ js = self._MessageToJsonObject(message)
+ return json.dumps(js, indent=indent, sort_keys=sort_keys)
+
+ def _MessageToJsonObject(self, message):
+ """Converts message to an object according to Proto3 JSON Specification."""
+ message_descriptor = message.DESCRIPTOR
+ full_name = message_descriptor.full_name
+ if _IsWrapperMessage(message_descriptor):
+ return self._WrapperMessageToJsonObject(message)
+ if full_name in _WKTJSONMETHODS:
+ return methodcaller(_WKTJSONMETHODS[full_name][0], message)(self)
+ js = {}
+ return self._RegularMessageToJsonObject(message, js)
+
+ def _RegularMessageToJsonObject(self, message, js):
+ """Converts normal message according to Proto3 JSON Specification."""
+ fields = message.ListFields()
+
+ try:
+ for field, value in fields:
+ if self.preserving_proto_field_name:
+ name = field.name
+ else:
+ name = field.json_name
+ if _IsMapEntry(field):
+ # Convert a map field.
+ v_field = field.message_type.fields_by_name['value']
+ js_map = {}
+ for key in value:
+ if isinstance(key, bool):
+ if key:
+ recorded_key = 'true'
+ else:
+ recorded_key = 'false'
+ else:
+ recorded_key = str(key)
+ js_map[recorded_key] = self._FieldToJsonObject(
+ v_field, value[key])
+ js[name] = js_map
+ elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
+ # Convert a repeated field.
+ js[name] = [self._FieldToJsonObject(field, k)
+ for k in value]
+ elif field.is_extension:
+ name = '[%s]' % field.full_name
+ js[name] = self._FieldToJsonObject(field, value)
+ else:
+ js[name] = self._FieldToJsonObject(field, value)
+
+ # Serialize default value if including_default_value_fields is True.
+ if self.including_default_value_fields:
+ message_descriptor = message.DESCRIPTOR
+ for field in message_descriptor.fields:
+ # Singular message fields and oneof fields will not be affected.
+ if ((field.label != descriptor.FieldDescriptor.LABEL_REPEATED and
+ field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE) or
+ field.containing_oneof):
+ continue
+ if self.preserving_proto_field_name:
+ name = field.name
+ else:
+ name = field.json_name
+ if name in js:
+ # Skip the field which has been serialized already.
+ continue
+ if _IsMapEntry(field):
+ js[name] = {}
+ elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
+ js[name] = []
+ else:
+ js[name] = self._FieldToJsonObject(field, field.default_value)
+
+ except ValueError as e:
+ raise SerializeToJsonError(
+ 'Failed to serialize {0} field: {1}.'.format(field.name, e))
+
+ return js
+
  def _FieldToJsonObject(self, field, value):
    """Converts field value according to Proto3 JSON Specification.

    Args:
      field: The FieldDescriptor of the value being converted.
      value: The field value (scalar or sub-message).

    Returns:
      A JSON-compatible Python object (dict, list, str, bool, number or None).

    Raises:
      SerializeToJsonError: If a non-proto3 enum value has no named
        counterpart in the enum type.
    """
    if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
      return self._MessageToJsonObject(value)
    elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
      if self.use_integers_for_enums:
        return value
      if field.enum_type.full_name == 'google.protobuf.NullValue':
        # NullValue always serializes as JSON null.
        return None
      enum_value = field.enum_type.values_by_number.get(value, None)
      if enum_value is not None:
        return enum_value.name
      else:
        if field.file.syntax == 'proto3':
          # Proto3 keeps unknown enum numbers as plain integers.
          return value
        raise SerializeToJsonError('Enum field contains an integer value '
                                   'which can not mapped to an enum value.')
    elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
      if field.type == descriptor.FieldDescriptor.TYPE_BYTES:
        # Use base64 Data encoding for bytes
        return base64.b64encode(value).decode('utf-8')
      else:
        return value
    elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:
      return bool(value)
    elif field.cpp_type in _INT64_TYPES:
      # 64-bit integer types are emitted as decimal strings.
      return str(value)
    elif field.cpp_type in _FLOAT_TYPES:
      # Non-finite floats use the quoted spellings "Infinity"/"-Infinity"/"NaN".
      if math.isinf(value):
        if value < 0.0:
          return _NEG_INFINITY
        else:
          return _INFINITY
      if math.isnan(value):
        return _NAN
      if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT:
        if self.float_format:
          return float(format(value, self.float_format))
        else:
          return type_checkers.ToShortestFloat(value)

    # Doubles (and any cpp_type not handled above) pass through unchanged.
    return value
+
  def _AnyMessageToJsonObject(self, message):
    """Converts Any message according to Proto3 JSON Specification.

    The packed payload is deserialized (via its type_url) and re-serialized
    as JSON alongside an '@type' key.
    """
    if not message.ListFields():
      # An unset Any serializes as an empty JSON object.
      return {}
    # Must print @type first, use OrderedDict instead of {}
    js = OrderedDict()
    type_url = message.type_url
    js['@type'] = type_url
    sub_message = _CreateMessageFromTypeUrl(type_url, self.descriptor_pool)
    sub_message.ParseFromString(message.value)
    message_descriptor = sub_message.DESCRIPTOR
    full_name = message_descriptor.full_name
    if _IsWrapperMessage(message_descriptor):
      # Wrappers and other WKTs nest their JSON form under a 'value' key.
      js['value'] = self._WrapperMessageToJsonObject(sub_message)
      return js
    if full_name in _WKTJSONMETHODS:
      js['value'] = methodcaller(_WKTJSONMETHODS[full_name][0],
                                 sub_message)(self)
      return js
    # Regular messages merge their fields directly into js (after '@type').
    return self._RegularMessageToJsonObject(sub_message, js)
+
  def _GenericMessageToJsonObject(self, message):
    """Converts message according to Proto3 JSON Specification.

    Returns the message's own JSON string form.
    """
    # Duration, Timestamp and FieldMask have ToJsonString method to do the
    # convert. Users can also call the method directly.
    return message.ToJsonString()
+
  def _ValueMessageToJsonObject(self, message):
    """Converts Value message according to Proto3 JSON Specification.

    Dispatches on which oneof field of 'kind' is set and returns the
    corresponding plain Python value.
    """
    which = message.WhichOneof('kind')
    # If the Value message is not set treat as null_value when serialize
    # to JSON. The parse back result will be different from original message.
    if which is None or which == 'null_value':
      return None
    if which == 'list_value':
      return self._ListValueMessageToJsonObject(message.list_value)
    if which == 'struct_value':
      value = message.struct_value
    else:
      value = getattr(message, which)
    # Delegate remaining kinds (struct/number/string/bool) to the generic
    # field serializer using the oneof member's descriptor.
    oneof_descriptor = message.DESCRIPTOR.fields_by_name[which]
    return self._FieldToJsonObject(oneof_descriptor, value)
+
+ def _ListValueMessageToJsonObject(self, message):
+ """Converts ListValue message according to Proto3 JSON Specification."""
+ return [self._ValueMessageToJsonObject(value)
+ for value in message.values]
+
+ def _StructMessageToJsonObject(self, message):
+ """Converts Struct message according to Proto3 JSON Specification."""
+ fields = message.fields
+ ret = {}
+ for key in fields:
+ ret[key] = self._ValueMessageToJsonObject(fields[key])
+ return ret
+
  def _WrapperMessageToJsonObject(self, message):
    """Converts a wrapper message (e.g. Int32Value) to its bare JSON value.

    Serializes only the wrapper's single 'value' field, per the Proto3 JSON
    mapping for wrapper types.
    """
    return self._FieldToJsonObject(
        message.DESCRIPTOR.fields_by_name['value'], message.value)
+
+
+def _IsWrapperMessage(message_descriptor):
+ return message_descriptor.file.name == 'google/protobuf/wrappers.proto'
+
+
+def _DuplicateChecker(js):
+ result = {}
+ for name, value in js:
+ if name in result:
+ raise ParseError('Failed to load JSON: duplicate key {0}.'.format(name))
+ result[name] = value
+ return result
+
+
def _CreateMessageFromTypeUrl(type_url, descriptor_pool):
  """Creates a message from a type URL.

  Args:
    type_url: An Any type URL; the type name is everything after the last '/'.
    descriptor_pool: Descriptor pool to resolve the type in, or None to use
      the default symbol database's pool.

  Returns:
    A new, empty instance of the resolved message class.

  Raises:
    TypeError: If the type name cannot be found in the pool.  (Callers in
      this module catch TypeError and surface it as ParseError.)
  """
  db = symbol_database.Default()
  pool = db.pool if descriptor_pool is None else descriptor_pool
  type_name = type_url.split('/')[-1]
  try:
    message_descriptor = pool.FindMessageTypeByName(type_name)
  except KeyError:
    raise TypeError(
        'Can not find message descriptor by type_url: {0}.'.format(type_url))
  message_class = db.GetPrototype(message_descriptor)
  return message_class()
+
+
def Parse(text, message, ignore_unknown_fields=False, descriptor_pool=None):
  """Parses a JSON representation of a protocol message into a message.

  Args:
    text: Message JSON representation (str, or bytes decoded as UTF-8).
    message: A protocol buffer message to merge into.
    ignore_unknown_fields: If True, do not raise errors for unknown fields.
    descriptor_pool: A Descriptor Pool for resolving types. If None use the
      default.

  Returns:
    The same message passed as argument.

  Raises:
    ParseError: On JSON parsing problems.
  """
  if not isinstance(text, str):
    text = text.decode('utf-8')
  try:
    # _DuplicateChecker makes duplicate JSON keys a ParseError instead of
    # last-value-wins.
    js = json.loads(text, object_pairs_hook=_DuplicateChecker)
  except ValueError as e:
    raise ParseError('Failed to load JSON: {0}.'.format(str(e)))
  return ParseDict(js, message, ignore_unknown_fields, descriptor_pool)
+
+
def ParseDict(js_dict,
              message,
              ignore_unknown_fields=False,
              descriptor_pool=None):
  """Parses a JSON dictionary representation into a message.

  Args:
    js_dict: Dict representation of a JSON message.
    message: A protocol buffer message to merge into.
    ignore_unknown_fields: If True, do not raise errors for unknown fields.
    descriptor_pool: A Descriptor Pool for resolving types. If None use the
      default.

  Returns:
    The same message passed as argument.

  Raises:
    ParseError: In case of convert problems (propagated from the parser).
  """
  parser = _Parser(ignore_unknown_fields, descriptor_pool)
  parser.ConvertMessage(js_dict, message)
  return message
+
+
# Numeric JSON scalar types accepted for Value.number_value.
_INT_OR_FLOAT = (int, float)
+
+
class _Parser(object):
  """JSON format parser for protocol message.

  Mirrors _Printer: regular messages are filled field-by-field, well-known
  types are dispatched through _WKTJSONMETHODS, wrappers through
  _ConvertWrapperMessage.
  """

  def __init__(self, ignore_unknown_fields, descriptor_pool):
    # If True, unknown JSON keys are silently skipped instead of raising.
    self.ignore_unknown_fields = ignore_unknown_fields
    # May be None; _CreateMessageFromTypeUrl falls back to the default pool.
    self.descriptor_pool = descriptor_pool

  def ConvertMessage(self, value, message):
    """Convert a JSON object into a message.

    Args:
      value: A JSON object.
      message: A WKT or regular protocol message to record the data.

    Raises:
      ParseError: In case of convert problems.
    """
    message_descriptor = message.DESCRIPTOR
    full_name = message_descriptor.full_name
    if _IsWrapperMessage(message_descriptor):
      self._ConvertWrapperMessage(value, message)
    elif full_name in _WKTJSONMETHODS:
      # Index [1] of the table is the parser method name.
      methodcaller(_WKTJSONMETHODS[full_name][1], value, message)(self)
    else:
      self._ConvertFieldValuePair(value, message)

  def _ConvertFieldValuePair(self, js, message):
    """Convert field value pairs into regular message.

    Args:
      js: A JSON object to convert the field value pairs.
      message: A regular protocol message to record the data.

    Raises:
      ParseError: In case of problems converting.
    """
    names = []
    message_descriptor = message.DESCRIPTOR
    fields_by_json_name = dict((f.json_name, f)
                               for f in message_descriptor.fields)
    for name in js:
      try:
        # Resolve the key: json_name first, then proto field name, then
        # extension syntax "[full.name]".
        field = fields_by_json_name.get(name, None)
        if not field:
          field = message_descriptor.fields_by_name.get(name, None)
        if not field and _VALID_EXTENSION_NAME.match(name):
          if not message_descriptor.is_extendable:
            raise ParseError('Message type {0} does not have extensions'.format(
                message_descriptor.full_name))
          identifier = name[1:-1]  # strip [] brackets
          # pylint: disable=protected-access
          field = message.Extensions._FindExtensionByName(identifier)
          # pylint: enable=protected-access
          if not field:
            # Try looking for extension by the message type name, dropping the
            # field name following the final . separator in full_name.
            identifier = '.'.join(identifier.split('.')[:-1])
            # pylint: disable=protected-access
            field = message.Extensions._FindExtensionByName(identifier)
            # pylint: enable=protected-access
        if not field:
          if self.ignore_unknown_fields:
            continue
          raise ParseError(
              ('Message type "{0}" has no field named "{1}".\n'
               ' Available Fields(except extensions): {2}').format(
                   message_descriptor.full_name, name,
                   [f.json_name for f in message_descriptor.fields]))
        if name in names:
          raise ParseError('Message type "{0}" should not have multiple '
                           '"{1}" fields.'.format(
                               message.DESCRIPTOR.full_name, name))
        names.append(name)
        value = js[name]
        # Check no other oneof field is parsed.
        if field.containing_oneof is not None and value is not None:
          oneof_name = field.containing_oneof.name
          if oneof_name in names:
            raise ParseError('Message type "{0}" should not have multiple '
                             '"{1}" oneof fields.'.format(
                                 message.DESCRIPTOR.full_name, oneof_name))
          names.append(oneof_name)

        if value is None:
          # JSON null clears the field, except for Value/NullValue fields
          # where null is itself a legal value.
          if (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE
              and field.message_type.full_name == 'google.protobuf.Value'):
            sub_message = getattr(message, field.name)
            sub_message.null_value = 0
          elif (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM
                and field.enum_type.full_name == 'google.protobuf.NullValue'):
            setattr(message, field.name, 0)
          else:
            message.ClearField(field.name)
          continue

        # Parse field value.
        if _IsMapEntry(field):
          message.ClearField(field.name)
          self._ConvertMapFieldValue(value, message, field)
        elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
          message.ClearField(field.name)
          if not isinstance(value, list):
            raise ParseError('repeated field {0} must be in [] which is '
                             '{1}.'.format(name, value))
          if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
            # Repeated message field.
            for item in value:
              sub_message = getattr(message, field.name).add()
              # None is a null_value in Value.
              if (item is None and
                  sub_message.DESCRIPTOR.full_name != 'google.protobuf.Value'):
                raise ParseError('null is not allowed to be used as an element'
                                 ' in a repeated field.')
              self.ConvertMessage(item, sub_message)
          else:
            # Repeated scalar field.
            for item in value:
              if item is None:
                raise ParseError('null is not allowed to be used as an element'
                                 ' in a repeated field.')
              getattr(message, field.name).append(
                  _ConvertScalarFieldValue(item, field))
        elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
          if field.is_extension:
            sub_message = message.Extensions[field]
          else:
            sub_message = getattr(message, field.name)
          sub_message.SetInParent()
          self.ConvertMessage(value, sub_message)
        else:
          if field.is_extension:
            message.Extensions[field] = _ConvertScalarFieldValue(value, field)
          else:
            setattr(message, field.name, _ConvertScalarFieldValue(value, field))
      except ParseError as e:
        # Avoid double-prefixing errors from oneof members.
        if field and field.containing_oneof is None:
          raise ParseError('Failed to parse {0} field: {1}.'.format(name, e))
        else:
          raise ParseError(str(e))
      except ValueError as e:
        raise ParseError('Failed to parse {0} field: {1}.'.format(name, e))
      except TypeError as e:
        raise ParseError('Failed to parse {0} field: {1}.'.format(name, e))

  def _ConvertAnyMessage(self, value, message):
    """Convert a JSON representation into Any message."""
    if isinstance(value, dict) and not value:
      # Empty object -> leave the Any unset.
      return
    try:
      type_url = value['@type']
    except KeyError:
      raise ParseError('@type is missing when parsing any message.')

    sub_message = _CreateMessageFromTypeUrl(type_url, self.descriptor_pool)
    message_descriptor = sub_message.DESCRIPTOR
    full_name = message_descriptor.full_name
    if _IsWrapperMessage(message_descriptor):
      self._ConvertWrapperMessage(value['value'], sub_message)
    elif full_name in _WKTJSONMETHODS:
      methodcaller(
          _WKTJSONMETHODS[full_name][1], value['value'], sub_message)(self)
    else:
      # Temporarily drop '@type' so it is not treated as a field, then
      # restore it (the caller's dict must not be mutated on return).
      del value['@type']
      self._ConvertFieldValuePair(value, sub_message)
      value['@type'] = type_url
    # Sets Any message
    message.value = sub_message.SerializeToString()
    message.type_url = type_url

  def _ConvertGenericMessage(self, value, message):
    """Convert a JSON representation into message with FromJsonString."""
    # Duration, Timestamp, FieldMask have a FromJsonString method to do the
    # conversion. Users can also call the method directly.
    try:
      message.FromJsonString(value)
    except ValueError as e:
      raise ParseError(e)

  def _ConvertValueMessage(self, value, message):
    """Convert a JSON representation into Value message."""
    if isinstance(value, dict):
      self._ConvertStructMessage(value, message.struct_value)
    elif isinstance(value, list):
      # NOTE(review): stray space after "self." below is harmless; kept
      # byte-identical to the vendored upstream source.
      self. _ConvertListValueMessage(value, message.list_value)
    elif value is None:
      message.null_value = 0
    elif isinstance(value, bool):
      # bool must be checked before _INT_OR_FLOAT since bool subclasses int.
      message.bool_value = value
    elif isinstance(value, str):
      message.string_value = value
    elif isinstance(value, _INT_OR_FLOAT):
      message.number_value = value
    else:
      raise ParseError('Value {0} has unexpected type {1}.'.format(
          value, type(value)))

  def _ConvertListValueMessage(self, value, message):
    """Convert a JSON representation into ListValue message."""
    if not isinstance(value, list):
      raise ParseError(
          'ListValue must be in [] which is {0}.'.format(value))
    message.ClearField('values')
    for item in value:
      self._ConvertValueMessage(item, message.values.add())

  def _ConvertStructMessage(self, value, message):
    """Convert a JSON representation into Struct message."""
    if not isinstance(value, dict):
      raise ParseError(
          'Struct must be in a dict which is {0}.'.format(value))
    # Clear will mark the struct as modified so it will be created even if
    # there are no values.
    message.Clear()
    for key in value:
      self._ConvertValueMessage(value[key], message.fields[key])
    return

  def _ConvertWrapperMessage(self, value, message):
    """Convert a JSON representation into Wrapper message."""
    field = message.DESCRIPTOR.fields_by_name['value']
    setattr(message, 'value', _ConvertScalarFieldValue(value, field))

  def _ConvertMapFieldValue(self, value, message, field):
    """Convert map field value for a message map field.

    Args:
      value: A JSON object to convert the map field value.
      message: A protocol message to record the converted data.
      field: The descriptor of the map field to be converted.

    Raises:
      ParseError: In case of convert problems.
    """
    if not isinstance(value, dict):
      raise ParseError(
          'Map field {0} must be in a dict which is {1}.'.format(
              field.name, value))
    key_field = field.message_type.fields_by_name['key']
    value_field = field.message_type.fields_by_name['value']
    for key in value:
      # JSON map keys are always strings; require_str=True enforces that
      # e.g. bool keys arrive as "true"/"false".
      key_value = _ConvertScalarFieldValue(key, key_field, True)
      if value_field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
        self.ConvertMessage(value[key], getattr(
            message, field.name)[key_value])
      else:
        getattr(message, field.name)[key_value] = _ConvertScalarFieldValue(
            value[key], value_field)
+
+
def _ConvertScalarFieldValue(value, field, require_str=False):
  """Convert a single scalar field value.

  Args:
    value: A scalar value to convert the scalar field value.
    field: The descriptor of the field to convert.
    require_str: If True, the field value must be a str (used for map keys,
      which are always JSON strings).

  Returns:
    The converted scalar field value.  Note: if field.cpp_type matches none
    of the branches below, the function implicitly returns None.

  Raises:
    ParseError: In case of convert problems.
  """
  if field.cpp_type in _INT_TYPES:
    return _ConvertInteger(value)
  elif field.cpp_type in _FLOAT_TYPES:
    return _ConvertFloat(value, field)
  elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:
    return _ConvertBool(value, require_str)
  elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
    if field.type == descriptor.FieldDescriptor.TYPE_BYTES:
      if isinstance(value, str):
        encoded = value.encode('utf-8')
      else:
        encoded = value
      # Add extra padding '='
      padded_value = encoded + b'=' * (4 - len(encoded) % 4)
      # urlsafe decoding accepts both standard and URL-safe base64 alphabets.
      return base64.urlsafe_b64decode(padded_value)
    else:
      # Checking for unpaired surrogates appears to be unreliable,
      # depending on the specific Python version, so we check manually.
      if _UNPAIRED_SURROGATE_PATTERN.search(value):
        raise ParseError('Unpaired surrogate')
      return value
  elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
    # Convert an enum value: try the symbolic name first, then the number.
    enum_value = field.enum_type.values_by_name.get(value, None)
    if enum_value is None:
      try:
        number = int(value)
        enum_value = field.enum_type.values_by_number.get(number, None)
      except ValueError:
        raise ParseError('Invalid enum value {0} for enum type {1}.'.format(
            value, field.enum_type.full_name))
      if enum_value is None:
        if field.file.syntax == 'proto3':
          # Proto3 accepts unknown enums.
          return number
        raise ParseError('Invalid enum value {0} for enum type {1}.'.format(
            value, field.enum_type.full_name))
    return enum_value.number
+
+
+def _ConvertInteger(value):
+ """Convert an integer.
+
+ Args:
+ value: A scalar value to convert.
+
+ Returns:
+ The integer value.
+
+ Raises:
+ ParseError: If an integer couldn't be consumed.
+ """
+ if isinstance(value, float) and not value.is_integer():
+ raise ParseError('Couldn\'t parse integer: {0}.'.format(value))
+
+ if isinstance(value, str) and value.find(' ') != -1:
+ raise ParseError('Couldn\'t parse integer: "{0}".'.format(value))
+
+ if isinstance(value, bool):
+ raise ParseError('Bool value {0} is not acceptable for '
+ 'integer field.'.format(value))
+
+ return int(value)
+
+
+def _ConvertFloat(value, field):
+ """Convert an floating point number."""
+ if isinstance(value, float):
+ if math.isnan(value):
+ raise ParseError('Couldn\'t parse NaN, use quoted "NaN" instead.')
+ if math.isinf(value):
+ if value > 0:
+ raise ParseError('Couldn\'t parse Infinity or value too large, '
+ 'use quoted "Infinity" instead.')
+ else:
+ raise ParseError('Couldn\'t parse -Infinity or value too small, '
+ 'use quoted "-Infinity" instead.')
+ if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT:
+ # pylint: disable=protected-access
+ if value > type_checkers._FLOAT_MAX:
+ raise ParseError('Float value too large')
+ # pylint: disable=protected-access
+ if value < type_checkers._FLOAT_MIN:
+ raise ParseError('Float value too small')
+ if value == 'nan':
+ raise ParseError('Couldn\'t parse float "nan", use "NaN" instead.')
+ try:
+ # Assume Python compatible syntax.
+ return float(value)
+ except ValueError:
+ # Check alternative spellings.
+ if value == _NEG_INFINITY:
+ return float('-inf')
+ elif value == _INFINITY:
+ return float('inf')
+ elif value == _NAN:
+ return float('nan')
+ else:
+ raise ParseError('Couldn\'t parse float: {0}.'.format(value))
+
+
+def _ConvertBool(value, require_str):
+ """Convert a boolean value.
+
+ Args:
+ value: A scalar value to convert.
+ require_str: If True, value must be a str.
+
+ Returns:
+ The bool parsed.
+
+ Raises:
+ ParseError: If a boolean value couldn't be consumed.
+ """
+ if require_str:
+ if value == 'true':
+ return True
+ elif value == 'false':
+ return False
+ else:
+ raise ParseError('Expected "true" or "false", not {0}.'.format(value))
+
+ if not isinstance(value, bool):
+ raise ParseError('Expected true or false without quotes.')
+ return value
+
# Maps well-known-type full names to the [serializer, parser] method names
# on _Printer/_Parser, invoked via operator.methodcaller.
_WKTJSONMETHODS = {
    'google.protobuf.Any': ['_AnyMessageToJsonObject',
                            '_ConvertAnyMessage'],
    'google.protobuf.Duration': ['_GenericMessageToJsonObject',
                                 '_ConvertGenericMessage'],
    'google.protobuf.FieldMask': ['_GenericMessageToJsonObject',
                                  '_ConvertGenericMessage'],
    'google.protobuf.ListValue': ['_ListValueMessageToJsonObject',
                                  '_ConvertListValueMessage'],
    'google.protobuf.Struct': ['_StructMessageToJsonObject',
                               '_ConvertStructMessage'],
    'google.protobuf.Timestamp': ['_GenericMessageToJsonObject',
                                  '_ConvertGenericMessage'],
    'google.protobuf.Value': ['_ValueMessageToJsonObject',
                              '_ConvertValueMessage']
}
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/message.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/message.py
new file mode 100644
index 0000000000000000000000000000000000000000..ee46d0e4c9c68ec3d1f5e41f02b78b5632fd14a3
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/message.py
@@ -0,0 +1,421 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# TODO(robinson): We should just make these methods all "pure-virtual" and move
+# all implementation out, into reflection.py for now.
+
+
+"""Contains an abstract base class for protocol messages."""
+
+__author__ = 'robinson@google.com (Will Robinson)'
+
class Error(Exception):
  """Base error type for this module (message.py)."""
  pass
+
+
class DecodeError(Error):
  """Exception raised when deserializing (parsing) messages fails."""
  pass
+
+
class EncodeError(Error):
  """Exception raised when serializing (encoding) messages fails."""
  pass
+
+
class Message(object):

  """Abstract base class for protocol messages.

  Protocol message classes are almost always generated by the protocol
  compiler. These generated types subclass Message and implement the methods
  shown below.
  """

  # TODO(robinson): Link to an HTML document here.

  # TODO(robinson): Document that instances of this class will also
  # have an Extensions attribute with __getitem__ and __setitem__.
  # Again, not sure how to best convey this.

  # TODO(robinson): Document that the class must also have a static
  # RegisterExtension(extension_field) method.
  # Not sure how to best express at this point.

  # TODO(robinson): Document these fields and methods.

  __slots__ = []

  #: The :class:`google.protobuf.descriptor.Descriptor` for this message type.
  DESCRIPTOR = None

  def __deepcopy__(self, memo=None):
    # Deep copy by merging into a fresh instance of the same generated class.
    clone = type(self)()
    clone.MergeFrom(self)
    return clone

  def __eq__(self, other_msg):
    """Recursively compares two messages by value and structure."""
    raise NotImplementedError

  def __ne__(self, other_msg):
    # Can't just say self != other_msg, since that would infinitely recurse. :)
    return not self == other_msg

  def __hash__(self):
    # Messages are mutable, so hashing is deliberately unsupported.
    raise TypeError('unhashable object')

  def __str__(self):
    """Outputs a human-readable representation of the message."""
    raise NotImplementedError

  def __unicode__(self):
    """Outputs a human-readable representation of the message."""
    raise NotImplementedError

  def MergeFrom(self, other_msg):
    """Merges the contents of the specified message into current message.

    This method merges the contents of the specified message into the current
    message. Singular fields that are set in the specified message overwrite
    the corresponding fields in the current message. Repeated fields are
    appended. Singular sub-messages and groups are recursively merged.

    Args:
      other_msg (Message): A message to merge into the current message.
    """
    raise NotImplementedError

  def CopyFrom(self, other_msg):
    """Copies the content of the specified message into the current message.

    The method clears the current message and then merges the specified
    message using MergeFrom.

    Args:
      other_msg (Message): A message to copy into the current one.
    """
    if self is other_msg:
      return
    self.Clear()
    self.MergeFrom(other_msg)

  def Clear(self):
    """Clears all data that was set in the message."""
    raise NotImplementedError

  def SetInParent(self):
    """Mark this as present in the parent.

    This normally happens automatically when you assign a field of a
    sub-message, but sometimes you want to make the sub-message
    present while keeping it empty. If you find yourself using this,
    you may want to reconsider your design.
    """
    raise NotImplementedError

  def IsInitialized(self):
    """Checks if the message is initialized.

    Returns:
      bool: The method returns True if the message is initialized (i.e. all of
      its required fields are set).
    """
    raise NotImplementedError

  # TODO(robinson): MergeFromString() should probably return None and be
  # implemented in terms of a helper that returns the # of bytes read. Our
  # deserialization routines would use the helper when recursively
  # deserializing, but the end user would almost always just want the no-return
  # MergeFromString().

  def MergeFromString(self, serialized):
    """Merges serialized protocol buffer data into this message.

    When we find a field in `serialized` that is already present
    in this message:

    - If it's a "repeated" field, we append to the end of our list.
    - Else, if it's a scalar, we overwrite our field.
    - Else, (it's a nonrepeated composite), we recursively merge
      into the existing composite.

    Args:
      serialized (bytes): Any object that allows us to call
        ``memoryview(serialized)`` to access a string of bytes using the
        buffer interface.

    Returns:
      int: The number of bytes read from `serialized`.
      For non-group messages, this will always be `len(serialized)`,
      but for messages which are actually groups, this will
      generally be less than `len(serialized)`, since we must
      stop when we reach an ``END_GROUP`` tag. Note that if
      we *do* stop because of an ``END_GROUP`` tag, the number
      of bytes returned does not include the bytes
      for the ``END_GROUP`` tag information.

    Raises:
      DecodeError: if the input cannot be parsed.
    """
    # TODO(robinson): Document handling of unknown fields.
    # TODO(robinson): When we switch to a helper, this will return None.
    raise NotImplementedError

  def ParseFromString(self, serialized):
    """Parse serialized protocol buffer data into this message.

    Like :func:`MergeFromString()`, except we clear the object first.
    """
    self.Clear()
    return self.MergeFromString(serialized)

  def SerializeToString(self, **kwargs):
    """Serializes the protocol message to a binary string.

    Keyword Args:
      deterministic (bool): If true, requests deterministic serialization
        of the protobuf, with predictable ordering of map keys.

    Returns:
      A binary string representation of the message if all of the required
      fields in the message are set (i.e. the message is initialized).

    Raises:
      EncodeError: if the message isn't initialized (see :func:`IsInitialized`).
    """
    raise NotImplementedError

  def SerializePartialToString(self, **kwargs):
    """Serializes the protocol message to a binary string.

    This method is similar to SerializeToString but doesn't check if the
    message is initialized.

    Keyword Args:
      deterministic (bool): If true, requests deterministic serialization
        of the protobuf, with predictable ordering of map keys.

    Returns:
      bytes: A serialized representation of the partial message.
    """
    raise NotImplementedError

  # TODO(robinson): Decide whether we like these better
  # than auto-generated has_foo() and clear_foo() methods
  # on the instances themselves. This way is less consistent
  # with C++, but it makes reflection-type access easier and
  # reduces the number of magically autogenerated things.
  #
  # TODO(robinson): Be sure to document (and test) exactly
  # which field names are accepted here. Are we case-sensitive?
  # What do we do with fields that share names with Python keywords
  # like 'lambda' and 'yield'?
  #
  # nnorwitz says:
  # """
  # Typically (in python), an underscore is appended to names that are
  # keywords. So they would become lambda_ or yield_.
  # """
  def ListFields(self):
    """Returns a list of (FieldDescriptor, value) tuples for present fields.

    A message field is non-empty if HasField() would return true. A singular
    primitive field is non-empty if HasField() would return true in proto2 or it
    is non zero in proto3. A repeated field is non-empty if it contains at least
    one element. The fields are ordered by field number.

    Returns:
      list[tuple(FieldDescriptor, value)]: field descriptors and values
      for all fields in the message which are not empty. The values vary by
      field type.
    """
    raise NotImplementedError

  def HasField(self, field_name):
    """Checks if a certain field is set for the message.

    For a oneof group, checks if any field inside is set. Note that if the
    field_name is not defined in the message descriptor, :exc:`ValueError` will
    be raised.

    Args:
      field_name (str): The name of the field to check for presence.

    Returns:
      bool: Whether a value has been set for the named field.

    Raises:
      ValueError: if the `field_name` is not a member of this message.
    """
    raise NotImplementedError

  def ClearField(self, field_name):
    """Clears the contents of a given field.

    Inside a oneof group, clears the field set. If the name neither refers to a
    defined field or oneof group, :exc:`ValueError` is raised.

    Args:
      field_name (str): The name of the field to check for presence.

    Raises:
      ValueError: if the `field_name` is not a member of this message.
    """
    raise NotImplementedError

  def WhichOneof(self, oneof_group):
    """Returns the name of the field that is set inside a oneof group.

    If no field is set, returns None.

    Args:
      oneof_group (str): the name of the oneof group to check.

    Returns:
      str or None: The name of the group that is set, or None.

    Raises:
      ValueError: no group with the given name exists
    """
    raise NotImplementedError

  def HasExtension(self, extension_handle):
    """Checks if a certain extension is present for this message.

    Extensions are retrieved using the :attr:`Extensions` mapping (if present).

    Args:
      extension_handle: The handle for the extension to check.

    Returns:
      bool: Whether the extension is present for this message.

    Raises:
      KeyError: if the extension is repeated. Similar to repeated fields,
        there is no separate notion of presence: a "not present" repeated
        extension is an empty list.
    """
    raise NotImplementedError

  def ClearExtension(self, extension_handle):
    """Clears the contents of a given extension.

    Args:
      extension_handle: The handle for the extension to clear.
    """
    raise NotImplementedError

  def UnknownFields(self):
    """Returns the UnknownFieldSet.

    Returns:
      UnknownFieldSet: The unknown fields stored in this message.
    """
    raise NotImplementedError

  def DiscardUnknownFields(self):
    """Clears all fields in the :class:`UnknownFieldSet`.

    This operation is recursive for nested message.
    """
    raise NotImplementedError

  def ByteSize(self):
    """Returns the serialized size of this message.

    Recursively calls ByteSize() on all contained messages.

    Returns:
      int: The number of bytes required to serialize this message.
    """
    raise NotImplementedError

  @classmethod
  def FromString(cls, s):
    raise NotImplementedError

  @staticmethod
  def RegisterExtension(extension_handle):
    raise NotImplementedError

  def _SetListener(self, message_listener):
    """Internal method used by the protocol message implementation.
    Clients should not call this directly.

    Sets a listener that this message will call on certain state transitions.

    The purpose of this method is to register back-edges from children to
    parents at runtime, for the purpose of setting "has" bits and
    byte-size-dirty bits in the parent and ancestor objects whenever a child or
    descendant object is modified.

    If the client wants to disconnect this Message from the object tree, she
    explicitly sets callback to None.

    If message_listener is None, unregisters any existing listener. Otherwise,
    message_listener must implement the MessageListener interface in
    internal/message_listener.py, and we discard any listener registered
    via a previous _SetListener() call.
    """
    raise NotImplementedError

  def __getstate__(self):
    """Support the pickle protocol."""
    # Pickle the wire-format bytes rather than the Python object graph.
    return dict(serialized=self.SerializePartialToString())

  def __setstate__(self, state):
    """Support the pickle protocol."""
    self.__init__()
    serialized = state['serialized']
    # On Python 3, using encoding='latin1' is required for unpickling
    # protos pickled by Python 2.
    if not isinstance(serialized, bytes):
      serialized = serialized.encode('latin1')
    self.ParseFromString(serialized)

  def __reduce__(self):
    message_descriptor = self.DESCRIPTOR
    if message_descriptor.containing_type is None:
      return type(self), (), self.__getstate__()
    # the message type must be nested.
    # Python does not pickle nested classes; use the symbol_database on the
    # receiving end.
    container = message_descriptor
    return (_InternalConstructMessage, (container.full_name,),
            self.__getstate__())
+
+
def _InternalConstructMessage(full_name):
  """Constructs a nested message by its fully-qualified name (pickle helper)."""
  from google.protobuf import symbol_database  # pylint:disable=g-import-not-at-top

  message_class = symbol_database.Default().GetSymbol(full_name)
  return message_class()
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/message_factory.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/message_factory.py
new file mode 100644
index 0000000000000000000000000000000000000000..3656fa68747d520cd166c36b4752bc15cc211015
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/message_factory.py
@@ -0,0 +1,185 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Provides a factory class for generating dynamic messages.
+
+The easiest way to use this class is if you have access to the FileDescriptor
+protos containing the messages you want to create you can just do the following:
+
+message_classes = message_factory.GetMessages(iterable_of_file_descriptors)
+my_proto_instance = message_classes['some.proto.package.MessageName']()
+"""
+
+__author__ = 'matthewtoia@google.com (Matt Toia)'
+
+from google.protobuf.internal import api_implementation
+from google.protobuf import descriptor_pool
+from google.protobuf import message
+
+if api_implementation.Type() == 'cpp':
+ from google.protobuf.pyext import cpp_message as message_impl
+else:
+ from google.protobuf.internal import python_message as message_impl
+
+
+# The type of all Message classes.
+_GENERATED_PROTOCOL_MESSAGE_TYPE = message_impl.GeneratedProtocolMessageType
+
+
+class MessageFactory(object):
+ """Factory for creating Proto2 messages from descriptors in a pool."""
+
+ def __init__(self, pool=None):
+ """Initializes a new factory."""
+ self.pool = pool or descriptor_pool.DescriptorPool()
+
+ # local cache of all classes built from protobuf descriptors
+ self._classes = {}
+
+ def GetPrototype(self, descriptor):
+ """Obtains a proto2 message class based on the passed in descriptor.
+
+ Passing a descriptor with a fully qualified name matching a previous
+ invocation will cause the same class to be returned.
+
+ Args:
+ descriptor: The descriptor to build from.
+
+ Returns:
+ A class describing the passed in descriptor.
+ """
+ if descriptor not in self._classes:
+ result_class = self.CreatePrototype(descriptor)
+ # The assignment to _classes is redundant for the base implementation, but
+ # might avoid confusion in cases where CreatePrototype gets overridden and
+ # does not call the base implementation.
+ self._classes[descriptor] = result_class
+ return result_class
+ return self._classes[descriptor]
+
+ def CreatePrototype(self, descriptor):
+ """Builds a proto2 message class based on the passed in descriptor.
+
+ Don't call this function directly, it always creates a new class. Call
+ GetPrototype() instead. This method is meant to be overridden in subclasses
+ to perform additional operations on the newly constructed class.
+
+ Args:
+ descriptor: The descriptor to build from.
+
+ Returns:
+ A class describing the passed in descriptor.
+ """
+ descriptor_name = descriptor.name
+ result_class = _GENERATED_PROTOCOL_MESSAGE_TYPE(
+ descriptor_name,
+ (message.Message,),
+ {
+ 'DESCRIPTOR': descriptor,
+ # If module not set, it wrongly points to message_factory module.
+ '__module__': None,
+ })
+ result_class._FACTORY = self # pylint: disable=protected-access
+ # Assign in _classes before doing recursive calls to avoid infinite
+ # recursion.
+ self._classes[descriptor] = result_class
+ for field in descriptor.fields:
+ if field.message_type:
+ self.GetPrototype(field.message_type)
+ for extension in result_class.DESCRIPTOR.extensions:
+ if extension.containing_type not in self._classes:
+ self.GetPrototype(extension.containing_type)
+ extended_class = self._classes[extension.containing_type]
+ extended_class.RegisterExtension(extension)
+ return result_class
+
+ def GetMessages(self, files):
+ """Gets all the messages from a specified file.
+
+ This will find and resolve dependencies, failing if the descriptor
+ pool cannot satisfy them.
+
+ Args:
+ files: The file names to extract messages from.
+
+ Returns:
+ A dictionary mapping proto names to the message classes. This will include
+ any dependent messages as well as any messages defined in the same file as
+ a specified message.
+ """
+ result = {}
+ for file_name in files:
+ file_desc = self.pool.FindFileByName(file_name)
+ for desc in file_desc.message_types_by_name.values():
+ result[desc.full_name] = self.GetPrototype(desc)
+
+ # While the extension FieldDescriptors are created by the descriptor pool,
+ # the python classes created in the factory need them to be registered
+ # explicitly, which is done below.
+ #
+ # The call to RegisterExtension will specifically check if the
+ # extension was already registered on the object and either
+ # ignore the registration if the original was the same, or raise
+ # an error if they were different.
+
+ for extension in file_desc.extensions_by_name.values():
+ if extension.containing_type not in self._classes:
+ self.GetPrototype(extension.containing_type)
+ extended_class = self._classes[extension.containing_type]
+ extended_class.RegisterExtension(extension)
+ return result
+
+
+_FACTORY = MessageFactory()
+
+
+def GetMessages(file_protos):
+ """Builds a dictionary of all the messages available in a set of files.
+
+ Args:
+ file_protos: Iterable of FileDescriptorProto to build messages out of.
+
+ Returns:
+ A dictionary mapping proto names to the message classes. This will include
+ any dependent messages as well as any messages defined in the same file as
+ a specified message.
+ """
+ # The cpp implementation of the protocol buffer library requires to add the
+ # message in topological order of the dependency graph.
+ file_by_name = {file_proto.name: file_proto for file_proto in file_protos}
+ def _AddFile(file_proto):
+ for dependency in file_proto.dependency:
+ if dependency in file_by_name:
+ # Remove from elements to be visited, in order to cut cycles.
+ _AddFile(file_by_name.pop(dependency))
+ _FACTORY.pool.Add(file_proto)
+ while file_by_name:
+ _AddFile(file_by_name.popitem()[1])
+ return _FACTORY.GetMessages([file_proto.name for file_proto in file_protos])
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/proto_builder.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/proto_builder.py
new file mode 100644
index 0000000000000000000000000000000000000000..a4667ce63ec3c971a7233961da9a3adf100aa6b7
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/proto_builder.py
@@ -0,0 +1,134 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Dynamic Protobuf class creator."""
+
+from collections import OrderedDict
+import hashlib
+import os
+
+from google.protobuf import descriptor_pb2
+from google.protobuf import descriptor
+from google.protobuf import message_factory
+
+
+def _GetMessageFromFactory(factory, full_name):
+ """Get a proto class from the MessageFactory by name.
+
+ Args:
+ factory: a MessageFactory instance.
+ full_name: str, the fully qualified name of the proto type.
+ Returns:
+ A class, for the type identified by full_name.
+ Raises:
+ KeyError, if the proto is not found in the factory's descriptor pool.
+ """
+ proto_descriptor = factory.pool.FindMessageTypeByName(full_name)
+ proto_cls = factory.GetPrototype(proto_descriptor)
+ return proto_cls
+
+
+def MakeSimpleProtoClass(fields, full_name=None, pool=None):
+ """Create a Protobuf class whose fields are basic types.
+
+ Note: this doesn't validate field names!
+
+ Args:
+ fields: dict of {name: field_type} mappings for each field in the proto. If
+ this is an OrderedDict the order will be maintained, otherwise the
+ fields will be sorted by name.
+ full_name: optional str, the fully-qualified name of the proto type.
+ pool: optional DescriptorPool instance.
+ Returns:
+ a class, the new protobuf class with a FileDescriptor.
+ """
+ factory = message_factory.MessageFactory(pool=pool)
+
+ if full_name is not None:
+ try:
+ proto_cls = _GetMessageFromFactory(factory, full_name)
+ return proto_cls
+ except KeyError:
+ # The factory's DescriptorPool doesn't know about this class yet.
+ pass
+
+ # Get a list of (name, field_type) tuples from the fields dict. If fields was
+ # an OrderedDict we keep the order, but otherwise we sort the field to ensure
+ # consistent ordering.
+ field_items = fields.items()
+ if not isinstance(fields, OrderedDict):
+ field_items = sorted(field_items)
+
+ # Use a consistent file name that is unlikely to conflict with any imported
+ # proto files.
+ fields_hash = hashlib.sha1()
+ for f_name, f_type in field_items:
+ fields_hash.update(f_name.encode('utf-8'))
+ fields_hash.update(str(f_type).encode('utf-8'))
+ proto_file_name = fields_hash.hexdigest() + '.proto'
+
+ # If the proto is anonymous, use the same hash to name it.
+ if full_name is None:
+ full_name = ('net.proto2.python.public.proto_builder.AnonymousProto_' +
+ fields_hash.hexdigest())
+ try:
+ proto_cls = _GetMessageFromFactory(factory, full_name)
+ return proto_cls
+ except KeyError:
+ # The factory's DescriptorPool doesn't know about this class yet.
+ pass
+
+ # This is the first time we see this proto: add a new descriptor to the pool.
+ factory.pool.Add(
+ _MakeFileDescriptorProto(proto_file_name, full_name, field_items))
+ return _GetMessageFromFactory(factory, full_name)
+
+
+def _MakeFileDescriptorProto(proto_file_name, full_name, field_items):
+ """Populate FileDescriptorProto for MessageFactory's DescriptorPool."""
+ package, name = full_name.rsplit('.', 1)
+ file_proto = descriptor_pb2.FileDescriptorProto()
+ file_proto.name = os.path.join(package.replace('.', '/'), proto_file_name)
+ file_proto.package = package
+ desc_proto = file_proto.message_type.add()
+ desc_proto.name = name
+ for f_number, (f_name, f_type) in enumerate(field_items, 1):
+ field_proto = desc_proto.field.add()
+ field_proto.name = f_name
+ # If the number falls in the reserved range, reassign it to the correct
+ # number after the range.
+ if f_number >= descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER:
+ f_number += (
+ descriptor.FieldDescriptor.LAST_RESERVED_FIELD_NUMBER -
+ descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER + 1)
+ field_proto.number = f_number
+ field_proto.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
+ field_proto.type = f_type
+ return file_proto
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/pyext/__init__.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/pyext/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/pyext/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/pyext/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..401fa8ada3b8d3c8535a758961f15346c6f0017d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/pyext/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/pyext/__pycache__/cpp_message.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/pyext/__pycache__/cpp_message.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c90e4fd20611ab6ff9a39024643ef02d19bb5a91
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/pyext/__pycache__/cpp_message.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/pyext/_message.cpython-38-x86_64-linux-gnu.so b/monEnvTP/lib/python3.8/site-packages/google/protobuf/pyext/_message.cpython-38-x86_64-linux-gnu.so
new file mode 100755
index 0000000000000000000000000000000000000000..01d2489a92b362f9000922562992245bb136b4ef
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/pyext/_message.cpython-38-x86_64-linux-gnu.so differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/pyext/cpp_message.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/pyext/cpp_message.py
new file mode 100644
index 0000000000000000000000000000000000000000..fc8eb32d79f60ff95b328ec5a828593ab78e1802
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/pyext/cpp_message.py
@@ -0,0 +1,65 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Protocol message implementation hooks for C++ implementation.
+
+Contains helper functions used to create protocol message classes from
+Descriptor objects at runtime backed by the protocol buffer C++ API.
+"""
+
+__author__ = 'tibell@google.com (Johan Tibell)'
+
+from google.protobuf.pyext import _message
+
+
+class GeneratedProtocolMessageType(_message.MessageMeta):
+
+ """Metaclass for protocol message classes created at runtime from Descriptors.
+
+ The protocol compiler currently uses this metaclass to create protocol
+ message classes at runtime. Clients can also manually create their own
+ classes at runtime, as in this example:
+
+ mydescriptor = Descriptor(.....)
+ factory = symbol_database.Default()
+ factory.pool.AddDescriptor(mydescriptor)
+ MyProtoClass = factory.GetPrototype(mydescriptor)
+ myproto_instance = MyProtoClass()
+ myproto_instance.foo_field = 23
+ ...
+
+ The above example will not work for nested types. If you wish to include them,
+ use reflection.MakeClass() instead of manually instantiating the class in
+ order to create the appropriate class structure.
+ """
+
+ # Must be consistent with the protocol-compiler code in
+ # proto2/compiler/internal/generator.*.
+ _DESCRIPTOR_KEY = 'DESCRIPTOR'
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/reflection.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/reflection.py
new file mode 100644
index 0000000000000000000000000000000000000000..81e18859a804d589d67b0a0642de718ba1bbce13
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/reflection.py
@@ -0,0 +1,95 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# This code is meant to work on Python 2.4 and above only.
+
+"""Contains a metaclass and helper functions used to create
+protocol message classes from Descriptor objects at runtime.
+
+Recall that a metaclass is the "type" of a class.
+(A class is to a metaclass what an instance is to a class.)
+
+In this case, we use the GeneratedProtocolMessageType metaclass
+to inject all the useful functionality into the classes
+output by the protocol compiler at compile-time.
+
+The upshot of all this is that the real implementation
+details for ALL pure-Python protocol buffers are *here in
+this file*.
+"""
+
+__author__ = 'robinson@google.com (Will Robinson)'
+
+
+from google.protobuf import message_factory
+from google.protobuf import symbol_database
+
+# The type of all Message classes.
+# Part of the public interface, but normally only used by message factories.
+GeneratedProtocolMessageType = message_factory._GENERATED_PROTOCOL_MESSAGE_TYPE
+
+MESSAGE_CLASS_CACHE = {}
+
+
+# Deprecated. Please NEVER use reflection.ParseMessage().
+def ParseMessage(descriptor, byte_str):
+ """Generate a new Message instance from this Descriptor and a byte string.
+
+ DEPRECATED: ParseMessage is deprecated because it is using MakeClass().
+ Please use MessageFactory.GetPrototype() instead.
+
+ Args:
+ descriptor: Protobuf Descriptor object
+ byte_str: Serialized protocol buffer byte string
+
+ Returns:
+ Newly created protobuf Message object.
+ """
+ result_class = MakeClass(descriptor)
+ new_msg = result_class()
+ new_msg.ParseFromString(byte_str)
+ return new_msg
+
+
+# Deprecated. Please NEVER use reflection.MakeClass().
+def MakeClass(descriptor):
+ """Construct a class object for a protobuf described by descriptor.
+
+ DEPRECATED: use MessageFactory.GetPrototype() instead.
+
+ Args:
+ descriptor: A descriptor.Descriptor object describing the protobuf.
+ Returns:
+ The Message class object described by the descriptor.
+ """
+ # Original implementation leads to duplicate message classes, which won't play
+ # well with extensions. Message factory info is also missing.
+ # Redirect to message_factory.
+ return symbol_database.Default().GetPrototype(descriptor)
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/service.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/service.py
new file mode 100644
index 0000000000000000000000000000000000000000..5625246324cad3c71108a4466466d1d3b1568907
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/service.py
@@ -0,0 +1,228 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""DEPRECATED: Declares the RPC service interfaces.
+
+This module declares the abstract interfaces underlying proto2 RPC
+services. These are intended to be independent of any particular RPC
+implementation, so that proto2 services can be used on top of a variety
+of implementations. Starting with version 2.3.0, RPC implementations should
+not try to build on these, but should instead provide code generator plugins
+which generate code specific to the particular RPC implementation. This way
+the generated code can be more appropriate for the implementation in use
+and can avoid unnecessary layers of indirection.
+"""
+
+__author__ = 'petar@google.com (Petar Petrov)'
+
+
+class RpcException(Exception):
+ """Exception raised on failed blocking RPC method call."""
+ pass
+
+
+class Service(object):
+
+ """Abstract base interface for protocol-buffer-based RPC services.
+
+ Services themselves are abstract classes (implemented either by servers or as
+ stubs), but they subclass this base interface. The methods of this
+ interface can be used to call the methods of the service without knowing
+ its exact type at compile time (analogous to the Message interface).
+ """
+
+ def GetDescriptor():
+ """Retrieves this service's descriptor."""
+ raise NotImplementedError
+
+ def CallMethod(self, method_descriptor, rpc_controller,
+ request, done):
+ """Calls a method of the service specified by method_descriptor.
+
+ If "done" is None then the call is blocking and the response
+ message will be returned directly. Otherwise the call is asynchronous
+ and "done" will later be called with the response value.
+
+ In the blocking case, RpcException will be raised on error.
+
+ Preconditions:
+
+ * method_descriptor.service == GetDescriptor
+ * request is of the exact same class as returned by
+ GetRequestClass(method).
+ * After the call has started, the request must not be modified.
+ * "rpc_controller" is of the correct type for the RPC implementation being
+ used by this Service. For stubs, the "correct type" depends on the
+ RpcChannel which the stub is using.
+
+ Postconditions:
+
+ * "done" will be called when the method is complete. This may be
+ before CallMethod() returns or it may be at some point in the future.
+ * If the RPC failed, the response value passed to "done" will be None.
+ Further details about the failure can be found by querying the
+ RpcController.
+ """
+ raise NotImplementedError
+
+ def GetRequestClass(self, method_descriptor):
+ """Returns the class of the request message for the specified method.
+
+ CallMethod() requires that the request is of a particular subclass of
+ Message. GetRequestClass() gets the default instance of this required
+ type.
+
+ Example:
+ method = service.GetDescriptor().FindMethodByName("Foo")
+ request = stub.GetRequestClass(method)()
+ request.ParseFromString(input)
+ service.CallMethod(method, request, callback)
+ """
+ raise NotImplementedError
+
+ def GetResponseClass(self, method_descriptor):
+ """Returns the class of the response message for the specified method.
+
+ This method isn't really needed, as the RpcChannel's CallMethod constructs
+ the response protocol message. It's provided anyway in case it is useful
+ for the caller to know the response type in advance.
+ """
+ raise NotImplementedError
+
+
+class RpcController(object):
+
+ """An RpcController mediates a single method call.
+
+ The primary purpose of the controller is to provide a way to manipulate
+ settings specific to the RPC implementation and to find out about RPC-level
+ errors. The methods provided by the RpcController interface are intended
+ to be a "least common denominator" set of features which we expect all
+ implementations to support. Specific implementations may provide more
+ advanced features (e.g. deadline propagation).
+ """
+
+ # Client-side methods below
+
+ def Reset(self):
+ """Resets the RpcController to its initial state.
+
+ After the RpcController has been reset, it may be reused in
+ a new call. Must not be called while an RPC is in progress.
+ """
+ raise NotImplementedError
+
+ def Failed(self):
+ """Returns true if the call failed.
+
+ After a call has finished, returns true if the call failed. The possible
+ reasons for failure depend on the RPC implementation. Failed() must not
+ be called before a call has finished. If Failed() returns true, the
+ contents of the response message are undefined.
+ """
+ raise NotImplementedError
+
+ def ErrorText(self):
+ """If Failed is true, returns a human-readable description of the error."""
+ raise NotImplementedError
+
+ def StartCancel(self):
+ """Initiate cancellation.
+
+ Advises the RPC system that the caller desires that the RPC call be
+ canceled. The RPC system may cancel it immediately, may wait awhile and
+ then cancel it, or may not even cancel the call at all. If the call is
+ canceled, the "done" callback will still be called and the RpcController
+ will indicate that the call failed at that time.
+ """
+ raise NotImplementedError
+
+ # Server-side methods below
+
+ def SetFailed(self, reason):
+ """Sets a failure reason.
+
+ Causes Failed() to return true on the client side. "reason" will be
+ incorporated into the message returned by ErrorText(). If you find
+ you need to return machine-readable information about failures, you
+ should incorporate it into your response protocol buffer and should
+ NOT call SetFailed().
+ """
+ raise NotImplementedError
+
+ def IsCanceled(self):
+ """Checks if the client cancelled the RPC.
+
+ If true, indicates that the client canceled the RPC, so the server may
+ as well give up on replying to it. The server should still call the
+ final "done" callback.
+ """
+ raise NotImplementedError
+
+ def NotifyOnCancel(self, callback):
+ """Sets a callback to invoke on cancel.
+
+ Asks that the given callback be called when the RPC is canceled. The
+ callback will always be called exactly once. If the RPC completes without
+ being canceled, the callback will be called after completion. If the RPC
+ has already been canceled when NotifyOnCancel() is called, the callback
+ will be called immediately.
+
+ NotifyOnCancel() must be called no more than once per request.
+ """
+ raise NotImplementedError
+
+
+class RpcChannel(object):
+
+ """Abstract interface for an RPC channel.
+
+ An RpcChannel represents a communication line to a service which can be used
+ to call that service's methods. The service may be running on another
+ machine. Normally, you should not use an RpcChannel directly, but instead
+ construct a stub {@link Service} wrapping it. Example:
+
+ Example:
+ RpcChannel channel = rpcImpl.Channel("remotehost.example.com:1234")
+ RpcController controller = rpcImpl.Controller()
+ MyService service = MyService_Stub(channel)
+ service.MyMethod(controller, request, callback)
+ """
+
+ def CallMethod(self, method_descriptor, rpc_controller,
+ request, response_class, done):
+ """Calls the method identified by the descriptor.
+
+ Call the given method of the remote service. The signature of this
+ procedure looks the same as Service.CallMethod(), but the requirements
+ are less strict in one important way: the request object doesn't have to
+ be of any specific class as long as its descriptor is method.input_type.
+ """
+ raise NotImplementedError
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/service_reflection.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/service_reflection.py
new file mode 100644
index 0000000000000000000000000000000000000000..75c51ff3221af114d73e9eca6e74b14e32a59a57
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/service_reflection.py
@@ -0,0 +1,287 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Contains metaclasses used to create protocol service and service stub
+classes from ServiceDescriptor objects at runtime.
+
+The GeneratedServiceType and GeneratedServiceStubType metaclasses are used to
+inject all useful functionality into the classes output by the protocol
+compiler at compile-time.
+"""
+
+__author__ = 'petar@google.com (Petar Petrov)'
+
+
+class GeneratedServiceType(type):
+
+ """Metaclass for service classes created at runtime from ServiceDescriptors.
+
+ Implementations for all methods described in the Service class are added here
+ by this class. We also create properties to allow getting/setting all fields
+ in the protocol message.
+
+ The protocol compiler currently uses this metaclass to create protocol service
+ classes at runtime. Clients can also manually create their own classes at
+ runtime, as in this example::
+
+ mydescriptor = ServiceDescriptor(.....)
+ class MyProtoService(service.Service):
+ __metaclass__ = GeneratedServiceType
+ DESCRIPTOR = mydescriptor
+ myservice_instance = MyProtoService()
+ # ...
+ """
+
+ _DESCRIPTOR_KEY = 'DESCRIPTOR'
+
+ def __init__(cls, name, bases, dictionary):
+ """Creates a message service class.
+
+ Args:
+ name: Name of the class (ignored, but required by the metaclass
+ protocol).
+ bases: Base classes of the class being constructed.
+ dictionary: The class dictionary of the class being constructed.
+ dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object
+ describing this protocol service type.
+ """
+ # Don't do anything if this class doesn't have a descriptor. This happens
+ # when a service class is subclassed.
+ if GeneratedServiceType._DESCRIPTOR_KEY not in dictionary:
+ return
+
+ descriptor = dictionary[GeneratedServiceType._DESCRIPTOR_KEY]
+ service_builder = _ServiceBuilder(descriptor)
+ service_builder.BuildService(cls)
+ cls.DESCRIPTOR = descriptor
+
+
+class GeneratedServiceStubType(GeneratedServiceType):
+
+ """Metaclass for service stubs created at runtime from ServiceDescriptors.
+
+ This class has similar responsibilities as GeneratedServiceType, except that
+ it creates the service stub classes.
+ """
+
+ _DESCRIPTOR_KEY = 'DESCRIPTOR'
+
+ def __init__(cls, name, bases, dictionary):
+ """Creates a message service stub class.
+
+ Args:
+ name: Name of the class (ignored, here).
+ bases: Base classes of the class being constructed.
+ dictionary: The class dictionary of the class being constructed.
+ dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object
+ describing this protocol service type.
+ """
+ super(GeneratedServiceStubType, cls).__init__(name, bases, dictionary)
+ # Don't do anything if this class doesn't have a descriptor. This happens
+ # when a service stub is subclassed.
+ if GeneratedServiceStubType._DESCRIPTOR_KEY not in dictionary:
+ return
+
+ descriptor = dictionary[GeneratedServiceStubType._DESCRIPTOR_KEY]
+ service_stub_builder = _ServiceStubBuilder(descriptor)
+ service_stub_builder.BuildServiceStub(cls)
+
+
+class _ServiceBuilder(object):
+
+ """This class constructs a protocol service class using a service descriptor.
+
+ Given a service descriptor, this class constructs a class that represents
+ the specified service descriptor. One service builder instance constructs
+ exactly one service class. That means all instances of that class share the
+ same builder.
+ """
+
+ def __init__(self, service_descriptor):
+ """Initializes an instance of the service class builder.
+
+ Args:
+ service_descriptor: ServiceDescriptor to use when constructing the
+ service class.
+ """
+ self.descriptor = service_descriptor
+
+ def BuildService(self, cls):
+ """Constructs the service class.
+
+ Args:
+ cls: The class that will be constructed.
+ """
+
+ # CallMethod needs to operate with an instance of the Service class. This
+ # internal wrapper function exists only to be able to pass the service
+ # instance to the method that does the real CallMethod work.
+ def _WrapCallMethod(srvc, method_descriptor,
+ rpc_controller, request, callback):
+ return self._CallMethod(srvc, method_descriptor,
+ rpc_controller, request, callback)
+ self.cls = cls
+ cls.CallMethod = _WrapCallMethod
+ cls.GetDescriptor = staticmethod(lambda: self.descriptor)
+ cls.GetDescriptor.__doc__ = "Returns the service descriptor."
+ cls.GetRequestClass = self._GetRequestClass
+ cls.GetResponseClass = self._GetResponseClass
+ for method in self.descriptor.methods:
+ setattr(cls, method.name, self._GenerateNonImplementedMethod(method))
+
+ def _CallMethod(self, srvc, method_descriptor,
+ rpc_controller, request, callback):
+ """Calls the method described by a given method descriptor.
+
+ Args:
+ srvc: Instance of the service for which this method is called.
+ method_descriptor: Descriptor that represent the method to call.
+ rpc_controller: RPC controller to use for this method's execution.
+ request: Request protocol message.
+ callback: A callback to invoke after the method has completed.
+ """
+ if method_descriptor.containing_service != self.descriptor:
+ raise RuntimeError(
+ 'CallMethod() given method descriptor for wrong service type.')
+ method = getattr(srvc, method_descriptor.name)
+ return method(rpc_controller, request, callback)
+
+ def _GetRequestClass(self, method_descriptor):
+ """Returns the class of the request protocol message.
+
+ Args:
+ method_descriptor: Descriptor of the method for which to return the
+ request protocol message class.
+
+ Returns:
+ A class that represents the input protocol message of the specified
+ method.
+ """
+ if method_descriptor.containing_service != self.descriptor:
+ raise RuntimeError(
+ 'GetRequestClass() given method descriptor for wrong service type.')
+ return method_descriptor.input_type._concrete_class
+
+ def _GetResponseClass(self, method_descriptor):
+ """Returns the class of the response protocol message.
+
+ Args:
+ method_descriptor: Descriptor of the method for which to return the
+ response protocol message class.
+
+ Returns:
+ A class that represents the output protocol message of the specified
+ method.
+ """
+ if method_descriptor.containing_service != self.descriptor:
+ raise RuntimeError(
+ 'GetResponseClass() given method descriptor for wrong service type.')
+ return method_descriptor.output_type._concrete_class
+
+ def _GenerateNonImplementedMethod(self, method):
+ """Generates and returns a method that can be set for a service methods.
+
+ Args:
+ method: Descriptor of the service method for which a method is to be
+ generated.
+
+ Returns:
+ A method that can be added to the service class.
+ """
+ return lambda inst, rpc_controller, request, callback: (
+ self._NonImplementedMethod(method.name, rpc_controller, callback))
+
+ def _NonImplementedMethod(self, method_name, rpc_controller, callback):
+ """The body of all methods in the generated service class.
+
+ Args:
+ method_name: Name of the method being executed.
+ rpc_controller: RPC controller used to execute this method.
+ callback: A callback which will be invoked when the method finishes.
+ """
+ rpc_controller.SetFailed('Method %s not implemented.' % method_name)
+ callback(None)
+
+
+class _ServiceStubBuilder(object):
+
+ """Constructs a protocol service stub class using a service descriptor.
+
+ Given a service descriptor, this class constructs a suitable stub class.
+ A stub is just a type-safe wrapper around an RpcChannel which emulates a
+ local implementation of the service.
+
+ One service stub builder instance constructs exactly one class. It means all
+ instances of that class share the same service stub builder.
+ """
+
+ def __init__(self, service_descriptor):
+ """Initializes an instance of the service stub class builder.
+
+ Args:
+ service_descriptor: ServiceDescriptor to use when constructing the
+ stub class.
+ """
+ self.descriptor = service_descriptor
+
+ def BuildServiceStub(self, cls):
+ """Constructs the stub class.
+
+ Args:
+ cls: The class that will be constructed.
+ """
+
+ def _ServiceStubInit(stub, rpc_channel):
+ stub.rpc_channel = rpc_channel
+ self.cls = cls
+ cls.__init__ = _ServiceStubInit
+ for method in self.descriptor.methods:
+ setattr(cls, method.name, self._GenerateStubMethod(method))
+
+ def _GenerateStubMethod(self, method):
+ return (lambda inst, rpc_controller, request, callback=None:
+ self._StubMethod(inst, method, rpc_controller, request, callback))
+
+ def _StubMethod(self, stub, method_descriptor,
+ rpc_controller, request, callback):
+ """The body of all service methods in the generated stub class.
+
+ Args:
+ stub: Stub instance.
+ method_descriptor: Descriptor of the invoked method.
+ rpc_controller: Rpc controller to execute the method.
+ request: Request protocol message.
+ callback: A callback to execute when the method finishes.
+ Returns:
+ Response message (in case of blocking call).
+ """
+ return stub.rpc_channel.CallMethod(
+ method_descriptor, rpc_controller, request,
+ method_descriptor.output_type._concrete_class, callback)
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/source_context_pb2.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/source_context_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..e2a03753d44497c47f33354a55df71a54147928d
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/source_context_pb2.py
@@ -0,0 +1,35 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/protobuf/source_context.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$google/protobuf/source_context.proto\x12\x0fgoogle.protobuf\"\"\n\rSourceContext\x12\x11\n\tfile_name\x18\x01 \x01(\tB\x8a\x01\n\x13\x63om.google.protobufB\x12SourceContextProtoP\x01Z6google.golang.org/protobuf/types/known/sourcecontextpb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
+
+
+
+_SOURCECONTEXT = DESCRIPTOR.message_types_by_name['SourceContext']
+SourceContext = _reflection.GeneratedProtocolMessageType('SourceContext', (_message.Message,), {
+ 'DESCRIPTOR' : _SOURCECONTEXT,
+ '__module__' : 'google.protobuf.source_context_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.SourceContext)
+ })
+_sym_db.RegisterMessage(SourceContext)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\022SourceContextProtoP\001Z6google.golang.org/protobuf/types/known/sourcecontextpb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
+ _SOURCECONTEXT._serialized_start=57
+ _SOURCECONTEXT._serialized_end=91
+# @@protoc_insertion_point(module_scope)
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/struct_pb2.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/struct_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..9285b3e63dbc5828d7a3ad48b937eb472c52860d
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/struct_pb2.py
@@ -0,0 +1,74 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/protobuf/struct.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1cgoogle/protobuf/struct.proto\x12\x0fgoogle.protobuf\"\x84\x01\n\x06Struct\x12\x33\n\x06\x66ields\x18\x01 \x03(\x0b\x32#.google.protobuf.Struct.FieldsEntry\x1a\x45\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.google.protobuf.Value:\x02\x38\x01\"\xea\x01\n\x05Value\x12\x30\n\nnull_value\x18\x01 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x16\n\x0cnumber_value\x18\x02 \x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x03 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x04 \x01(\x08H\x00\x12/\n\x0cstruct_value\x18\x05 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12\x30\n\nlist_value\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.ListValueH\x00\x42\x06\n\x04kind\"3\n\tListValue\x12&\n\x06values\x18\x01 \x03(\x0b\x32\x16.google.protobuf.Value*\x1b\n\tNullValue\x12\x0e\n\nNULL_VALUE\x10\x00\x42\x7f\n\x13\x63om.google.protobufB\x0bStructProtoP\x01Z/google.golang.org/protobuf/types/known/structpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
+
+_NULLVALUE = DESCRIPTOR.enum_types_by_name['NullValue']
+NullValue = enum_type_wrapper.EnumTypeWrapper(_NULLVALUE)
+NULL_VALUE = 0
+
+
+_STRUCT = DESCRIPTOR.message_types_by_name['Struct']
+_STRUCT_FIELDSENTRY = _STRUCT.nested_types_by_name['FieldsEntry']
+_VALUE = DESCRIPTOR.message_types_by_name['Value']
+_LISTVALUE = DESCRIPTOR.message_types_by_name['ListValue']
+Struct = _reflection.GeneratedProtocolMessageType('Struct', (_message.Message,), {
+
+ 'FieldsEntry' : _reflection.GeneratedProtocolMessageType('FieldsEntry', (_message.Message,), {
+ 'DESCRIPTOR' : _STRUCT_FIELDSENTRY,
+ '__module__' : 'google.protobuf.struct_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.Struct.FieldsEntry)
+ })
+ ,
+ 'DESCRIPTOR' : _STRUCT,
+ '__module__' : 'google.protobuf.struct_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.Struct)
+ })
+_sym_db.RegisterMessage(Struct)
+_sym_db.RegisterMessage(Struct.FieldsEntry)
+
+Value = _reflection.GeneratedProtocolMessageType('Value', (_message.Message,), {
+ 'DESCRIPTOR' : _VALUE,
+ '__module__' : 'google.protobuf.struct_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.Value)
+ })
+_sym_db.RegisterMessage(Value)
+
+ListValue = _reflection.GeneratedProtocolMessageType('ListValue', (_message.Message,), {
+ 'DESCRIPTOR' : _LISTVALUE,
+ '__module__' : 'google.protobuf.struct_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.ListValue)
+ })
+_sym_db.RegisterMessage(ListValue)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\013StructProtoP\001Z/google.golang.org/protobuf/types/known/structpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
+ _STRUCT_FIELDSENTRY._options = None
+ _STRUCT_FIELDSENTRY._serialized_options = b'8\001'
+ _NULLVALUE._serialized_start=474
+ _NULLVALUE._serialized_end=501
+ _STRUCT._serialized_start=50
+ _STRUCT._serialized_end=182
+ _STRUCT_FIELDSENTRY._serialized_start=113
+ _STRUCT_FIELDSENTRY._serialized_end=182
+ _VALUE._serialized_start=185
+ _VALUE._serialized_end=419
+ _LISTVALUE._serialized_start=421
+ _LISTVALUE._serialized_end=472
+# @@protoc_insertion_point(module_scope)
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/symbol_database.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/symbol_database.py
new file mode 100644
index 0000000000000000000000000000000000000000..fdcf8cf06ced70c01d291c672a08850238e5d3c9
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/symbol_database.py
@@ -0,0 +1,194 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""A database of Python protocol buffer generated symbols.
+
+SymbolDatabase is the MessageFactory for messages generated at compile time,
+and makes it easy to create new instances of a registered type, given only the
+type's protocol buffer symbol name.
+
+Example usage::
+
+ db = symbol_database.SymbolDatabase()
+
+ # Register symbols of interest, from one or multiple files.
+ db.RegisterFileDescriptor(my_proto_pb2.DESCRIPTOR)
+ db.RegisterMessage(my_proto_pb2.MyMessage)
+ db.RegisterEnumDescriptor(my_proto_pb2.MyEnum.DESCRIPTOR)
+
+ # The database can be used as a MessageFactory, to generate types based on
+ # their name:
+ types = db.GetMessages(['my_proto.proto'])
+ my_message_instance = types['MyMessage']()
+
+ # The database's underlying descriptor pool can be queried, so it's not
+ # necessary to know a type's filename to be able to generate it:
+ filename = db.pool.FindFileContainingSymbol('MyMessage')
+ my_message_instance = db.GetMessages([filename])['MyMessage']()
+
+ # This functionality is also provided directly via a convenience method:
+ my_message_instance = db.GetSymbol('MyMessage')()
+"""
+
+
+from google.protobuf.internal import api_implementation
+from google.protobuf import descriptor_pool
+from google.protobuf import message_factory
+
+
+class SymbolDatabase(message_factory.MessageFactory):
+ """A database of Python generated symbols."""
+
+ def RegisterMessage(self, message):
+ """Registers the given message type in the local database.
+
+ Calls to GetSymbol() and GetMessages() will return messages registered here.
+
+ Args:
+ message: A :class:`google.protobuf.message.Message` subclass (or
+ instance); its descriptor will be registered.
+
+ Returns:
+ The provided message.
+ """
+
+ desc = message.DESCRIPTOR
+ self._classes[desc] = message
+ self.RegisterMessageDescriptor(desc)
+ return message
+
+ def RegisterMessageDescriptor(self, message_descriptor):
+ """Registers the given message descriptor in the local database.
+
+ Args:
+ message_descriptor (Descriptor): the message descriptor to add.
+ """
+ if api_implementation.Type() == 'python':
+ # pylint: disable=protected-access
+ self.pool._AddDescriptor(message_descriptor)
+
+ def RegisterEnumDescriptor(self, enum_descriptor):
+ """Registers the given enum descriptor in the local database.
+
+ Args:
+ enum_descriptor (EnumDescriptor): The enum descriptor to register.
+
+ Returns:
+ EnumDescriptor: The provided descriptor.
+ """
+ if api_implementation.Type() == 'python':
+ # pylint: disable=protected-access
+ self.pool._AddEnumDescriptor(enum_descriptor)
+ return enum_descriptor
+
+ def RegisterServiceDescriptor(self, service_descriptor):
+ """Registers the given service descriptor in the local database.
+
+ Args:
+ service_descriptor (ServiceDescriptor): the service descriptor to
+ register.
+ """
+ if api_implementation.Type() == 'python':
+ # pylint: disable=protected-access
+ self.pool._AddServiceDescriptor(service_descriptor)
+
+ def RegisterFileDescriptor(self, file_descriptor):
+ """Registers the given file descriptor in the local database.
+
+ Args:
+ file_descriptor (FileDescriptor): The file descriptor to register.
+ """
+ if api_implementation.Type() == 'python':
+ # pylint: disable=protected-access
+ self.pool._InternalAddFileDescriptor(file_descriptor)
+
+ def GetSymbol(self, symbol):
+ """Tries to find a symbol in the local database.
+
+ Currently, this method only returns message.Message instances, however, if
+ may be extended in future to support other symbol types.
+
+ Args:
+ symbol (str): a protocol buffer symbol.
+
+ Returns:
+ A Python class corresponding to the symbol.
+
+ Raises:
+ KeyError: if the symbol could not be found.
+ """
+
+ return self._classes[self.pool.FindMessageTypeByName(symbol)]
+
+ def GetMessages(self, files):
+ # TODO(amauryfa): Fix the differences with MessageFactory.
+ """Gets all registered messages from a specified file.
+
+ Only messages already created and registered will be returned; (this is the
+ case for imported _pb2 modules)
+ But unlike MessageFactory, this version also returns already defined nested
+ messages, but does not register any message extensions.
+
+ Args:
+ files (list[str]): The file names to extract messages from.
+
+ Returns:
+ A dictionary mapping proto names to the message classes.
+
+ Raises:
+ KeyError: if a file could not be found.
+ """
+
+ def _GetAllMessages(desc):
+ """Walk a message Descriptor and recursively yields all message names."""
+ yield desc
+ for msg_desc in desc.nested_types:
+ for nested_desc in _GetAllMessages(msg_desc):
+ yield nested_desc
+
+ result = {}
+ for file_name in files:
+ file_desc = self.pool.FindFileByName(file_name)
+ for msg_desc in file_desc.message_types_by_name.values():
+ for desc in _GetAllMessages(msg_desc):
+ try:
+ result[desc.full_name] = self._classes[desc]
+ except KeyError:
+ # This descriptor has no registered class, skip it.
+ pass
+ return result
+
+
+_DEFAULT = SymbolDatabase(pool=descriptor_pool.Default())
+
+
+def Default():
+ """Returns the default SymbolDatabase."""
+ return _DEFAULT
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/text_encoding.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/text_encoding.py
new file mode 100644
index 0000000000000000000000000000000000000000..759cf11f62bc4f1dac7f73af45fc4d3c892580d2
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/text_encoding.py
@@ -0,0 +1,110 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Encoding related utilities."""
+import re
+
+_cescape_chr_to_symbol_map = {}
+_cescape_chr_to_symbol_map[9] = r'\t' # optional escape
+_cescape_chr_to_symbol_map[10] = r'\n' # optional escape
+_cescape_chr_to_symbol_map[13] = r'\r' # optional escape
+_cescape_chr_to_symbol_map[34] = r'\"' # necessary escape
+_cescape_chr_to_symbol_map[39] = r"\'" # optional escape
+_cescape_chr_to_symbol_map[92] = r'\\' # necessary escape
+
+# Lookup table for unicode
+_cescape_unicode_to_str = [chr(i) for i in range(0, 256)]
+for byte, string in _cescape_chr_to_symbol_map.items():
+ _cescape_unicode_to_str[byte] = string
+
+# Lookup table for non-utf8, with necessary escapes at (o >= 127 or o < 32)
+_cescape_byte_to_str = ([r'\%03o' % i for i in range(0, 32)] +
+ [chr(i) for i in range(32, 127)] +
+ [r'\%03o' % i for i in range(127, 256)])
+for byte, string in _cescape_chr_to_symbol_map.items():
+ _cescape_byte_to_str[byte] = string
+del byte, string
+
+
+def CEscape(text, as_utf8):
+ # type: (...) -> str
+ """Escape a bytes string for use in an text protocol buffer.
+
+ Args:
+ text: A byte string to be escaped.
+ as_utf8: Specifies if result may contain non-ASCII characters.
+ In Python 3 this allows unescaped non-ASCII Unicode characters.
+ In Python 2 the return value will be valid UTF-8 rather than only ASCII.
+ Returns:
+ Escaped string (str).
+ """
+ # Python's text.encode() 'string_escape' or 'unicode_escape' codecs do not
+ # satisfy our needs; they encodes unprintable characters using two-digit hex
+ # escapes whereas our C++ unescaping function allows hex escapes to be any
+ # length. So, "\0011".encode('string_escape') ends up being "\\x011", which
+ # will be decoded in C++ as a single-character string with char code 0x11.
+ text_is_unicode = isinstance(text, str)
+ if as_utf8 and text_is_unicode:
+ # We're already unicode, no processing beyond control char escapes.
+ return text.translate(_cescape_chr_to_symbol_map)
+ ord_ = ord if text_is_unicode else lambda x: x # bytes iterate as ints.
+ if as_utf8:
+ return ''.join(_cescape_unicode_to_str[ord_(c)] for c in text)
+ return ''.join(_cescape_byte_to_str[ord_(c)] for c in text)
+
+
+_CUNESCAPE_HEX = re.compile(r'(\\+)x([0-9a-fA-F])(?![0-9a-fA-F])')
+
+
+def CUnescape(text):
+ # type: (str) -> bytes
+ """Unescape a text string with C-style escape sequences to UTF-8 bytes.
+
+ Args:
+ text: The data to parse in a str.
+ Returns:
+ A byte string.
+ """
+
+ def ReplaceHex(m):
+ # Only replace the match if the number of leading back slashes is odd. i.e.
+ # the slash itself is not escaped.
+ if len(m.group(1)) & 1:
+ return m.group(1) + 'x0' + m.group(2)
+ return m.group(0)
+
+ # This is required because the 'string_escape' encoding doesn't
+ # allow single-digit hex escapes (like '\xf').
+ result = _CUNESCAPE_HEX.sub(ReplaceHex, text)
+
+ return (result.encode('utf-8') # Make it bytes to allow decode.
+ .decode('unicode_escape')
+ # Make it bytes again to return the proper type.
+ .encode('raw_unicode_escape'))
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/text_format.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/text_format.py
new file mode 100644
index 0000000000000000000000000000000000000000..412385c26f995e2fea4fa868c79a07c80f315f18
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/text_format.py
@@ -0,0 +1,1795 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Contains routines for printing protocol messages in text format.
+
+Simple usage example::
+
+ # Create a proto object and serialize it to a text proto string.
+ message = my_proto_pb2.MyMessage(foo='bar')
+ text_proto = text_format.MessageToString(message)
+
+ # Parse a text proto string.
+ message = text_format.Parse(text_proto, my_proto_pb2.MyMessage())
+"""
+
+__author__ = 'kenton@google.com (Kenton Varda)'
+
+# TODO(b/129989314) Import thread contention leads to test failures.
+import encodings.raw_unicode_escape # pylint: disable=unused-import
+import encodings.unicode_escape # pylint: disable=unused-import
+import io
+import math
+import re
+
+from google.protobuf.internal import decoder
+from google.protobuf.internal import type_checkers
+from google.protobuf import descriptor
+from google.protobuf import text_encoding
+
+# pylint: disable=g-import-not-at-top
# Public API of this module.
__all__ = ['MessageToString', 'Parse', 'PrintMessage', 'PrintField',
           'PrintFieldValue', 'Merge', 'MessageToBytes']

# Range/type checkers for the four fixed-width integer kinds; presumably used
# when parsing integer literals later in this module — TODO confirm.
_INTEGER_CHECKERS = (type_checkers.Uint32ValueChecker(),
                     type_checkers.Int32ValueChecker(),
                     type_checkers.Uint64ValueChecker(),
                     type_checkers.Int64ValueChecker())
# Textual spellings of float specials, case-insensitive, with an optional
# trailing 'f' (C-style float suffix): 'inf', '-Infinity', 'nanf', ...
_FLOAT_INFINITY = re.compile('-?inf(?:inity)?f?$', re.IGNORECASE)
_FLOAT_NAN = re.compile('nanf?$', re.IGNORECASE)
# Characters accepted as string-literal delimiters by the tokenizer.
_QUOTES = frozenset(("'", '"'))
# Full type name that triggers the special expanded-Any printing/parsing.
_ANY_FULL_TYPE_NAME = 'google.protobuf.Any'
+
+
class Error(Exception):
  """Top-level module error for text_format.

  Base class for all exceptions raised by this module (see ParseError).
  """
+
+
class ParseError(Error):
  """Thrown in case of text parsing or tokenizing error.

  When both a message and a line are supplied, the exception text is
  prefixed with the location as 'line : message' or 'line:column : message'.
  """

  def __init__(self, message=None, line=None, column=None):
    self._line = line
    self._column = column
    if message is not None and line is not None:
      # Prefix the message with its source location.
      location = str(line) if column is None else '{0}:{1}'.format(line,
                                                                   column)
      super(ParseError, self).__init__('{0} : {1}'.format(location, message))
    elif message is not None:
      super(ParseError, self).__init__(message)
    else:
      super(ParseError, self).__init__()

  def GetLine(self):
    """Returns the 1-based line of the error, or None if unknown."""
    return self._line

  def GetColumn(self):
    """Returns the column of the error, or None if unknown."""
    return self._column
+
+
class TextWriter(object):
  """File-like adapter that accumulates printed text in memory.

  NOTE(review): the as_utf8 flag is accepted but ignored — the buffer is
  always an io.StringIO here; the parameter is presumably retained for
  compatibility with the historical Python 2 code path.
  """

  def __init__(self, as_utf8):
    # as_utf8 intentionally unused; see class docstring.
    self._writer = io.StringIO()

  def write(self, val):
    # Delegates to StringIO.write; returns the number of characters written.
    return self._writer.write(val)

  def close(self):
    # Frees the underlying buffer; getvalue() is invalid after this.
    return self._writer.close()

  def getvalue(self):
    # Returns everything written so far as a single str.
    return self._writer.getvalue()
+
+
def MessageToString(
    message,
    as_utf8=False,
    as_one_line=False,
    use_short_repeated_primitives=False,
    pointy_brackets=False,
    use_index_order=False,
    float_format=None,
    double_format=None,
    use_field_number=False,
    descriptor_pool=None,
    indent=0,
    message_formatter=None,
    print_unknown_fields=False,
    force_colon=False):
  # type: (...) -> str
  """Convert protobuf message to text format.

  Double values can be formatted compactly with 15 digits of
  precision (which is the most that IEEE 754 "double" can guarantee)
  using double_format='.15g'. To ensure that converting to text and back to a
  proto will result in an identical value, double_format='.17g' should be used.

  Args:
    message: The protocol buffers message.
    as_utf8: Return unescaped Unicode for non-ASCII characters.
        In Python 3 actual Unicode characters may appear as is in strings.
        In Python 2 the return value will be valid UTF-8 rather than only ASCII.
    as_one_line: Don't introduce newlines between fields.
    use_short_repeated_primitives: Use short repeated format for primitives.
    pointy_brackets: If True, use angle brackets instead of curly braces for
      nesting.
    use_index_order: If True, fields of a proto message will be printed using
      the order defined in source code instead of the field number, extensions
      will be printed at the end of the message and their relative order is
      determined by the extension number. By default, use the field number
      order.
    float_format (str): If set, use this to specify float field formatting
      (per the "Format Specification Mini-Language"); otherwise, shortest float
      that has same value in wire will be printed. Also affect double field
      if double_format is not set but float_format is set.
    double_format (str): If set, use this to specify double field formatting
      (per the "Format Specification Mini-Language"); if it is not set but
      float_format is set, use float_format. Otherwise, use ``str()``
    use_field_number: If True, print field numbers instead of names.
    descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types.
    indent (int): The initial indent level, in terms of spaces, for pretty
      print.
    message_formatter (function(message, indent, as_one_line) -> unicode|None):
      Custom formatter for selected sub-messages (usually based on message
      type). Use to pretty print parts of the protobuf for easier diffing.
    print_unknown_fields: If True, unknown fields will be printed.
    force_colon: If set, a colon will be added after the field name even if the
      field is a proto message.

  Returns:
    str: A string of the text formatted protocol buffer message.
  """
  # Collect output in an in-memory buffer, then hand the real work to
  # _Printer. NOTE: the positional arguments below must stay in exactly the
  # order of _Printer.__init__'s signature.
  out = TextWriter(as_utf8)
  printer = _Printer(
      out,
      indent,
      as_utf8,
      as_one_line,
      use_short_repeated_primitives,
      pointy_brackets,
      use_index_order,
      float_format,
      double_format,
      use_field_number,
      descriptor_pool,
      message_formatter,
      print_unknown_fields=print_unknown_fields,
      force_colon=force_colon)
  printer.PrintMessage(message)
  result = out.getvalue()
  out.close()
  if as_one_line:
    # Single-line mode leaves a trailing separator space; strip it.
    return result.rstrip()
  return result
+
+
def MessageToBytes(message, **kwargs):
  # type: (...) -> bytes
  """Convert protobuf message to encoded text format. See MessageToString."""
  rendered = MessageToString(message, **kwargs)
  if isinstance(rendered, bytes):
    # Already bytes (possible on legacy code paths); pass through unchanged.
    return rendered
  # Non-ASCII output is only produced when as_utf8 was requested.
  encoding = 'utf-8' if kwargs.get('as_utf8') else 'ascii'
  return rendered.encode(encoding)
+
+
def _IsMapEntry(field):
  """Returns True iff field is a synthesized map-entry message field."""
  if field.type != descriptor.FieldDescriptor.TYPE_MESSAGE:
    return False
  entry_type = field.message_type
  return bool(entry_type.has_options and entry_type.GetOptions().map_entry)
+
+
def PrintMessage(message,
                 out,
                 indent=0,
                 as_utf8=False,
                 as_one_line=False,
                 use_short_repeated_primitives=False,
                 pointy_brackets=False,
                 use_index_order=False,
                 float_format=None,
                 double_format=None,
                 use_field_number=False,
                 descriptor_pool=None,
                 message_formatter=None,
                 print_unknown_fields=False,
                 force_colon=False):
  """Convert the message to text format and write it to the out stream.

  Args:
    message: The Message object to convert to text format.
    out: A file handle to write the message to.
    indent/as_utf8/...: See MessageToString() for the meaning of the
      remaining formatting arguments; they are forwarded to _Printer as-is.
  """
  printer = _Printer(
      out=out, indent=indent, as_utf8=as_utf8,
      as_one_line=as_one_line,
      use_short_repeated_primitives=use_short_repeated_primitives,
      pointy_brackets=pointy_brackets,
      use_index_order=use_index_order,
      float_format=float_format,
      double_format=double_format,
      use_field_number=use_field_number,
      descriptor_pool=descriptor_pool,
      message_formatter=message_formatter,
      print_unknown_fields=print_unknown_fields,
      force_colon=force_colon)
  printer.PrintMessage(message)
+
+
def PrintField(field,
               value,
               out,
               indent=0,
               as_utf8=False,
               as_one_line=False,
               use_short_repeated_primitives=False,
               pointy_brackets=False,
               use_index_order=False,
               float_format=None,
               double_format=None,
               message_formatter=None,
               print_unknown_fields=False,
               force_colon=False):
  """Print a single field name/value pair.

  Writes to the out stream; formatting arguments are forwarded to _Printer
  (see MessageToString() for their meaning).
  """
  printer = _Printer(out, indent, as_utf8, as_one_line,
                     use_short_repeated_primitives, pointy_brackets,
                     use_index_order, float_format, double_format,
                     message_formatter=message_formatter,
                     print_unknown_fields=print_unknown_fields,
                     force_colon=force_colon)
  printer.PrintField(field, value)
+
+
def PrintFieldValue(field,
                    value,
                    out,
                    indent=0,
                    as_utf8=False,
                    as_one_line=False,
                    use_short_repeated_primitives=False,
                    pointy_brackets=False,
                    use_index_order=False,
                    float_format=None,
                    double_format=None,
                    message_formatter=None,
                    print_unknown_fields=False,
                    force_colon=False):
  """Print a single field value (not including name).

  Writes to the out stream; formatting arguments are forwarded to _Printer
  (see MessageToString() for their meaning).
  """
  printer = _Printer(out, indent, as_utf8, as_one_line,
                     use_short_repeated_primitives, pointy_brackets,
                     use_index_order, float_format, double_format,
                     message_formatter=message_formatter,
                     print_unknown_fields=print_unknown_fields,
                     force_colon=force_colon)
  printer.PrintFieldValue(field, value)
+
+
def _BuildMessageFromTypeName(type_name, descriptor_pool):
  """Returns a protobuf message instance.

  Args:
    type_name: Fully-qualified protobuf message type name string.
    descriptor_pool: DescriptorPool instance.

  Returns:
    A Message instance of type matching type_name, or None if the a Descriptor
    wasn't found matching type_name.
  """
  # pylint: disable=g-import-not-at-top
  # Imported locally rather than at module scope — presumably to avoid a
  # circular import at load time; TODO confirm against upstream rationale.
  if descriptor_pool is None:
    from google.protobuf import descriptor_pool as pool_mod
    descriptor_pool = pool_mod.Default()
  from google.protobuf import symbol_database
  database = symbol_database.Default()
  try:
    message_descriptor = descriptor_pool.FindMessageTypeByName(type_name)
  except KeyError:
    # Unknown type: signal with None instead of raising.
    return None
  message_type = database.GetPrototype(message_descriptor)
  return message_type()
+
+
# These values must match WireType enum in google/protobuf/wire_format.h.
# Used by _Printer._PrintUnknownFields to decide how to render unknown-field
# payloads (group recursion vs. length-delimited vs. plain varint/fixed).
WIRETYPE_LENGTH_DELIMITED = 2
WIRETYPE_START_GROUP = 3
+
+
class _Printer(object):
  """Text format printer for protocol message.

  Stateful: self.indent is mutated while recursing into sub-messages, so a
  single instance is not safe to share across concurrent prints.
  """

  def __init__(
      self,
      out,
      indent=0,
      as_utf8=False,
      as_one_line=False,
      use_short_repeated_primitives=False,
      pointy_brackets=False,
      use_index_order=False,
      float_format=None,
      double_format=None,
      use_field_number=False,
      descriptor_pool=None,
      message_formatter=None,
      print_unknown_fields=False,
      force_colon=False):
    """Initialize the Printer.

    Double values can be formatted compactly with 15 digits of precision
    (which is the most that IEEE 754 "double" can guarantee) using
    double_format='.15g'. To ensure that converting to text and back to a proto
    will result in an identical value, double_format='.17g' should be used.

    Args:
      out: To record the text format result.
      indent: The initial indent level for pretty print.
      as_utf8: Return unescaped Unicode for non-ASCII characters.
          In Python 3 actual Unicode characters may appear as is in strings.
          In Python 2 the return value will be valid UTF-8 rather than ASCII.
      as_one_line: Don't introduce newlines between fields.
      use_short_repeated_primitives: Use short repeated format for primitives.
      pointy_brackets: If True, use angle brackets instead of curly braces for
        nesting.
      use_index_order: If True, print fields of a proto message using the order
        defined in source code instead of the field number. By default, use the
        field number order.
      float_format: If set, use this to specify float field formatting
        (per the "Format Specification Mini-Language"); otherwise, shortest
        float that has same value in wire will be printed. Also affect double
        field if double_format is not set but float_format is set.
      double_format: If set, use this to specify double field formatting
        (per the "Format Specification Mini-Language"); if it is not set but
        float_format is set, use float_format. Otherwise, str() is used.
      use_field_number: If True, print field numbers instead of names.
      descriptor_pool: A DescriptorPool used to resolve Any types.
      message_formatter: A function(message, indent, as_one_line): unicode|None
        to custom format selected sub-messages (usually based on message type).
        Use to pretty print parts of the protobuf for easier diffing.
      print_unknown_fields: If True, unknown fields will be printed.
      force_colon: If set, a colon will be added after the field name even if
        the field is a proto message.
    """
    self.out = out
    self.indent = indent
    self.as_utf8 = as_utf8
    self.as_one_line = as_one_line
    self.use_short_repeated_primitives = use_short_repeated_primitives
    self.pointy_brackets = pointy_brackets
    self.use_index_order = use_index_order
    self.float_format = float_format
    # Doubles fall back to float_format when no explicit double_format given.
    if double_format is not None:
      self.double_format = double_format
    else:
      self.double_format = float_format
    self.use_field_number = use_field_number
    self.descriptor_pool = descriptor_pool
    self.message_formatter = message_formatter
    self.print_unknown_fields = print_unknown_fields
    self.force_colon = force_colon

  def _TryPrintAsAnyMessage(self, message):
    """Serializes if message is a google.protobuf.Any field.

    Returns True if the Any was expanded and printed, False otherwise
    (malformed type_url or type not found in the descriptor pool).
    """
    if '/' not in message.type_url:
      return False
    packed_message = _BuildMessageFromTypeName(message.TypeName(),
                                               self.descriptor_pool)
    if packed_message:
      packed_message.MergeFromString(message.value)
      colon = ':' if self.force_colon else ''
      self.out.write('%s[%s]%s ' % (self.indent * ' ', message.type_url, colon))
      self._PrintMessageFieldValue(packed_message)
      self.out.write(' ' if self.as_one_line else '\n')
      return True
    else:
      return False

  def _TryCustomFormatMessage(self, message):
    """Applies self.message_formatter; returns True if it produced output."""
    formatted = self.message_formatter(message, self.indent, self.as_one_line)
    if formatted is None:
      return False

    out = self.out
    out.write(' ' * self.indent)
    out.write(formatted)
    out.write(' ' if self.as_one_line else '\n')
    return True

  def PrintMessage(self, message):
    """Convert protobuf message to text format.

    Args:
      message: The protocol buffers message.
    """
    # Custom formatter and expanded-Any handling take precedence over the
    # generic field-by-field rendering.
    if self.message_formatter and self._TryCustomFormatMessage(message):
      return
    if (message.DESCRIPTOR.full_name == _ANY_FULL_TYPE_NAME and
        self._TryPrintAsAnyMessage(message)):
      return
    fields = message.ListFields()
    if self.use_index_order:
      fields.sort(
          key=lambda x: x[0].number if x[0].is_extension else x[0].index)
    for field, value in fields:
      if _IsMapEntry(field):
        for key in sorted(value):
          # This is slow for maps with submessage entries because it copies the
          # entire tree. Unfortunately this would take significant refactoring
          # of this file to work around.
          #
          # TODO(haberman): refactor and optimize if this becomes an issue.
          entry_submsg = value.GetEntryClass()(key=key, value=value[key])
          self.PrintField(field, entry_submsg)
      elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
        if (self.use_short_repeated_primitives
            and field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE
            and field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_STRING):
          self._PrintShortRepeatedPrimitivesValue(field, value)
        else:
          for element in value:
            self.PrintField(field, element)
      else:
        self.PrintField(field, value)

    if self.print_unknown_fields:
      self._PrintUnknownFields(message.UnknownFields())

  def _PrintUnknownFields(self, unknown_fields):
    """Print unknown fields (recursively for groups/embedded messages)."""
    out = self.out
    for field in unknown_fields:
      out.write(' ' * self.indent)
      out.write(str(field.field_number))
      if field.wire_type == WIRETYPE_START_GROUP:
        if self.as_one_line:
          out.write(' { ')
        else:
          out.write(' {\n')
          self.indent += 2

        self._PrintUnknownFields(field.data)

        if self.as_one_line:
          out.write('} ')
        else:
          self.indent -= 2
          out.write(' ' * self.indent + '}\n')
      elif field.wire_type == WIRETYPE_LENGTH_DELIMITED:
        try:
          # If this field is parseable as a Message, it is probably
          # an embedded message.
          # pylint: disable=protected-access
          (embedded_unknown_message, pos) = decoder._DecodeUnknownFieldSet(
              memoryview(field.data), 0, len(field.data))
        except Exception:    # pylint: disable=broad-except
          pos = 0

        if pos == len(field.data):
          # Whole payload decoded cleanly: render as a nested message.
          if self.as_one_line:
            out.write(' { ')
          else:
            out.write(' {\n')
            self.indent += 2

          self._PrintUnknownFields(embedded_unknown_message)

          if self.as_one_line:
            out.write('} ')
          else:
            self.indent -= 2
            out.write(' ' * self.indent + '}\n')
        else:
          # A string or bytes field. self.as_utf8 may not work.
          out.write(': \"')
          out.write(text_encoding.CEscape(field.data, False))
          out.write('\" ' if self.as_one_line else '\"\n')
      else:
        # varint, fixed32, fixed64
        out.write(': ')
        out.write(str(field.data))
        out.write(' ' if self.as_one_line else '\n')

  def _PrintFieldName(self, field):
    """Print field name (number, [extension], group name, or plain name)."""
    out = self.out
    out.write(' ' * self.indent)
    if self.use_field_number:
      out.write(str(field.number))
    else:
      if field.is_extension:
        out.write('[')
        if (field.containing_type.GetOptions().message_set_wire_format and
            field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and
            field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL):
          out.write(field.message_type.full_name)
        else:
          out.write(field.full_name)
        out.write(']')
      elif field.type == descriptor.FieldDescriptor.TYPE_GROUP:
        # For groups, use the capitalized name.
        out.write(field.message_type.name)
      else:
        out.write(field.name)

    if (self.force_colon or
        field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE):
      # The colon is optional in this case, but our cross-language golden files
      # don't include it. Here, the colon is only included if force_colon is
      # set to True
      out.write(':')

  def PrintField(self, field, value):
    """Print a single field name/value pair."""
    self._PrintFieldName(field)
    self.out.write(' ')
    self.PrintFieldValue(field, value)
    self.out.write(' ' if self.as_one_line else '\n')

  def _PrintShortRepeatedPrimitivesValue(self, field, value):
    """Prints short repeated primitives value, e.g. 'foo: [1, 2, 3]'."""
    # Note: this is called only when value has at least one element.
    self._PrintFieldName(field)
    self.out.write(' [')
    for i in range(len(value) - 1):
      self.PrintFieldValue(field, value[i])
      self.out.write(', ')
    self.PrintFieldValue(field, value[-1])
    self.out.write(']')
    self.out.write(' ' if self.as_one_line else '\n')

  def _PrintMessageFieldValue(self, value):
    # Pick the bracket style, then recurse with indent bumped by two spaces
    # (multi-line mode only).
    if self.pointy_brackets:
      openb = '<'
      closeb = '>'
    else:
      openb = '{'
      closeb = '}'

    if self.as_one_line:
      self.out.write('%s ' % openb)
      self.PrintMessage(value)
      self.out.write(closeb)
    else:
      self.out.write('%s\n' % openb)
      self.indent += 2
      self.PrintMessage(value)
      self.indent -= 2
      self.out.write(' ' * self.indent + closeb)

  def PrintFieldValue(self, field, value):
    """Print a single field value (not including name).

    For repeated fields, the value should be a single element.

    Args:
      field: The descriptor of the field to be printed.
      value: The value of the field.
    """
    out = self.out
    if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
      self._PrintMessageFieldValue(value)
    elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
      enum_value = field.enum_type.values_by_number.get(value, None)
      if enum_value is not None:
        out.write(enum_value.name)
      else:
        # Unknown enum number: fall back to the raw integer.
        out.write(str(value))
    elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
      out.write('\"')
      if isinstance(value, str) and not self.as_utf8:
        out_value = value.encode('utf-8')
      else:
        out_value = value
      if field.type == descriptor.FieldDescriptor.TYPE_BYTES:
        # We always need to escape all binary data in TYPE_BYTES fields.
        out_as_utf8 = False
      else:
        out_as_utf8 = self.as_utf8
      out.write(text_encoding.CEscape(out_value, out_as_utf8))
      out.write('\"')
    elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:
      if value:
        out.write('true')
      else:
        out.write('false')
    elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT:
      if self.float_format is not None:
        out.write('{1:{0}}'.format(self.float_format, value))
      else:
        if math.isnan(value):
          out.write(str(value))
        else:
          out.write(str(type_checkers.ToShortestFloat(value)))
    elif (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_DOUBLE and
          self.double_format is not None):
      out.write('{1:{0}}'.format(self.double_format, value))
    else:
      out.write(str(value))
+
+
def Parse(text,
          message,
          allow_unknown_extension=False,
          allow_field_number=False,
          descriptor_pool=None,
          allow_unknown_field=False):
  """Parses a text representation of a protocol message into a message.

  NOTE: for historical reasons this function does not clear the input
  message. This is different from what the binary msg.ParseFrom(...) does.
  If text contains a field already set in message, the value is appended if the
  field is repeated. Otherwise, an error is raised.

  Example::

    a = MyProto()
    a.repeated_field.append('test')
    b = MyProto()

    # Repeated fields are combined
    text_format.Parse(repr(a), b)
    text_format.Parse(repr(a), b) # repeated_field contains ["test", "test"]

    # Non-repeated fields cannot be overwritten
    a.singular_field = 1
    b.singular_field = 2
    text_format.Parse(repr(a), b) # ParseError

    # Binary version:
    b.ParseFromString(a.SerializeToString()) # repeated_field is now "test"

  Caller is responsible for clearing the message as needed.

  Args:
    text (str): Message text representation.
    message (Message): A protocol buffer message to merge into.
    allow_unknown_extension: if True, skip over missing extensions and keep
      parsing
    allow_field_number: if True, both field number and field name are allowed.
    descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types.
    allow_unknown_field: if True, skip over unknown field and keep
      parsing. Avoid to use this option if possible. It may hide some
      errors (e.g. spelling error on field name)

  Returns:
    Message: The same message passed as argument.

  Raises:
    ParseError: On text parsing problems.
  """
  # Accept both str and bytes input: split on the newline of the matching
  # type, then delegate to the line-based entry point.
  return ParseLines(text.split(b'\n' if isinstance(text, bytes) else u'\n'),
                    message,
                    allow_unknown_extension,
                    allow_field_number,
                    descriptor_pool=descriptor_pool,
                    allow_unknown_field=allow_unknown_field)
+
+
def Merge(text,
          message,
          allow_unknown_extension=False,
          allow_field_number=False,
          descriptor_pool=None,
          allow_unknown_field=False):
  """Parses a text representation of a protocol message into a message.

  Like Parse(), but allows repeated values for a non-repeated field, and uses
  the last one. This means any non-repeated, top-level fields specified in text
  replace those in the message.

  Args:
    text (str): Message text representation.
    message (Message): A protocol buffer message to merge into.
    allow_unknown_extension: if True, skip over missing extensions and keep
      parsing
    allow_field_number: if True, both field number and field name are allowed.
    descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types.
    allow_unknown_field: if True, skip over unknown field and keep
      parsing. Avoid to use this option if possible. It may hide some
      errors (e.g. spelling error on field name)

  Returns:
    Message: The same message passed as argument.

  Raises:
    ParseError: On text parsing problems.
  """
  # Same str/bytes handling as Parse(); only the merge semantics differ.
  return MergeLines(
      text.split(b'\n' if isinstance(text, bytes) else u'\n'),
      message,
      allow_unknown_extension,
      allow_field_number,
      descriptor_pool=descriptor_pool,
      allow_unknown_field=allow_unknown_field)
+
+
def ParseLines(lines,
               message,
               allow_unknown_extension=False,
               allow_field_number=False,
               descriptor_pool=None,
               allow_unknown_field=False):
  """Parses a text representation of a protocol message into a message.

  See Parse() for caveats.

  Args:
    lines: An iterable of lines of a message's text representation.
    message: A protocol buffer message to merge into.
    allow_unknown_extension: if True, skip over missing extensions and keep
      parsing
    allow_field_number: if True, both field number and field name are allowed.
    descriptor_pool: A DescriptorPool used to resolve Any types.
    allow_unknown_field: if True, skip over unknown field and keep
      parsing. Avoid to use this option if possible. It may hide some
      errors (e.g. spelling error on field name)

  Returns:
    The same message passed as argument.

  Raises:
    ParseError: On text parsing problems.
  """
  # Strict (Parse) semantics are selected inside _Parser.ParseLines.
  parser = _Parser(allow_unknown_extension,
                   allow_field_number,
                   descriptor_pool=descriptor_pool,
                   allow_unknown_field=allow_unknown_field)
  return parser.ParseLines(lines, message)
+
+
def MergeLines(lines,
               message,
               allow_unknown_extension=False,
               allow_field_number=False,
               descriptor_pool=None,
               allow_unknown_field=False):
  """Parses a text representation of a protocol message into a message.

  See Merge() for more details.

  Args:
    lines: An iterable of lines of a message's text representation.
    message: A protocol buffer message to merge into.
    allow_unknown_extension: if True, skip over missing extensions and keep
      parsing
    allow_field_number: if True, both field number and field name are allowed.
    descriptor_pool: A DescriptorPool used to resolve Any types.
    allow_unknown_field: if True, skip over unknown field and keep
      parsing. Avoid to use this option if possible. It may hide some
      errors (e.g. spelling error on field name)

  Returns:
    The same message passed as argument.

  Raises:
    ParseError: On text parsing problems.
  """
  # Last-value-wins (Merge) semantics are selected inside _Parser.MergeLines.
  parser = _Parser(allow_unknown_extension,
                   allow_field_number,
                   descriptor_pool=descriptor_pool,
                   allow_unknown_field=allow_unknown_field)
  return parser.MergeLines(lines, message)
+
+
+class _Parser(object):
+ """Text format parser for protocol message."""
+
  def __init__(self,
               allow_unknown_extension=False,
               allow_field_number=False,
               descriptor_pool=None,
               allow_unknown_field=False):
    """Initialize the parser.

    Args:
      allow_unknown_extension: If True, skip over missing extensions and keep
        parsing.
      allow_field_number: If True, a field may be identified by number as
        well as by name.
      descriptor_pool: A DescriptorPool used to resolve Any types.
      allow_unknown_field: If True, skip over unknown fields and keep parsing
        (may hide spelling errors in field names).
    """
    self.allow_unknown_extension = allow_unknown_extension
    self.allow_field_number = allow_field_number
    self.descriptor_pool = descriptor_pool
    self.allow_unknown_field = allow_unknown_field
+
  def ParseLines(self, lines, message):
    """Parses a text representation of a protocol message into a message."""
    # Strict mode: per the module-level Parse() docs, repeated values for a
    # non-repeated field are an error rather than last-value-wins.
    self._allow_multiple_scalars = False
    self._ParseOrMerge(lines, message)
    return message
+
  def MergeLines(self, lines, message):
    """Merges a text representation of a protocol message into a message."""
    # Merge mode: per the module-level Merge() docs, a repeated value for a
    # non-repeated field is allowed and the last one wins.
    self._allow_multiple_scalars = True
    self._ParseOrMerge(lines, message)
    return message
+
  def _ParseOrMerge(self, lines, message):
    """Converts a text representation of a protocol message into a message.

    Args:
      lines: Lines of a message's text representation.
      message: A protocol buffer message to merge into.

    Raises:
      ParseError: On text parsing problems.
    """
    # Tokenize expects native str lines; decode any bytes lines lazily.
    str_lines = (
        line if isinstance(line, str) else line.decode('utf-8')
        for line in lines)
    tokenizer = Tokenizer(str_lines)
    # Consume top-level fields one at a time until input is exhausted.
    while not tokenizer.AtEnd():
      self._MergeField(tokenizer, message)
+
  def _MergeField(self, tokenizer, message):
    """Merges a single protocol message field into a message.

    Args:
      tokenizer: A tokenizer to parse the field name and values.
      message: A protocol message to record the data.

    Raises:
      ParseError: In case of text parsing problems.
    """
    message_descriptor = message.DESCRIPTOR
    # Special case: an expanded Any, written as '[type.url/full.Name] { ... }'.
    # The sub-message is parsed recursively and then Pack()ed into 'message'.
    if (message_descriptor.full_name == _ANY_FULL_TYPE_NAME and
        tokenizer.TryConsume('[')):
      type_url_prefix, packed_type_name = self._ConsumeAnyTypeUrl(tokenizer)
      tokenizer.Consume(']')
      tokenizer.TryConsume(':')
      if tokenizer.TryConsume('<'):
        expanded_any_end_token = '>'
      else:
        tokenizer.Consume('{')
        expanded_any_end_token = '}'
      expanded_any_sub_message = _BuildMessageFromTypeName(packed_type_name,
                                                           self.descriptor_pool)
      if not expanded_any_sub_message:
        raise ParseError('Type %s not found in descriptor pool' %
                         packed_type_name)
      while not tokenizer.TryConsume(expanded_any_end_token):
        if tokenizer.AtEnd():
          raise tokenizer.ParseErrorPreviousToken('Expected "%s".' %
                                                  (expanded_any_end_token,))
        self._MergeField(tokenizer, expanded_any_sub_message)
      deterministic = False

      message.Pack(expanded_any_sub_message,
                   type_url_prefix=type_url_prefix,
                   deterministic=deterministic)
      return

    # Extension field: '[full.extension.name]'.
    if tokenizer.TryConsume('['):
      name = [tokenizer.ConsumeIdentifier()]
      while tokenizer.TryConsume('.'):
        name.append(tokenizer.ConsumeIdentifier())
      name = '.'.join(name)

      if not message_descriptor.is_extendable:
        raise tokenizer.ParseErrorPreviousToken(
            'Message type "%s" does not have extensions.' %
            message_descriptor.full_name)
      # pylint: disable=protected-access
      field = message.Extensions._FindExtensionByName(name)
      # pylint: enable=protected-access

      if not field:
        if self.allow_unknown_extension:
          field = None
        else:
          raise tokenizer.ParseErrorPreviousToken(
              'Extension "%s" not registered. '
              'Did you import the _pb2 module which defines it? '
              'If you are trying to place the extension in the MessageSet '
              'field of another message that is in an Any or MessageSet field, '
              'that message\'s _pb2 module must be imported as well' % name)
      elif message_descriptor != field.containing_type:
        raise tokenizer.ParseErrorPreviousToken(
            'Extension "%s" does not extend message type "%s".' %
            (name, message_descriptor.full_name))

      tokenizer.Consume(']')

    else:
      # Regular field, identified by name (or by number when allowed).
      name = tokenizer.ConsumeIdentifierOrNumber()
      if self.allow_field_number and name.isdigit():
        number = ParseInteger(name, True, True)
        field = message_descriptor.fields_by_number.get(number, None)
        if not field and message_descriptor.is_extendable:
          field = message.Extensions._FindExtensionByNumber(number)
      else:
        field = message_descriptor.fields_by_name.get(name, None)

        # Group names are expected to be capitalized as they appear in the
        # .proto file, which actually matches their type names, not their field
        # names.
        if not field:
          field = message_descriptor.fields_by_name.get(name.lower(), None)
          if field and field.type != descriptor.FieldDescriptor.TYPE_GROUP:
            field = None

        if (field and field.type == descriptor.FieldDescriptor.TYPE_GROUP and
            field.message_type.name != name):
          field = None

      if not field and not self.allow_unknown_field:
        raise tokenizer.ParseErrorPreviousToken(
            'Message type "%s" has no field named "%s".' %
            (message_descriptor.full_name, name))

    if field:
      if not self._allow_multiple_scalars and field.containing_oneof:
        # Check if there's a different field set in this oneof.
        # Note that we ignore the case if the same field was set before, and we
        # apply _allow_multiple_scalars to non-scalar fields as well.
        which_oneof = message.WhichOneof(field.containing_oneof.name)
        if which_oneof is not None and which_oneof != field.name:
          raise tokenizer.ParseErrorPreviousToken(
              'Field "%s" is specified along with field "%s", another member '
              'of oneof "%s" for message type "%s".' %
              (field.name, which_oneof, field.containing_oneof.name,
               message_descriptor.full_name))

      # The colon is mandatory for scalar fields, optional for messages.
      if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
        tokenizer.TryConsume(':')
        merger = self._MergeMessageField
      else:
        tokenizer.Consume(':')
        merger = self._MergeScalarField

      if (field.label == descriptor.FieldDescriptor.LABEL_REPEATED and
          tokenizer.TryConsume('[')):
        # Short repeated format, e.g. "foo: [1, 2, 3]"
        if not tokenizer.TryConsume(']'):
          while True:
            merger(tokenizer, message, field)
            if tokenizer.TryConsume(']'):
              break
            tokenizer.Consume(',')

      else:
        merger(tokenizer, message, field)

    else:  # Proto field is unknown.
      assert (self.allow_unknown_extension or self.allow_unknown_field)
      _SkipFieldContents(tokenizer)

    # For historical reasons, fields may optionally be separated by commas or
    # semicolons.
    if not tokenizer.TryConsume(','):
      tokenizer.TryConsume(';')
+
+
+ def _ConsumeAnyTypeUrl(self, tokenizer):
+ """Consumes a google.protobuf.Any type URL and returns the type name."""
+ # Consume "type.googleapis.com/".
+ prefix = [tokenizer.ConsumeIdentifier()]
+ tokenizer.Consume('.')
+ prefix.append(tokenizer.ConsumeIdentifier())
+ tokenizer.Consume('.')
+ prefix.append(tokenizer.ConsumeIdentifier())
+ tokenizer.Consume('/')
+ # Consume the fully-qualified type name.
+ name = [tokenizer.ConsumeIdentifier()]
+ while tokenizer.TryConsume('.'):
+ name.append(tokenizer.ConsumeIdentifier())
+ return '.'.join(prefix), '.'.join(name)
+
+ def _MergeMessageField(self, tokenizer, message, field):
+ """Merges a single scalar field into a message.
+
+ Args:
+ tokenizer: A tokenizer to parse the field value.
+ message: The message of which field is a member.
+ field: The descriptor of the field to be merged.
+
+ Raises:
+ ParseError: In case of text parsing problems.
+ """
+ is_map_entry = _IsMapEntry(field)
+
+ if tokenizer.TryConsume('<'):
+ end_token = '>'
+ else:
+ tokenizer.Consume('{')
+ end_token = '}'
+
+ if field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
+ if field.is_extension:
+ sub_message = message.Extensions[field].add()
+ elif is_map_entry:
+ sub_message = getattr(message, field.name).GetEntryClass()()
+ else:
+ sub_message = getattr(message, field.name).add()
+ else:
+ if field.is_extension:
+ if (not self._allow_multiple_scalars and
+ message.HasExtension(field)):
+ raise tokenizer.ParseErrorPreviousToken(
+ 'Message type "%s" should not have multiple "%s" extensions.' %
+ (message.DESCRIPTOR.full_name, field.full_name))
+ sub_message = message.Extensions[field]
+ else:
+ # Also apply _allow_multiple_scalars to message field.
+ # TODO(jieluo): Change to _allow_singular_overwrites.
+ if (not self._allow_multiple_scalars and
+ message.HasField(field.name)):
+ raise tokenizer.ParseErrorPreviousToken(
+ 'Message type "%s" should not have multiple "%s" fields.' %
+ (message.DESCRIPTOR.full_name, field.name))
+ sub_message = getattr(message, field.name)
+ sub_message.SetInParent()
+
+ while not tokenizer.TryConsume(end_token):
+ if tokenizer.AtEnd():
+ raise tokenizer.ParseErrorPreviousToken('Expected "%s".' % (end_token,))
+ self._MergeField(tokenizer, sub_message)
+
+ if is_map_entry:
+ value_cpptype = field.message_type.fields_by_name['value'].cpp_type
+ if value_cpptype == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
+ value = getattr(message, field.name)[sub_message.key]
+ value.CopyFrom(sub_message.value)
+ else:
+ getattr(message, field.name)[sub_message.key] = sub_message.value
+
+ @staticmethod
+ def _IsProto3Syntax(message):
+ message_descriptor = message.DESCRIPTOR
+ return (hasattr(message_descriptor, 'syntax') and
+ message_descriptor.syntax == 'proto3')
+
+ def _MergeScalarField(self, tokenizer, message, field):
+ """Merges a single scalar field into a message.
+
+ Args:
+ tokenizer: A tokenizer to parse the field value.
+ message: A protocol message to record the data.
+ field: The descriptor of the field to be merged.
+
+ Raises:
+ ParseError: In case of text parsing problems.
+ RuntimeError: On runtime errors.
+ """
+ _ = self.allow_unknown_extension
+ value = None
+
+ if field.type in (descriptor.FieldDescriptor.TYPE_INT32,
+ descriptor.FieldDescriptor.TYPE_SINT32,
+ descriptor.FieldDescriptor.TYPE_SFIXED32):
+ value = _ConsumeInt32(tokenizer)
+ elif field.type in (descriptor.FieldDescriptor.TYPE_INT64,
+ descriptor.FieldDescriptor.TYPE_SINT64,
+ descriptor.FieldDescriptor.TYPE_SFIXED64):
+ value = _ConsumeInt64(tokenizer)
+ elif field.type in (descriptor.FieldDescriptor.TYPE_UINT32,
+ descriptor.FieldDescriptor.TYPE_FIXED32):
+ value = _ConsumeUint32(tokenizer)
+ elif field.type in (descriptor.FieldDescriptor.TYPE_UINT64,
+ descriptor.FieldDescriptor.TYPE_FIXED64):
+ value = _ConsumeUint64(tokenizer)
+ elif field.type in (descriptor.FieldDescriptor.TYPE_FLOAT,
+ descriptor.FieldDescriptor.TYPE_DOUBLE):
+ value = tokenizer.ConsumeFloat()
+ elif field.type == descriptor.FieldDescriptor.TYPE_BOOL:
+ value = tokenizer.ConsumeBool()
+ elif field.type == descriptor.FieldDescriptor.TYPE_STRING:
+ value = tokenizer.ConsumeString()
+ elif field.type == descriptor.FieldDescriptor.TYPE_BYTES:
+ value = tokenizer.ConsumeByteString()
+ elif field.type == descriptor.FieldDescriptor.TYPE_ENUM:
+ value = tokenizer.ConsumeEnum(field)
+ else:
+ raise RuntimeError('Unknown field type %d' % field.type)
+
+ if field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
+ if field.is_extension:
+ message.Extensions[field].append(value)
+ else:
+ getattr(message, field.name).append(value)
+ else:
+ if field.is_extension:
+ if (not self._allow_multiple_scalars and
+ not self._IsProto3Syntax(message) and
+ message.HasExtension(field)):
+ raise tokenizer.ParseErrorPreviousToken(
+ 'Message type "%s" should not have multiple "%s" extensions.' %
+ (message.DESCRIPTOR.full_name, field.full_name))
+ else:
+ message.Extensions[field] = value
+ else:
+ duplicate_error = False
+ if not self._allow_multiple_scalars:
+ if self._IsProto3Syntax(message):
+ # Proto3 doesn't represent presence so we try best effort to check
+ # multiple scalars by compare to default values.
+ duplicate_error = bool(getattr(message, field.name))
+ else:
+ duplicate_error = message.HasField(field.name)
+
+ if duplicate_error:
+ raise tokenizer.ParseErrorPreviousToken(
+ 'Message type "%s" should not have multiple "%s" fields.' %
+ (message.DESCRIPTOR.full_name, field.name))
+ else:
+ setattr(message, field.name, value)
+
+
+def _SkipFieldContents(tokenizer):
+ """Skips over contents (value or message) of a field.
+
+ Args:
+ tokenizer: A tokenizer to parse the field name and values.
+ """
+ # Try to guess the type of this field.
+ # If this field is not a message, there should be a ":" between the
+ # field name and the field value and also the field value should not
+ # start with "{" or "<" which indicates the beginning of a message body.
+ # If there is no ":" or there is a "{" or "<" after ":", this field has
+ # to be a message or the input is ill-formed.
+ if tokenizer.TryConsume(':') and not tokenizer.LookingAt(
+ '{') and not tokenizer.LookingAt('<'):
+ _SkipFieldValue(tokenizer)
+ else:
+ _SkipFieldMessage(tokenizer)
+
+
+def _SkipField(tokenizer):
+ """Skips over a complete field (name and value/message).
+
+ Args:
+ tokenizer: A tokenizer to parse the field name and values.
+ """
+ if tokenizer.TryConsume('['):
+ # Consume extension name.
+ tokenizer.ConsumeIdentifier()
+ while tokenizer.TryConsume('.'):
+ tokenizer.ConsumeIdentifier()
+ tokenizer.Consume(']')
+ else:
+ tokenizer.ConsumeIdentifierOrNumber()
+
+ _SkipFieldContents(tokenizer)
+
+ # For historical reasons, fields may optionally be separated by commas or
+ # semicolons.
+ if not tokenizer.TryConsume(','):
+ tokenizer.TryConsume(';')
+
+
+def _SkipFieldMessage(tokenizer):
+ """Skips over a field message.
+
+ Args:
+ tokenizer: A tokenizer to parse the field name and values.
+ """
+
+ if tokenizer.TryConsume('<'):
+ delimiter = '>'
+ else:
+ tokenizer.Consume('{')
+ delimiter = '}'
+
+ while not tokenizer.LookingAt('>') and not tokenizer.LookingAt('}'):
+ _SkipField(tokenizer)
+
+ tokenizer.Consume(delimiter)
+
+
+def _SkipFieldValue(tokenizer):
+ """Skips over a field value.
+
+ Args:
+ tokenizer: A tokenizer to parse the field name and values.
+
+ Raises:
+ ParseError: In case an invalid field value is found.
+ """
+ # String/bytes tokens can come in multiple adjacent string literals.
+ # If we can consume one, consume as many as we can.
+ if tokenizer.TryConsumeByteString():
+ while tokenizer.TryConsumeByteString():
+ pass
+ return
+
+ if (not tokenizer.TryConsumeIdentifier() and
+ not _TryConsumeInt64(tokenizer) and not _TryConsumeUint64(tokenizer) and
+ not tokenizer.TryConsumeFloat()):
+ raise ParseError('Invalid field value: ' + tokenizer.token)
+
+
+class Tokenizer(object):
+ """Protocol buffer text representation tokenizer.
+
+ This class handles the lower level string parsing by splitting it into
+ meaningful tokens.
+
+ It was directly ported from the Java protocol buffer API.
+ """
+
+ _WHITESPACE = re.compile(r'\s+')
+ _COMMENT = re.compile(r'(\s*#.*$)', re.MULTILINE)
+ _WHITESPACE_OR_COMMENT = re.compile(r'(\s|(#.*$))+', re.MULTILINE)
+ _TOKEN = re.compile('|'.join([
+ r'[a-zA-Z_][0-9a-zA-Z_+-]*', # an identifier
+ r'([0-9+-]|(\.[0-9]))[0-9a-zA-Z_.+-]*', # a number
+ ] + [ # quoted str for each quote mark
+ # Avoid backtracking! https://stackoverflow.com/a/844267
+ r'{qt}[^{qt}\n\\]*((\\.)+[^{qt}\n\\]*)*({qt}|\\?$)'.format(qt=mark)
+ for mark in _QUOTES
+ ]))
+
+ _IDENTIFIER = re.compile(r'[^\d\W]\w*')
+ _IDENTIFIER_OR_NUMBER = re.compile(r'\w+')
+
+ def __init__(self, lines, skip_comments=True):
+ self._position = 0
+ self._line = -1
+ self._column = 0
+ self._token_start = None
+ self.token = ''
+ self._lines = iter(lines)
+ self._current_line = ''
+ self._previous_line = 0
+ self._previous_column = 0
+ self._more_lines = True
+ self._skip_comments = skip_comments
+ self._whitespace_pattern = (skip_comments and self._WHITESPACE_OR_COMMENT
+ or self._WHITESPACE)
+ self._SkipWhitespace()
+ self.NextToken()
+
+ def LookingAt(self, token):
+ return self.token == token
+
+ def AtEnd(self):
+ """Checks the end of the text was reached.
+
+ Returns:
+ True iff the end was reached.
+ """
+ return not self.token
+
+ def _PopLine(self):
+ while len(self._current_line) <= self._column:
+ try:
+ self._current_line = next(self._lines)
+ except StopIteration:
+ self._current_line = ''
+ self._more_lines = False
+ return
+ else:
+ self._line += 1
+ self._column = 0
+
+ def _SkipWhitespace(self):
+ while True:
+ self._PopLine()
+ match = self._whitespace_pattern.match(self._current_line, self._column)
+ if not match:
+ break
+ length = len(match.group(0))
+ self._column += length
+
+ def TryConsume(self, token):
+ """Tries to consume a given piece of text.
+
+ Args:
+ token: Text to consume.
+
+ Returns:
+ True iff the text was consumed.
+ """
+ if self.token == token:
+ self.NextToken()
+ return True
+ return False
+
+ def Consume(self, token):
+ """Consumes a piece of text.
+
+ Args:
+ token: Text to consume.
+
+ Raises:
+ ParseError: If the text couldn't be consumed.
+ """
+ if not self.TryConsume(token):
+ raise self.ParseError('Expected "%s".' % token)
+
+ def ConsumeComment(self):
+ result = self.token
+ if not self._COMMENT.match(result):
+ raise self.ParseError('Expected comment.')
+ self.NextToken()
+ return result
+
+ def ConsumeCommentOrTrailingComment(self):
+ """Consumes a comment, returns a 2-tuple (trailing bool, comment str)."""
+
+ # Tokenizer initializes _previous_line and _previous_column to 0. As the
+ # tokenizer starts, it looks like there is a previous token on the line.
+ just_started = self._line == 0 and self._column == 0
+
+ before_parsing = self._previous_line
+ comment = self.ConsumeComment()
+
+ # A trailing comment is a comment on the same line than the previous token.
+ trailing = (self._previous_line == before_parsing
+ and not just_started)
+
+ return trailing, comment
+
+ def TryConsumeIdentifier(self):
+ try:
+ self.ConsumeIdentifier()
+ return True
+ except ParseError:
+ return False
+
+ def ConsumeIdentifier(self):
+ """Consumes protocol message field identifier.
+
+ Returns:
+ Identifier string.
+
+ Raises:
+ ParseError: If an identifier couldn't be consumed.
+ """
+ result = self.token
+ if not self._IDENTIFIER.match(result):
+ raise self.ParseError('Expected identifier.')
+ self.NextToken()
+ return result
+
+ def TryConsumeIdentifierOrNumber(self):
+ try:
+ self.ConsumeIdentifierOrNumber()
+ return True
+ except ParseError:
+ return False
+
+ def ConsumeIdentifierOrNumber(self):
+ """Consumes protocol message field identifier.
+
+ Returns:
+ Identifier string.
+
+ Raises:
+ ParseError: If an identifier couldn't be consumed.
+ """
+ result = self.token
+ if not self._IDENTIFIER_OR_NUMBER.match(result):
+ raise self.ParseError('Expected identifier or number, got %s.' % result)
+ self.NextToken()
+ return result
+
+ def TryConsumeInteger(self):
+ try:
+ self.ConsumeInteger()
+ return True
+ except ParseError:
+ return False
+
+ def ConsumeInteger(self):
+ """Consumes an integer number.
+
+ Returns:
+ The integer parsed.
+
+ Raises:
+ ParseError: If an integer couldn't be consumed.
+ """
+ try:
+ result = _ParseAbstractInteger(self.token)
+ except ValueError as e:
+ raise self.ParseError(str(e))
+ self.NextToken()
+ return result
+
+ def TryConsumeFloat(self):
+ try:
+ self.ConsumeFloat()
+ return True
+ except ParseError:
+ return False
+
+ def ConsumeFloat(self):
+ """Consumes an floating point number.
+
+ Returns:
+ The number parsed.
+
+ Raises:
+ ParseError: If a floating point number couldn't be consumed.
+ """
+ try:
+ result = ParseFloat(self.token)
+ except ValueError as e:
+ raise self.ParseError(str(e))
+ self.NextToken()
+ return result
+
+ def ConsumeBool(self):
+ """Consumes a boolean value.
+
+ Returns:
+ The bool parsed.
+
+ Raises:
+ ParseError: If a boolean value couldn't be consumed.
+ """
+ try:
+ result = ParseBool(self.token)
+ except ValueError as e:
+ raise self.ParseError(str(e))
+ self.NextToken()
+ return result
+
+ def TryConsumeByteString(self):
+ try:
+ self.ConsumeByteString()
+ return True
+ except ParseError:
+ return False
+
+ def ConsumeString(self):
+ """Consumes a string value.
+
+ Returns:
+ The string parsed.
+
+ Raises:
+ ParseError: If a string value couldn't be consumed.
+ """
+ the_bytes = self.ConsumeByteString()
+ try:
+ return str(the_bytes, 'utf-8')
+ except UnicodeDecodeError as e:
+ raise self._StringParseError(e)
+
+ def ConsumeByteString(self):
+ """Consumes a byte array value.
+
+ Returns:
+ The array parsed (as a string).
+
+ Raises:
+ ParseError: If a byte array value couldn't be consumed.
+ """
+ the_list = [self._ConsumeSingleByteString()]
+ while self.token and self.token[0] in _QUOTES:
+ the_list.append(self._ConsumeSingleByteString())
+ return b''.join(the_list)
+
+ def _ConsumeSingleByteString(self):
+ """Consume one token of a string literal.
+
+ String literals (whether bytes or text) can come in multiple adjacent
+ tokens which are automatically concatenated, like in C or Python. This
+ method only consumes one token.
+
+ Returns:
+ The token parsed.
+ Raises:
+ ParseError: When the wrong format data is found.
+ """
+ text = self.token
+ if len(text) < 1 or text[0] not in _QUOTES:
+ raise self.ParseError('Expected string but found: %r' % (text,))
+
+ if len(text) < 2 or text[-1] != text[0]:
+ raise self.ParseError('String missing ending quote: %r' % (text,))
+
+ try:
+ result = text_encoding.CUnescape(text[1:-1])
+ except ValueError as e:
+ raise self.ParseError(str(e))
+ self.NextToken()
+ return result
+
+ def ConsumeEnum(self, field):
+ try:
+ result = ParseEnum(field, self.token)
+ except ValueError as e:
+ raise self.ParseError(str(e))
+ self.NextToken()
+ return result
+
+ def ParseErrorPreviousToken(self, message):
+ """Creates and *returns* a ParseError for the previously read token.
+
+ Args:
+ message: A message to set for the exception.
+
+ Returns:
+ A ParseError instance.
+ """
+ return ParseError(message, self._previous_line + 1,
+ self._previous_column + 1)
+
+ def ParseError(self, message):
+ """Creates and *returns* a ParseError for the current token."""
+ return ParseError('\'' + self._current_line + '\': ' + message,
+ self._line + 1, self._column + 1)
+
+ def _StringParseError(self, e):
+ return self.ParseError('Couldn\'t parse string: ' + str(e))
+
+ def NextToken(self):
+ """Reads the next meaningful token."""
+ self._previous_line = self._line
+ self._previous_column = self._column
+
+ self._column += len(self.token)
+ self._SkipWhitespace()
+
+ if not self._more_lines:
+ self.token = ''
+ return
+
+ match = self._TOKEN.match(self._current_line, self._column)
+ if not match and not self._skip_comments:
+ match = self._COMMENT.match(self._current_line, self._column)
+ if match:
+ token = match.group(0)
+ self.token = token
+ else:
+ self.token = self._current_line[self._column]
+
+# Aliased so it can still be accessed by current visibility violators.
+# TODO(dbarnett): Migrate violators to textformat_tokenizer.
+_Tokenizer = Tokenizer # pylint: disable=invalid-name
+
+
+def _ConsumeInt32(tokenizer):
+ """Consumes a signed 32bit integer number from tokenizer.
+
+ Args:
+ tokenizer: A tokenizer used to parse the number.
+
+ Returns:
+ The integer parsed.
+
+ Raises:
+ ParseError: If a signed 32bit integer couldn't be consumed.
+ """
+ return _ConsumeInteger(tokenizer, is_signed=True, is_long=False)
+
+
+def _ConsumeUint32(tokenizer):
+ """Consumes an unsigned 32bit integer number from tokenizer.
+
+ Args:
+ tokenizer: A tokenizer used to parse the number.
+
+ Returns:
+ The integer parsed.
+
+ Raises:
+ ParseError: If an unsigned 32bit integer couldn't be consumed.
+ """
+ return _ConsumeInteger(tokenizer, is_signed=False, is_long=False)
+
+
+def _TryConsumeInt64(tokenizer):
+ try:
+ _ConsumeInt64(tokenizer)
+ return True
+ except ParseError:
+ return False
+
+
+def _ConsumeInt64(tokenizer):
+ """Consumes a signed 32bit integer number from tokenizer.
+
+ Args:
+ tokenizer: A tokenizer used to parse the number.
+
+ Returns:
+ The integer parsed.
+
+ Raises:
+ ParseError: If a signed 32bit integer couldn't be consumed.
+ """
+ return _ConsumeInteger(tokenizer, is_signed=True, is_long=True)
+
+
+def _TryConsumeUint64(tokenizer):
+ try:
+ _ConsumeUint64(tokenizer)
+ return True
+ except ParseError:
+ return False
+
+
+def _ConsumeUint64(tokenizer):
+ """Consumes an unsigned 64bit integer number from tokenizer.
+
+ Args:
+ tokenizer: A tokenizer used to parse the number.
+
+ Returns:
+ The integer parsed.
+
+ Raises:
+ ParseError: If an unsigned 64bit integer couldn't be consumed.
+ """
+ return _ConsumeInteger(tokenizer, is_signed=False, is_long=True)
+
+
+def _ConsumeInteger(tokenizer, is_signed=False, is_long=False):
+ """Consumes an integer number from tokenizer.
+
+ Args:
+ tokenizer: A tokenizer used to parse the number.
+ is_signed: True if a signed integer must be parsed.
+ is_long: True if a long integer must be parsed.
+
+ Returns:
+ The integer parsed.
+
+ Raises:
+ ParseError: If an integer with given characteristics couldn't be consumed.
+ """
+ try:
+ result = ParseInteger(tokenizer.token, is_signed=is_signed, is_long=is_long)
+ except ValueError as e:
+ raise tokenizer.ParseError(str(e))
+ tokenizer.NextToken()
+ return result
+
+
+def ParseInteger(text, is_signed=False, is_long=False):
+ """Parses an integer.
+
+ Args:
+ text: The text to parse.
+ is_signed: True if a signed integer must be parsed.
+ is_long: True if a long integer must be parsed.
+
+ Returns:
+ The integer value.
+
+ Raises:
+ ValueError: Thrown Iff the text is not a valid integer.
+ """
+ # Do the actual parsing. Exception handling is propagated to caller.
+ result = _ParseAbstractInteger(text)
+
+ # Check if the integer is sane. Exceptions handled by callers.
+ checker = _INTEGER_CHECKERS[2 * int(is_long) + int(is_signed)]
+ checker.CheckValue(result)
+ return result
+
+
+def _ParseAbstractInteger(text):
+ """Parses an integer without checking size/signedness.
+
+ Args:
+ text: The text to parse.
+
+ Returns:
+ The integer value.
+
+ Raises:
+ ValueError: Thrown Iff the text is not a valid integer.
+ """
+ # Do the actual parsing. Exception handling is propagated to caller.
+ orig_text = text
+ c_octal_match = re.match(r'(-?)0(\d+)$', text)
+ if c_octal_match:
+ # Python 3 no longer supports 0755 octal syntax without the 'o', so
+ # we always use the '0o' prefix for multi-digit numbers starting with 0.
+ text = c_octal_match.group(1) + '0o' + c_octal_match.group(2)
+ try:
+ return int(text, 0)
+ except ValueError:
+ raise ValueError('Couldn\'t parse integer: %s' % orig_text)
+
+
+def ParseFloat(text):
+ """Parse a floating point number.
+
+ Args:
+ text: Text to parse.
+
+ Returns:
+ The number parsed.
+
+ Raises:
+ ValueError: If a floating point number couldn't be parsed.
+ """
+ try:
+ # Assume Python compatible syntax.
+ return float(text)
+ except ValueError:
+ # Check alternative spellings.
+ if _FLOAT_INFINITY.match(text):
+ if text[0] == '-':
+ return float('-inf')
+ else:
+ return float('inf')
+ elif _FLOAT_NAN.match(text):
+ return float('nan')
+ else:
+ # assume '1.0f' format
+ try:
+ return float(text.rstrip('f'))
+ except ValueError:
+ raise ValueError('Couldn\'t parse float: %s' % text)
+
+
+def ParseBool(text):
+ """Parse a boolean value.
+
+ Args:
+ text: Text to parse.
+
+ Returns:
+ Boolean values parsed
+
+ Raises:
+ ValueError: If text is not a valid boolean.
+ """
+ if text in ('true', 't', '1', 'True'):
+ return True
+ elif text in ('false', 'f', '0', 'False'):
+ return False
+ else:
+ raise ValueError('Expected "true" or "false".')
+
+
+def ParseEnum(field, value):
+ """Parse an enum value.
+
+ The value can be specified by a number (the enum value), or by
+ a string literal (the enum name).
+
+ Args:
+ field: Enum field descriptor.
+ value: String value.
+
+ Returns:
+ Enum value number.
+
+ Raises:
+ ValueError: If the enum value could not be parsed.
+ """
+ enum_descriptor = field.enum_type
+ try:
+ number = int(value, 0)
+ except ValueError:
+ # Identifier.
+ enum_value = enum_descriptor.values_by_name.get(value, None)
+ if enum_value is None:
+ raise ValueError('Enum type "%s" has no value named %s.' %
+ (enum_descriptor.full_name, value))
+ else:
+ # Numeric value.
+ if hasattr(field.file, 'syntax'):
+ # Attribute is checked for compatibility.
+ if field.file.syntax == 'proto3':
+ # Proto3 accept numeric unknown enums.
+ return number
+ enum_value = enum_descriptor.values_by_number.get(number, None)
+ if enum_value is None:
+ raise ValueError('Enum type "%s" has no value with number %d.' %
+ (enum_descriptor.full_name, number))
+ return enum_value.number
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/timestamp_pb2.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/timestamp_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..48e6e123df705974dc6ed93cb8395feae6333730
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/timestamp_pb2.py
@@ -0,0 +1,35 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/protobuf/timestamp.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1fgoogle/protobuf/timestamp.proto\x12\x0fgoogle.protobuf\"+\n\tTimestamp\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42\x85\x01\n\x13\x63om.google.protobufB\x0eTimestampProtoP\x01Z2google.golang.org/protobuf/types/known/timestamppb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
+
+
+
+_TIMESTAMP = DESCRIPTOR.message_types_by_name['Timestamp']
+Timestamp = _reflection.GeneratedProtocolMessageType('Timestamp', (_message.Message,), {
+ 'DESCRIPTOR' : _TIMESTAMP,
+ '__module__' : 'google.protobuf.timestamp_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.Timestamp)
+ })
+_sym_db.RegisterMessage(Timestamp)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\016TimestampProtoP\001Z2google.golang.org/protobuf/types/known/timestamppb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
+ _TIMESTAMP._serialized_start=52
+ _TIMESTAMP._serialized_end=95
+# @@protoc_insertion_point(module_scope)
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/type_pb2.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/type_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..a1a45371e6e9ce07af21fe97a2f4187461986bd4
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/type_pb2.py
@@ -0,0 +1,90 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/protobuf/type.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2
+from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1agoogle/protobuf/type.proto\x12\x0fgoogle.protobuf\x1a\x19google/protobuf/any.proto\x1a$google/protobuf/source_context.proto\"\xd7\x01\n\x04Type\x12\x0c\n\x04name\x18\x01 \x01(\t\x12&\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x16.google.protobuf.Field\x12\x0e\n\x06oneofs\x18\x03 \x03(\t\x12(\n\x07options\x18\x04 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12\'\n\x06syntax\x18\x06 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x05\n\x05\x46ield\x12)\n\x04kind\x18\x01 \x01(\x0e\x32\x1b.google.protobuf.Field.Kind\x12\x37\n\x0b\x63\x61rdinality\x18\x02 \x01(\x0e\x32\".google.protobuf.Field.Cardinality\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12\x0c\n\x04name\x18\x04 \x01(\t\x12\x10\n\x08type_url\x18\x06 \x01(\t\x12\x13\n\x0boneof_index\x18\x07 \x01(\x05\x12\x0e\n\x06packed\x18\x08 \x01(\x08\x12(\n\x07options\x18\t \x03(\x0b\x32\x17.google.protobuf.Option\x12\x11\n\tjson_name\x18\n \x01(\t\x12\x15\n\rdefault_value\x18\x0b \x01(\t\"\xc8\x02\n\x04Kind\x12\x10\n\x0cTYPE_UNKNOWN\x10\x00\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"t\n\x0b\x43\x61rdinality\x12\x17\n\x13\x43\x41RDINALITY_UNKNOWN\x10\x00\x12\x18\n\x14\x43\x41RDINALITY_OPTIONAL\x10\x01\x12\x18\n\x14\x43\x41RDINALITY_REQUIRED\x10\x02\x12\x18\n\x14\x43\x41RDINALITY_REPEATED\x10\x03\"\xce\x01\n\x04\x45num\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\tenumvalue\x18\x02 
\x03(\x0b\x32\x1a.google.protobuf.EnumValue\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x36\n\x0esource_context\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12\'\n\x06syntax\x18\x05 \x01(\x0e\x32\x17.google.protobuf.Syntax\"S\n\tEnumValue\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\";\n\x06Option\x12\x0c\n\x04name\x18\x01 \x01(\t\x12#\n\x05value\x18\x02 \x01(\x0b\x32\x14.google.protobuf.Any*.\n\x06Syntax\x12\x11\n\rSYNTAX_PROTO2\x10\x00\x12\x11\n\rSYNTAX_PROTO3\x10\x01\x42{\n\x13\x63om.google.protobufB\tTypeProtoP\x01Z-google.golang.org/protobuf/types/known/typepb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
+
+_SYNTAX = DESCRIPTOR.enum_types_by_name['Syntax']
+Syntax = enum_type_wrapper.EnumTypeWrapper(_SYNTAX)
+SYNTAX_PROTO2 = 0
+SYNTAX_PROTO3 = 1
+
+
+_TYPE = DESCRIPTOR.message_types_by_name['Type']
+_FIELD = DESCRIPTOR.message_types_by_name['Field']
+_ENUM = DESCRIPTOR.message_types_by_name['Enum']
+_ENUMVALUE = DESCRIPTOR.message_types_by_name['EnumValue']
+_OPTION = DESCRIPTOR.message_types_by_name['Option']
+_FIELD_KIND = _FIELD.enum_types_by_name['Kind']
+_FIELD_CARDINALITY = _FIELD.enum_types_by_name['Cardinality']
+Type = _reflection.GeneratedProtocolMessageType('Type', (_message.Message,), {
+ 'DESCRIPTOR' : _TYPE,
+ '__module__' : 'google.protobuf.type_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.Type)
+ })
+_sym_db.RegisterMessage(Type)
+
+Field = _reflection.GeneratedProtocolMessageType('Field', (_message.Message,), {
+ 'DESCRIPTOR' : _FIELD,
+ '__module__' : 'google.protobuf.type_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.Field)
+ })
+_sym_db.RegisterMessage(Field)
+
+Enum = _reflection.GeneratedProtocolMessageType('Enum', (_message.Message,), {
+ 'DESCRIPTOR' : _ENUM,
+ '__module__' : 'google.protobuf.type_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.Enum)
+ })
+_sym_db.RegisterMessage(Enum)
+
+EnumValue = _reflection.GeneratedProtocolMessageType('EnumValue', (_message.Message,), {
+ 'DESCRIPTOR' : _ENUMVALUE,
+ '__module__' : 'google.protobuf.type_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.EnumValue)
+ })
+_sym_db.RegisterMessage(EnumValue)
+
+Option = _reflection.GeneratedProtocolMessageType('Option', (_message.Message,), {
+ 'DESCRIPTOR' : _OPTION,
+ '__module__' : 'google.protobuf.type_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.Option)
+ })
+_sym_db.RegisterMessage(Option)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\tTypeProtoP\001Z-google.golang.org/protobuf/types/known/typepb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
+ _SYNTAX._serialized_start=1413
+ _SYNTAX._serialized_end=1459
+ _TYPE._serialized_start=113
+ _TYPE._serialized_end=328
+ _FIELD._serialized_start=331
+ _FIELD._serialized_end=1056
+ _FIELD_KIND._serialized_start=610
+ _FIELD_KIND._serialized_end=938
+ _FIELD_CARDINALITY._serialized_start=940
+ _FIELD_CARDINALITY._serialized_end=1056
+ _ENUM._serialized_start=1059
+ _ENUM._serialized_end=1265
+ _ENUMVALUE._serialized_start=1267
+ _ENUMVALUE._serialized_end=1350
+ _OPTION._serialized_start=1352
+ _OPTION._serialized_end=1411
+# @@protoc_insertion_point(module_scope)
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/util/__init__.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/util/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/util/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/util/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..253b6e66a490d6b9efb473d0bd4cd8742a2f72c5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/util/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/util/__pycache__/json_format_pb2.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/util/__pycache__/json_format_pb2.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3c541af1518b05ffc0b2ec93780af3257df7ecd9
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/util/__pycache__/json_format_pb2.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/util/__pycache__/json_format_proto3_pb2.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/google/protobuf/util/__pycache__/json_format_proto3_pb2.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3edeae0a58201cb6f8f987135ece4bfa96d0e3d0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/google/protobuf/util/__pycache__/json_format_proto3_pb2.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/util/json_format_pb2.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/util/json_format_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..f312e647857e402cbd242a3f59d1de43bfc5fe8b
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/util/json_format_pb2.py
@@ -0,0 +1,236 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/protobuf/util/json_format.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&google/protobuf/util/json_format.proto\x12\x11protobuf_unittest\"\x89\x01\n\x13TestFlagsAndStrings\x12\t\n\x01\x41\x18\x01 \x02(\x05\x12K\n\rrepeatedgroup\x18\x02 \x03(\n24.protobuf_unittest.TestFlagsAndStrings.RepeatedGroup\x1a\x1a\n\rRepeatedGroup\x12\t\n\x01\x66\x18\x03 \x02(\t\"!\n\x14TestBase64ByteArrays\x12\t\n\x01\x61\x18\x01 \x02(\x0c\"G\n\x12TestJavaScriptJSON\x12\t\n\x01\x61\x18\x01 \x01(\x05\x12\r\n\x05\x66inal\x18\x02 \x01(\x02\x12\n\n\x02in\x18\x03 \x01(\t\x12\x0b\n\x03Var\x18\x04 \x01(\t\"Q\n\x18TestJavaScriptOrderJSON1\x12\t\n\x01\x64\x18\x01 \x01(\x05\x12\t\n\x01\x63\x18\x02 \x01(\x05\x12\t\n\x01x\x18\x03 \x01(\x08\x12\t\n\x01\x62\x18\x04 \x01(\x05\x12\t\n\x01\x61\x18\x05 \x01(\x05\"\x89\x01\n\x18TestJavaScriptOrderJSON2\x12\t\n\x01\x64\x18\x01 \x01(\x05\x12\t\n\x01\x63\x18\x02 \x01(\x05\x12\t\n\x01x\x18\x03 \x01(\x08\x12\t\n\x01\x62\x18\x04 \x01(\x05\x12\t\n\x01\x61\x18\x05 \x01(\x05\x12\x36\n\x01z\x18\x06 \x03(\x0b\x32+.protobuf_unittest.TestJavaScriptOrderJSON1\"$\n\x0cTestLargeInt\x12\t\n\x01\x61\x18\x01 \x02(\x03\x12\t\n\x01\x62\x18\x02 \x02(\x04\"\xa0\x01\n\x0bTestNumbers\x12\x30\n\x01\x61\x18\x01 \x01(\x0e\x32%.protobuf_unittest.TestNumbers.MyType\x12\t\n\x01\x62\x18\x02 \x01(\x05\x12\t\n\x01\x63\x18\x03 \x01(\x02\x12\t\n\x01\x64\x18\x04 \x01(\x08\x12\t\n\x01\x65\x18\x05 \x01(\x01\x12\t\n\x01\x66\x18\x06 \x01(\r\"(\n\x06MyType\x12\x06\n\x02OK\x10\x00\x12\x0b\n\x07WARNING\x10\x01\x12\t\n\x05\x45RROR\x10\x02\"T\n\rTestCamelCase\x12\x14\n\x0cnormal_field\x18\x01 \x01(\t\x12\x15\n\rCAPITAL_FIELD\x18\x02 \x01(\x05\x12\x16\n\x0e\x43\x61melCaseField\x18\x03 \x01(\x05\"|\n\x0bTestBoolMap\x12=\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32+.protobuf_unittest.TestBoolMap.BoolMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"O\n\rTestRecursion\x12\r\n\x05value\x18\x01 \x01(\x05\x12/\n\x05\x63hild\x18\x02 \x01(\x0b\x32 
.protobuf_unittest.TestRecursion\"\x86\x01\n\rTestStringMap\x12\x43\n\nstring_map\x18\x01 \x03(\x0b\x32/.protobuf_unittest.TestStringMap.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xc4\x01\n\x14TestStringSerializer\x12\x15\n\rscalar_string\x18\x01 \x01(\t\x12\x17\n\x0frepeated_string\x18\x02 \x03(\t\x12J\n\nstring_map\x18\x03 \x03(\x0b\x32\x36.protobuf_unittest.TestStringSerializer.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"$\n\x18TestMessageWithExtension*\x08\x08\x64\x10\x80\x80\x80\x80\x02\"z\n\rTestExtension\x12\r\n\x05value\x18\x01 \x01(\t2Z\n\x03\x65xt\x12+.protobuf_unittest.TestMessageWithExtension\x18\x64 \x01(\x0b\x32 .protobuf_unittest.TestExtension\"Q\n\x14TestDefaultEnumValue\x12\x39\n\nenum_value\x18\x01 \x01(\x0e\x32\x1c.protobuf_unittest.EnumValue:\x07\x44\x45\x46\x41ULT*2\n\tEnumValue\x12\x0c\n\x08PROTOCOL\x10\x00\x12\n\n\x06\x42UFFER\x10\x01\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x02')
+
+_ENUMVALUE = DESCRIPTOR.enum_types_by_name['EnumValue']
+EnumValue = enum_type_wrapper.EnumTypeWrapper(_ENUMVALUE)
+PROTOCOL = 0
+BUFFER = 1
+DEFAULT = 2
+
+
+_TESTFLAGSANDSTRINGS = DESCRIPTOR.message_types_by_name['TestFlagsAndStrings']
+_TESTFLAGSANDSTRINGS_REPEATEDGROUP = _TESTFLAGSANDSTRINGS.nested_types_by_name['RepeatedGroup']
+_TESTBASE64BYTEARRAYS = DESCRIPTOR.message_types_by_name['TestBase64ByteArrays']
+_TESTJAVASCRIPTJSON = DESCRIPTOR.message_types_by_name['TestJavaScriptJSON']
+_TESTJAVASCRIPTORDERJSON1 = DESCRIPTOR.message_types_by_name['TestJavaScriptOrderJSON1']
+_TESTJAVASCRIPTORDERJSON2 = DESCRIPTOR.message_types_by_name['TestJavaScriptOrderJSON2']
+_TESTLARGEINT = DESCRIPTOR.message_types_by_name['TestLargeInt']
+_TESTNUMBERS = DESCRIPTOR.message_types_by_name['TestNumbers']
+_TESTCAMELCASE = DESCRIPTOR.message_types_by_name['TestCamelCase']
+_TESTBOOLMAP = DESCRIPTOR.message_types_by_name['TestBoolMap']
+_TESTBOOLMAP_BOOLMAPENTRY = _TESTBOOLMAP.nested_types_by_name['BoolMapEntry']
+_TESTRECURSION = DESCRIPTOR.message_types_by_name['TestRecursion']
+_TESTSTRINGMAP = DESCRIPTOR.message_types_by_name['TestStringMap']
+_TESTSTRINGMAP_STRINGMAPENTRY = _TESTSTRINGMAP.nested_types_by_name['StringMapEntry']
+_TESTSTRINGSERIALIZER = DESCRIPTOR.message_types_by_name['TestStringSerializer']
+_TESTSTRINGSERIALIZER_STRINGMAPENTRY = _TESTSTRINGSERIALIZER.nested_types_by_name['StringMapEntry']
+_TESTMESSAGEWITHEXTENSION = DESCRIPTOR.message_types_by_name['TestMessageWithExtension']
+_TESTEXTENSION = DESCRIPTOR.message_types_by_name['TestExtension']
+_TESTDEFAULTENUMVALUE = DESCRIPTOR.message_types_by_name['TestDefaultEnumValue']
+_TESTNUMBERS_MYTYPE = _TESTNUMBERS.enum_types_by_name['MyType']
+TestFlagsAndStrings = _reflection.GeneratedProtocolMessageType('TestFlagsAndStrings', (_message.Message,), {
+
+ 'RepeatedGroup' : _reflection.GeneratedProtocolMessageType('RepeatedGroup', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTFLAGSANDSTRINGS_REPEATEDGROUP,
+ '__module__' : 'google.protobuf.util.json_format_pb2'
+ # @@protoc_insertion_point(class_scope:protobuf_unittest.TestFlagsAndStrings.RepeatedGroup)
+ })
+ ,
+ 'DESCRIPTOR' : _TESTFLAGSANDSTRINGS,
+ '__module__' : 'google.protobuf.util.json_format_pb2'
+ # @@protoc_insertion_point(class_scope:protobuf_unittest.TestFlagsAndStrings)
+ })
+_sym_db.RegisterMessage(TestFlagsAndStrings)
+_sym_db.RegisterMessage(TestFlagsAndStrings.RepeatedGroup)
+
+TestBase64ByteArrays = _reflection.GeneratedProtocolMessageType('TestBase64ByteArrays', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTBASE64BYTEARRAYS,
+ '__module__' : 'google.protobuf.util.json_format_pb2'
+ # @@protoc_insertion_point(class_scope:protobuf_unittest.TestBase64ByteArrays)
+ })
+_sym_db.RegisterMessage(TestBase64ByteArrays)
+
+TestJavaScriptJSON = _reflection.GeneratedProtocolMessageType('TestJavaScriptJSON', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTJAVASCRIPTJSON,
+ '__module__' : 'google.protobuf.util.json_format_pb2'
+ # @@protoc_insertion_point(class_scope:protobuf_unittest.TestJavaScriptJSON)
+ })
+_sym_db.RegisterMessage(TestJavaScriptJSON)
+
+TestJavaScriptOrderJSON1 = _reflection.GeneratedProtocolMessageType('TestJavaScriptOrderJSON1', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTJAVASCRIPTORDERJSON1,
+ '__module__' : 'google.protobuf.util.json_format_pb2'
+ # @@protoc_insertion_point(class_scope:protobuf_unittest.TestJavaScriptOrderJSON1)
+ })
+_sym_db.RegisterMessage(TestJavaScriptOrderJSON1)
+
+TestJavaScriptOrderJSON2 = _reflection.GeneratedProtocolMessageType('TestJavaScriptOrderJSON2', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTJAVASCRIPTORDERJSON2,
+ '__module__' : 'google.protobuf.util.json_format_pb2'
+ # @@protoc_insertion_point(class_scope:protobuf_unittest.TestJavaScriptOrderJSON2)
+ })
+_sym_db.RegisterMessage(TestJavaScriptOrderJSON2)
+
+TestLargeInt = _reflection.GeneratedProtocolMessageType('TestLargeInt', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTLARGEINT,
+ '__module__' : 'google.protobuf.util.json_format_pb2'
+ # @@protoc_insertion_point(class_scope:protobuf_unittest.TestLargeInt)
+ })
+_sym_db.RegisterMessage(TestLargeInt)
+
+TestNumbers = _reflection.GeneratedProtocolMessageType('TestNumbers', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTNUMBERS,
+ '__module__' : 'google.protobuf.util.json_format_pb2'
+ # @@protoc_insertion_point(class_scope:protobuf_unittest.TestNumbers)
+ })
+_sym_db.RegisterMessage(TestNumbers)
+
+TestCamelCase = _reflection.GeneratedProtocolMessageType('TestCamelCase', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTCAMELCASE,
+ '__module__' : 'google.protobuf.util.json_format_pb2'
+ # @@protoc_insertion_point(class_scope:protobuf_unittest.TestCamelCase)
+ })
+_sym_db.RegisterMessage(TestCamelCase)
+
+TestBoolMap = _reflection.GeneratedProtocolMessageType('TestBoolMap', (_message.Message,), {
+
+ 'BoolMapEntry' : _reflection.GeneratedProtocolMessageType('BoolMapEntry', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTBOOLMAP_BOOLMAPENTRY,
+ '__module__' : 'google.protobuf.util.json_format_pb2'
+ # @@protoc_insertion_point(class_scope:protobuf_unittest.TestBoolMap.BoolMapEntry)
+ })
+ ,
+ 'DESCRIPTOR' : _TESTBOOLMAP,
+ '__module__' : 'google.protobuf.util.json_format_pb2'
+ # @@protoc_insertion_point(class_scope:protobuf_unittest.TestBoolMap)
+ })
+_sym_db.RegisterMessage(TestBoolMap)
+_sym_db.RegisterMessage(TestBoolMap.BoolMapEntry)
+
+TestRecursion = _reflection.GeneratedProtocolMessageType('TestRecursion', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTRECURSION,
+ '__module__' : 'google.protobuf.util.json_format_pb2'
+ # @@protoc_insertion_point(class_scope:protobuf_unittest.TestRecursion)
+ })
+_sym_db.RegisterMessage(TestRecursion)
+
+TestStringMap = _reflection.GeneratedProtocolMessageType('TestStringMap', (_message.Message,), {
+
+ 'StringMapEntry' : _reflection.GeneratedProtocolMessageType('StringMapEntry', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTSTRINGMAP_STRINGMAPENTRY,
+ '__module__' : 'google.protobuf.util.json_format_pb2'
+ # @@protoc_insertion_point(class_scope:protobuf_unittest.TestStringMap.StringMapEntry)
+ })
+ ,
+ 'DESCRIPTOR' : _TESTSTRINGMAP,
+ '__module__' : 'google.protobuf.util.json_format_pb2'
+ # @@protoc_insertion_point(class_scope:protobuf_unittest.TestStringMap)
+ })
+_sym_db.RegisterMessage(TestStringMap)
+_sym_db.RegisterMessage(TestStringMap.StringMapEntry)
+
+TestStringSerializer = _reflection.GeneratedProtocolMessageType('TestStringSerializer', (_message.Message,), {
+
+ 'StringMapEntry' : _reflection.GeneratedProtocolMessageType('StringMapEntry', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTSTRINGSERIALIZER_STRINGMAPENTRY,
+ '__module__' : 'google.protobuf.util.json_format_pb2'
+ # @@protoc_insertion_point(class_scope:protobuf_unittest.TestStringSerializer.StringMapEntry)
+ })
+ ,
+ 'DESCRIPTOR' : _TESTSTRINGSERIALIZER,
+ '__module__' : 'google.protobuf.util.json_format_pb2'
+ # @@protoc_insertion_point(class_scope:protobuf_unittest.TestStringSerializer)
+ })
+_sym_db.RegisterMessage(TestStringSerializer)
+_sym_db.RegisterMessage(TestStringSerializer.StringMapEntry)
+
+TestMessageWithExtension = _reflection.GeneratedProtocolMessageType('TestMessageWithExtension', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTMESSAGEWITHEXTENSION,
+ '__module__' : 'google.protobuf.util.json_format_pb2'
+ # @@protoc_insertion_point(class_scope:protobuf_unittest.TestMessageWithExtension)
+ })
+_sym_db.RegisterMessage(TestMessageWithExtension)
+
+TestExtension = _reflection.GeneratedProtocolMessageType('TestExtension', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTEXTENSION,
+ '__module__' : 'google.protobuf.util.json_format_pb2'
+ # @@protoc_insertion_point(class_scope:protobuf_unittest.TestExtension)
+ })
+_sym_db.RegisterMessage(TestExtension)
+
+TestDefaultEnumValue = _reflection.GeneratedProtocolMessageType('TestDefaultEnumValue', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTDEFAULTENUMVALUE,
+ '__module__' : 'google.protobuf.util.json_format_pb2'
+ # @@protoc_insertion_point(class_scope:protobuf_unittest.TestDefaultEnumValue)
+ })
+_sym_db.RegisterMessage(TestDefaultEnumValue)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+ TestMessageWithExtension.RegisterExtension(_TESTEXTENSION.extensions_by_name['ext'])
+
+ DESCRIPTOR._options = None
+ _TESTBOOLMAP_BOOLMAPENTRY._options = None
+ _TESTBOOLMAP_BOOLMAPENTRY._serialized_options = b'8\001'
+ _TESTSTRINGMAP_STRINGMAPENTRY._options = None
+ _TESTSTRINGMAP_STRINGMAPENTRY._serialized_options = b'8\001'
+ _TESTSTRINGSERIALIZER_STRINGMAPENTRY._options = None
+ _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_options = b'8\001'
+ _ENUMVALUE._serialized_start=1607
+ _ENUMVALUE._serialized_end=1657
+ _TESTFLAGSANDSTRINGS._serialized_start=62
+ _TESTFLAGSANDSTRINGS._serialized_end=199
+ _TESTFLAGSANDSTRINGS_REPEATEDGROUP._serialized_start=173
+ _TESTFLAGSANDSTRINGS_REPEATEDGROUP._serialized_end=199
+ _TESTBASE64BYTEARRAYS._serialized_start=201
+ _TESTBASE64BYTEARRAYS._serialized_end=234
+ _TESTJAVASCRIPTJSON._serialized_start=236
+ _TESTJAVASCRIPTJSON._serialized_end=307
+ _TESTJAVASCRIPTORDERJSON1._serialized_start=309
+ _TESTJAVASCRIPTORDERJSON1._serialized_end=390
+ _TESTJAVASCRIPTORDERJSON2._serialized_start=393
+ _TESTJAVASCRIPTORDERJSON2._serialized_end=530
+ _TESTLARGEINT._serialized_start=532
+ _TESTLARGEINT._serialized_end=568
+ _TESTNUMBERS._serialized_start=571
+ _TESTNUMBERS._serialized_end=731
+ _TESTNUMBERS_MYTYPE._serialized_start=691
+ _TESTNUMBERS_MYTYPE._serialized_end=731
+ _TESTCAMELCASE._serialized_start=733
+ _TESTCAMELCASE._serialized_end=817
+ _TESTBOOLMAP._serialized_start=819
+ _TESTBOOLMAP._serialized_end=943
+ _TESTBOOLMAP_BOOLMAPENTRY._serialized_start=897
+ _TESTBOOLMAP_BOOLMAPENTRY._serialized_end=943
+ _TESTRECURSION._serialized_start=945
+ _TESTRECURSION._serialized_end=1024
+ _TESTSTRINGMAP._serialized_start=1027
+ _TESTSTRINGMAP._serialized_end=1161
+ _TESTSTRINGMAP_STRINGMAPENTRY._serialized_start=1113
+ _TESTSTRINGMAP_STRINGMAPENTRY._serialized_end=1161
+ _TESTSTRINGSERIALIZER._serialized_start=1164
+ _TESTSTRINGSERIALIZER._serialized_end=1360
+ _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_start=1113
+ _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_end=1161
+ _TESTMESSAGEWITHEXTENSION._serialized_start=1362
+ _TESTMESSAGEWITHEXTENSION._serialized_end=1398
+ _TESTEXTENSION._serialized_start=1400
+ _TESTEXTENSION._serialized_end=1522
+ _TESTDEFAULTENUMVALUE._serialized_start=1524
+ _TESTDEFAULTENUMVALUE._serialized_end=1605
+# @@protoc_insertion_point(module_scope)
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/util/json_format_proto3_pb2.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/util/json_format_proto3_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..b20b8f52b26fa4170860bfb45575e46f65dd1046
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/util/json_format_proto3_pb2.py
@@ -0,0 +1,414 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/protobuf/util/json_format_proto3.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2
+from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2
+from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2
+from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
+from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
+from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2
+from google.protobuf import unittest_pb2 as google_dot_protobuf_dot_unittest__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n-google/protobuf/util/json_format_proto3.proto\x12\x06proto3\x1a\x19google/protobuf/any.proto\x1a\x1egoogle/protobuf/duration.proto\x1a google/protobuf/field_mask.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1egoogle/protobuf/unittest.proto\"\x1c\n\x0bMessageType\x12\r\n\x05value\x18\x01 \x01(\x05\"\x94\x05\n\x0bTestMessage\x12\x12\n\nbool_value\x18\x01 \x01(\x08\x12\x13\n\x0bint32_value\x18\x02 \x01(\x05\x12\x13\n\x0bint64_value\x18\x03 \x01(\x03\x12\x14\n\x0cuint32_value\x18\x04 \x01(\r\x12\x14\n\x0cuint64_value\x18\x05 \x01(\x04\x12\x13\n\x0b\x66loat_value\x18\x06 \x01(\x02\x12\x14\n\x0c\x64ouble_value\x18\x07 \x01(\x01\x12\x14\n\x0cstring_value\x18\x08 \x01(\t\x12\x13\n\x0b\x62ytes_value\x18\t \x01(\x0c\x12$\n\nenum_value\x18\n \x01(\x0e\x32\x10.proto3.EnumType\x12*\n\rmessage_value\x18\x0b \x01(\x0b\x32\x13.proto3.MessageType\x12\x1b\n\x13repeated_bool_value\x18\x15 \x03(\x08\x12\x1c\n\x14repeated_int32_value\x18\x16 \x03(\x05\x12\x1c\n\x14repeated_int64_value\x18\x17 \x03(\x03\x12\x1d\n\x15repeated_uint32_value\x18\x18 \x03(\r\x12\x1d\n\x15repeated_uint64_value\x18\x19 \x03(\x04\x12\x1c\n\x14repeated_float_value\x18\x1a \x03(\x02\x12\x1d\n\x15repeated_double_value\x18\x1b \x03(\x01\x12\x1d\n\x15repeated_string_value\x18\x1c \x03(\t\x12\x1c\n\x14repeated_bytes_value\x18\x1d \x03(\x0c\x12-\n\x13repeated_enum_value\x18\x1e \x03(\x0e\x32\x10.proto3.EnumType\x12\x33\n\x16repeated_message_value\x18\x1f \x03(\x0b\x32\x13.proto3.MessageType\"\x8c\x02\n\tTestOneof\x12\x1b\n\x11oneof_int32_value\x18\x01 \x01(\x05H\x00\x12\x1c\n\x12oneof_string_value\x18\x02 \x01(\tH\x00\x12\x1b\n\x11oneof_bytes_value\x18\x03 \x01(\x0cH\x00\x12,\n\x10oneof_enum_value\x18\x04 \x01(\x0e\x32\x10.proto3.EnumTypeH\x00\x12\x32\n\x13oneof_message_value\x18\x05 \x01(\x0b\x32\x13.proto3.MessageTypeH\x00\x12\x36\n\x10oneof_null_value\x18\x06 
\x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x42\r\n\x0boneof_value\"\xe1\x04\n\x07TestMap\x12.\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32\x1c.proto3.TestMap.BoolMapEntry\x12\x30\n\tint32_map\x18\x02 \x03(\x0b\x32\x1d.proto3.TestMap.Int32MapEntry\x12\x30\n\tint64_map\x18\x03 \x03(\x0b\x32\x1d.proto3.TestMap.Int64MapEntry\x12\x32\n\nuint32_map\x18\x04 \x03(\x0b\x32\x1e.proto3.TestMap.Uint32MapEntry\x12\x32\n\nuint64_map\x18\x05 \x03(\x0b\x32\x1e.proto3.TestMap.Uint64MapEntry\x12\x32\n\nstring_map\x18\x06 \x03(\x0b\x32\x1e.proto3.TestMap.StringMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"\x85\x06\n\rTestNestedMap\x12\x34\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32\".proto3.TestNestedMap.BoolMapEntry\x12\x36\n\tint32_map\x18\x02 \x03(\x0b\x32#.proto3.TestNestedMap.Int32MapEntry\x12\x36\n\tint64_map\x18\x03 \x03(\x0b\x32#.proto3.TestNestedMap.Int64MapEntry\x12\x38\n\nuint32_map\x18\x04 \x03(\x0b\x32$.proto3.TestNestedMap.Uint32MapEntry\x12\x38\n\nuint64_map\x18\x05 \x03(\x0b\x32$.proto3.TestNestedMap.Uint64MapEntry\x12\x38\n\nstring_map\x18\x06 \x03(\x0b\x32$.proto3.TestNestedMap.StringMapEntry\x12\x32\n\x07map_map\x18\x07 \x03(\x0b\x32!.proto3.TestNestedMap.MapMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 
\x01(\x05:\x02\x38\x01\x1a/\n\rInt64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x44\n\x0bMapMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12$\n\x05value\x18\x02 \x01(\x0b\x32\x15.proto3.TestNestedMap:\x02\x38\x01\"{\n\rTestStringMap\x12\x38\n\nstring_map\x18\x01 \x03(\x0b\x32$.proto3.TestStringMap.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xee\x07\n\x0bTestWrapper\x12.\n\nbool_value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x30\n\x0bint32_value\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x12\x30\n\x0bint64_value\x18\x03 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x32\n\x0cuint32_value\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.UInt32Value\x12\x32\n\x0cuint64_value\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.UInt64Value\x12\x30\n\x0b\x66loat_value\x18\x06 \x01(\x0b\x32\x1b.google.protobuf.FloatValue\x12\x32\n\x0c\x64ouble_value\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x32\n\x0cstring_value\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x30\n\x0b\x62ytes_value\x18\t \x01(\x0b\x32\x1b.google.protobuf.BytesValue\x12\x37\n\x13repeated_bool_value\x18\x0b \x03(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x39\n\x14repeated_int32_value\x18\x0c \x03(\x0b\x32\x1b.google.protobuf.Int32Value\x12\x39\n\x14repeated_int64_value\x18\r \x03(\x0b\x32\x1b.google.protobuf.Int64Value\x12;\n\x15repeated_uint32_value\x18\x0e \x03(\x0b\x32\x1c.google.protobuf.UInt32Value\x12;\n\x15repeated_uint64_value\x18\x0f 
\x03(\x0b\x32\x1c.google.protobuf.UInt64Value\x12\x39\n\x14repeated_float_value\x18\x10 \x03(\x0b\x32\x1b.google.protobuf.FloatValue\x12;\n\x15repeated_double_value\x18\x11 \x03(\x0b\x32\x1c.google.protobuf.DoubleValue\x12;\n\x15repeated_string_value\x18\x12 \x03(\x0b\x32\x1c.google.protobuf.StringValue\x12\x39\n\x14repeated_bytes_value\x18\x13 \x03(\x0b\x32\x1b.google.protobuf.BytesValue\"n\n\rTestTimestamp\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\"k\n\x0cTestDuration\x12(\n\x05value\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x31\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x19.google.protobuf.Duration\":\n\rTestFieldMask\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"e\n\nTestStruct\x12&\n\x05value\x18\x01 \x01(\x0b\x32\x17.google.protobuf.Struct\x12/\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x17.google.protobuf.Struct\"\\\n\x07TestAny\x12#\n\x05value\x18\x01 \x01(\x0b\x32\x14.google.protobuf.Any\x12,\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x14.google.protobuf.Any\"b\n\tTestValue\x12%\n\x05value\x18\x01 \x01(\x0b\x32\x16.google.protobuf.Value\x12.\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x16.google.protobuf.Value\"n\n\rTestListValue\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.ListValue\x12\x32\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.ListValue\"\x89\x01\n\rTestBoolValue\x12\x12\n\nbool_value\x18\x01 \x01(\x08\x12\x34\n\x08\x62ool_map\x18\x02 \x03(\x0b\x32\".proto3.TestBoolValue.BoolMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"+\n\x12TestCustomJsonName\x12\x15\n\x05value\x18\x01 \x01(\x05R\x06@value\"J\n\x0eTestExtensions\x12\x38\n\nextensions\x18\x01 \x01(\x0b\x32$.protobuf_unittest.TestAllExtensions\"\x84\x01\n\rTestEnumValue\x12%\n\x0b\x65num_value1\x18\x01 
\x01(\x0e\x32\x10.proto3.EnumType\x12%\n\x0b\x65num_value2\x18\x02 \x01(\x0e\x32\x10.proto3.EnumType\x12%\n\x0b\x65num_value3\x18\x03 \x01(\x0e\x32\x10.proto3.EnumType*\x1c\n\x08\x45numType\x12\x07\n\x03\x46OO\x10\x00\x12\x07\n\x03\x42\x41R\x10\x01\x42,\n\x18\x63om.google.protobuf.utilB\x10JsonFormatProto3b\x06proto3')
+
+_ENUMTYPE = DESCRIPTOR.enum_types_by_name['EnumType']
+EnumType = enum_type_wrapper.EnumTypeWrapper(_ENUMTYPE)
+FOO = 0
+BAR = 1
+
+
+_MESSAGETYPE = DESCRIPTOR.message_types_by_name['MessageType']
+_TESTMESSAGE = DESCRIPTOR.message_types_by_name['TestMessage']
+_TESTONEOF = DESCRIPTOR.message_types_by_name['TestOneof']
+_TESTMAP = DESCRIPTOR.message_types_by_name['TestMap']
+_TESTMAP_BOOLMAPENTRY = _TESTMAP.nested_types_by_name['BoolMapEntry']
+_TESTMAP_INT32MAPENTRY = _TESTMAP.nested_types_by_name['Int32MapEntry']
+_TESTMAP_INT64MAPENTRY = _TESTMAP.nested_types_by_name['Int64MapEntry']
+_TESTMAP_UINT32MAPENTRY = _TESTMAP.nested_types_by_name['Uint32MapEntry']
+_TESTMAP_UINT64MAPENTRY = _TESTMAP.nested_types_by_name['Uint64MapEntry']
+_TESTMAP_STRINGMAPENTRY = _TESTMAP.nested_types_by_name['StringMapEntry']
+_TESTNESTEDMAP = DESCRIPTOR.message_types_by_name['TestNestedMap']
+_TESTNESTEDMAP_BOOLMAPENTRY = _TESTNESTEDMAP.nested_types_by_name['BoolMapEntry']
+_TESTNESTEDMAP_INT32MAPENTRY = _TESTNESTEDMAP.nested_types_by_name['Int32MapEntry']
+_TESTNESTEDMAP_INT64MAPENTRY = _TESTNESTEDMAP.nested_types_by_name['Int64MapEntry']
+_TESTNESTEDMAP_UINT32MAPENTRY = _TESTNESTEDMAP.nested_types_by_name['Uint32MapEntry']
+_TESTNESTEDMAP_UINT64MAPENTRY = _TESTNESTEDMAP.nested_types_by_name['Uint64MapEntry']
+_TESTNESTEDMAP_STRINGMAPENTRY = _TESTNESTEDMAP.nested_types_by_name['StringMapEntry']
+_TESTNESTEDMAP_MAPMAPENTRY = _TESTNESTEDMAP.nested_types_by_name['MapMapEntry']
+_TESTSTRINGMAP = DESCRIPTOR.message_types_by_name['TestStringMap']
+_TESTSTRINGMAP_STRINGMAPENTRY = _TESTSTRINGMAP.nested_types_by_name['StringMapEntry']
+_TESTWRAPPER = DESCRIPTOR.message_types_by_name['TestWrapper']
+_TESTTIMESTAMP = DESCRIPTOR.message_types_by_name['TestTimestamp']
+_TESTDURATION = DESCRIPTOR.message_types_by_name['TestDuration']
+_TESTFIELDMASK = DESCRIPTOR.message_types_by_name['TestFieldMask']
+_TESTSTRUCT = DESCRIPTOR.message_types_by_name['TestStruct']
+_TESTANY = DESCRIPTOR.message_types_by_name['TestAny']
+_TESTVALUE = DESCRIPTOR.message_types_by_name['TestValue']
+_TESTLISTVALUE = DESCRIPTOR.message_types_by_name['TestListValue']
+_TESTBOOLVALUE = DESCRIPTOR.message_types_by_name['TestBoolValue']
+_TESTBOOLVALUE_BOOLMAPENTRY = _TESTBOOLVALUE.nested_types_by_name['BoolMapEntry']
+_TESTCUSTOMJSONNAME = DESCRIPTOR.message_types_by_name['TestCustomJsonName']
+_TESTEXTENSIONS = DESCRIPTOR.message_types_by_name['TestExtensions']
+_TESTENUMVALUE = DESCRIPTOR.message_types_by_name['TestEnumValue']
+MessageType = _reflection.GeneratedProtocolMessageType('MessageType', (_message.Message,), {
+ 'DESCRIPTOR' : _MESSAGETYPE,
+ '__module__' : 'google.protobuf.util.json_format_proto3_pb2'
+ # @@protoc_insertion_point(class_scope:proto3.MessageType)
+ })
+_sym_db.RegisterMessage(MessageType)
+
+TestMessage = _reflection.GeneratedProtocolMessageType('TestMessage', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTMESSAGE,
+ '__module__' : 'google.protobuf.util.json_format_proto3_pb2'
+ # @@protoc_insertion_point(class_scope:proto3.TestMessage)
+ })
+_sym_db.RegisterMessage(TestMessage)
+
+TestOneof = _reflection.GeneratedProtocolMessageType('TestOneof', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTONEOF,
+ '__module__' : 'google.protobuf.util.json_format_proto3_pb2'
+ # @@protoc_insertion_point(class_scope:proto3.TestOneof)
+ })
+_sym_db.RegisterMessage(TestOneof)
+
+TestMap = _reflection.GeneratedProtocolMessageType('TestMap', (_message.Message,), {
+
+ 'BoolMapEntry' : _reflection.GeneratedProtocolMessageType('BoolMapEntry', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTMAP_BOOLMAPENTRY,
+ '__module__' : 'google.protobuf.util.json_format_proto3_pb2'
+ # @@protoc_insertion_point(class_scope:proto3.TestMap.BoolMapEntry)
+ })
+ ,
+
+ 'Int32MapEntry' : _reflection.GeneratedProtocolMessageType('Int32MapEntry', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTMAP_INT32MAPENTRY,
+ '__module__' : 'google.protobuf.util.json_format_proto3_pb2'
+ # @@protoc_insertion_point(class_scope:proto3.TestMap.Int32MapEntry)
+ })
+ ,
+
+ 'Int64MapEntry' : _reflection.GeneratedProtocolMessageType('Int64MapEntry', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTMAP_INT64MAPENTRY,
+ '__module__' : 'google.protobuf.util.json_format_proto3_pb2'
+ # @@protoc_insertion_point(class_scope:proto3.TestMap.Int64MapEntry)
+ })
+ ,
+
+ 'Uint32MapEntry' : _reflection.GeneratedProtocolMessageType('Uint32MapEntry', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTMAP_UINT32MAPENTRY,
+ '__module__' : 'google.protobuf.util.json_format_proto3_pb2'
+ # @@protoc_insertion_point(class_scope:proto3.TestMap.Uint32MapEntry)
+ })
+ ,
+
+ 'Uint64MapEntry' : _reflection.GeneratedProtocolMessageType('Uint64MapEntry', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTMAP_UINT64MAPENTRY,
+ '__module__' : 'google.protobuf.util.json_format_proto3_pb2'
+ # @@protoc_insertion_point(class_scope:proto3.TestMap.Uint64MapEntry)
+ })
+ ,
+
+ 'StringMapEntry' : _reflection.GeneratedProtocolMessageType('StringMapEntry', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTMAP_STRINGMAPENTRY,
+ '__module__' : 'google.protobuf.util.json_format_proto3_pb2'
+ # @@protoc_insertion_point(class_scope:proto3.TestMap.StringMapEntry)
+ })
+ ,
+ 'DESCRIPTOR' : _TESTMAP,
+ '__module__' : 'google.protobuf.util.json_format_proto3_pb2'
+ # @@protoc_insertion_point(class_scope:proto3.TestMap)
+ })
+_sym_db.RegisterMessage(TestMap)
+_sym_db.RegisterMessage(TestMap.BoolMapEntry)
+_sym_db.RegisterMessage(TestMap.Int32MapEntry)
+_sym_db.RegisterMessage(TestMap.Int64MapEntry)
+_sym_db.RegisterMessage(TestMap.Uint32MapEntry)
+_sym_db.RegisterMessage(TestMap.Uint64MapEntry)
+_sym_db.RegisterMessage(TestMap.StringMapEntry)
+
+TestNestedMap = _reflection.GeneratedProtocolMessageType('TestNestedMap', (_message.Message,), {
+
+ 'BoolMapEntry' : _reflection.GeneratedProtocolMessageType('BoolMapEntry', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTNESTEDMAP_BOOLMAPENTRY,
+ '__module__' : 'google.protobuf.util.json_format_proto3_pb2'
+ # @@protoc_insertion_point(class_scope:proto3.TestNestedMap.BoolMapEntry)
+ })
+ ,
+
+ 'Int32MapEntry' : _reflection.GeneratedProtocolMessageType('Int32MapEntry', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTNESTEDMAP_INT32MAPENTRY,
+ '__module__' : 'google.protobuf.util.json_format_proto3_pb2'
+ # @@protoc_insertion_point(class_scope:proto3.TestNestedMap.Int32MapEntry)
+ })
+ ,
+
+ 'Int64MapEntry' : _reflection.GeneratedProtocolMessageType('Int64MapEntry', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTNESTEDMAP_INT64MAPENTRY,
+ '__module__' : 'google.protobuf.util.json_format_proto3_pb2'
+ # @@protoc_insertion_point(class_scope:proto3.TestNestedMap.Int64MapEntry)
+ })
+ ,
+
+ 'Uint32MapEntry' : _reflection.GeneratedProtocolMessageType('Uint32MapEntry', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTNESTEDMAP_UINT32MAPENTRY,
+ '__module__' : 'google.protobuf.util.json_format_proto3_pb2'
+ # @@protoc_insertion_point(class_scope:proto3.TestNestedMap.Uint32MapEntry)
+ })
+ ,
+
+ 'Uint64MapEntry' : _reflection.GeneratedProtocolMessageType('Uint64MapEntry', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTNESTEDMAP_UINT64MAPENTRY,
+ '__module__' : 'google.protobuf.util.json_format_proto3_pb2'
+ # @@protoc_insertion_point(class_scope:proto3.TestNestedMap.Uint64MapEntry)
+ })
+ ,
+
+ 'StringMapEntry' : _reflection.GeneratedProtocolMessageType('StringMapEntry', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTNESTEDMAP_STRINGMAPENTRY,
+ '__module__' : 'google.protobuf.util.json_format_proto3_pb2'
+ # @@protoc_insertion_point(class_scope:proto3.TestNestedMap.StringMapEntry)
+ })
+ ,
+
+ 'MapMapEntry' : _reflection.GeneratedProtocolMessageType('MapMapEntry', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTNESTEDMAP_MAPMAPENTRY,
+ '__module__' : 'google.protobuf.util.json_format_proto3_pb2'
+ # @@protoc_insertion_point(class_scope:proto3.TestNestedMap.MapMapEntry)
+ })
+ ,
+ 'DESCRIPTOR' : _TESTNESTEDMAP,
+ '__module__' : 'google.protobuf.util.json_format_proto3_pb2'
+ # @@protoc_insertion_point(class_scope:proto3.TestNestedMap)
+ })
+_sym_db.RegisterMessage(TestNestedMap)
+_sym_db.RegisterMessage(TestNestedMap.BoolMapEntry)
+_sym_db.RegisterMessage(TestNestedMap.Int32MapEntry)
+_sym_db.RegisterMessage(TestNestedMap.Int64MapEntry)
+_sym_db.RegisterMessage(TestNestedMap.Uint32MapEntry)
+_sym_db.RegisterMessage(TestNestedMap.Uint64MapEntry)
+_sym_db.RegisterMessage(TestNestedMap.StringMapEntry)
+_sym_db.RegisterMessage(TestNestedMap.MapMapEntry)
+
+TestStringMap = _reflection.GeneratedProtocolMessageType('TestStringMap', (_message.Message,), {
+
+ 'StringMapEntry' : _reflection.GeneratedProtocolMessageType('StringMapEntry', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTSTRINGMAP_STRINGMAPENTRY,
+ '__module__' : 'google.protobuf.util.json_format_proto3_pb2'
+ # @@protoc_insertion_point(class_scope:proto3.TestStringMap.StringMapEntry)
+ })
+ ,
+ 'DESCRIPTOR' : _TESTSTRINGMAP,
+ '__module__' : 'google.protobuf.util.json_format_proto3_pb2'
+ # @@protoc_insertion_point(class_scope:proto3.TestStringMap)
+ })
+_sym_db.RegisterMessage(TestStringMap)
+_sym_db.RegisterMessage(TestStringMap.StringMapEntry)
+
+TestWrapper = _reflection.GeneratedProtocolMessageType('TestWrapper', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTWRAPPER,
+ '__module__' : 'google.protobuf.util.json_format_proto3_pb2'
+ # @@protoc_insertion_point(class_scope:proto3.TestWrapper)
+ })
+_sym_db.RegisterMessage(TestWrapper)
+
+TestTimestamp = _reflection.GeneratedProtocolMessageType('TestTimestamp', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTTIMESTAMP,
+ '__module__' : 'google.protobuf.util.json_format_proto3_pb2'
+ # @@protoc_insertion_point(class_scope:proto3.TestTimestamp)
+ })
+_sym_db.RegisterMessage(TestTimestamp)
+
+TestDuration = _reflection.GeneratedProtocolMessageType('TestDuration', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTDURATION,
+ '__module__' : 'google.protobuf.util.json_format_proto3_pb2'
+ # @@protoc_insertion_point(class_scope:proto3.TestDuration)
+ })
+_sym_db.RegisterMessage(TestDuration)
+
+TestFieldMask = _reflection.GeneratedProtocolMessageType('TestFieldMask', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTFIELDMASK,
+ '__module__' : 'google.protobuf.util.json_format_proto3_pb2'
+ # @@protoc_insertion_point(class_scope:proto3.TestFieldMask)
+ })
+_sym_db.RegisterMessage(TestFieldMask)
+
+TestStruct = _reflection.GeneratedProtocolMessageType('TestStruct', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTSTRUCT,
+ '__module__' : 'google.protobuf.util.json_format_proto3_pb2'
+ # @@protoc_insertion_point(class_scope:proto3.TestStruct)
+ })
+_sym_db.RegisterMessage(TestStruct)
+
+TestAny = _reflection.GeneratedProtocolMessageType('TestAny', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTANY,
+ '__module__' : 'google.protobuf.util.json_format_proto3_pb2'
+ # @@protoc_insertion_point(class_scope:proto3.TestAny)
+ })
+_sym_db.RegisterMessage(TestAny)
+
+TestValue = _reflection.GeneratedProtocolMessageType('TestValue', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTVALUE,
+ '__module__' : 'google.protobuf.util.json_format_proto3_pb2'
+ # @@protoc_insertion_point(class_scope:proto3.TestValue)
+ })
+_sym_db.RegisterMessage(TestValue)
+
+TestListValue = _reflection.GeneratedProtocolMessageType('TestListValue', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTLISTVALUE,
+ '__module__' : 'google.protobuf.util.json_format_proto3_pb2'
+ # @@protoc_insertion_point(class_scope:proto3.TestListValue)
+ })
+_sym_db.RegisterMessage(TestListValue)
+
+TestBoolValue = _reflection.GeneratedProtocolMessageType('TestBoolValue', (_message.Message,), {
+
+ 'BoolMapEntry' : _reflection.GeneratedProtocolMessageType('BoolMapEntry', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTBOOLVALUE_BOOLMAPENTRY,
+ '__module__' : 'google.protobuf.util.json_format_proto3_pb2'
+ # @@protoc_insertion_point(class_scope:proto3.TestBoolValue.BoolMapEntry)
+ })
+ ,
+ 'DESCRIPTOR' : _TESTBOOLVALUE,
+ '__module__' : 'google.protobuf.util.json_format_proto3_pb2'
+ # @@protoc_insertion_point(class_scope:proto3.TestBoolValue)
+ })
+_sym_db.RegisterMessage(TestBoolValue)
+_sym_db.RegisterMessage(TestBoolValue.BoolMapEntry)
+
+TestCustomJsonName = _reflection.GeneratedProtocolMessageType('TestCustomJsonName', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTCUSTOMJSONNAME,
+ '__module__' : 'google.protobuf.util.json_format_proto3_pb2'
+ # @@protoc_insertion_point(class_scope:proto3.TestCustomJsonName)
+ })
+_sym_db.RegisterMessage(TestCustomJsonName)
+
+TestExtensions = _reflection.GeneratedProtocolMessageType('TestExtensions', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTEXTENSIONS,
+ '__module__' : 'google.protobuf.util.json_format_proto3_pb2'
+ # @@protoc_insertion_point(class_scope:proto3.TestExtensions)
+ })
+_sym_db.RegisterMessage(TestExtensions)
+
+TestEnumValue = _reflection.GeneratedProtocolMessageType('TestEnumValue', (_message.Message,), {
+ 'DESCRIPTOR' : _TESTENUMVALUE,
+ '__module__' : 'google.protobuf.util.json_format_proto3_pb2'
+ # @@protoc_insertion_point(class_scope:proto3.TestEnumValue)
+ })
+_sym_db.RegisterMessage(TestEnumValue)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b'\n\030com.google.protobuf.utilB\020JsonFormatProto3'
+ _TESTMAP_BOOLMAPENTRY._options = None
+ _TESTMAP_BOOLMAPENTRY._serialized_options = b'8\001'
+ _TESTMAP_INT32MAPENTRY._options = None
+ _TESTMAP_INT32MAPENTRY._serialized_options = b'8\001'
+ _TESTMAP_INT64MAPENTRY._options = None
+ _TESTMAP_INT64MAPENTRY._serialized_options = b'8\001'
+ _TESTMAP_UINT32MAPENTRY._options = None
+ _TESTMAP_UINT32MAPENTRY._serialized_options = b'8\001'
+ _TESTMAP_UINT64MAPENTRY._options = None
+ _TESTMAP_UINT64MAPENTRY._serialized_options = b'8\001'
+ _TESTMAP_STRINGMAPENTRY._options = None
+ _TESTMAP_STRINGMAPENTRY._serialized_options = b'8\001'
+ _TESTNESTEDMAP_BOOLMAPENTRY._options = None
+ _TESTNESTEDMAP_BOOLMAPENTRY._serialized_options = b'8\001'
+ _TESTNESTEDMAP_INT32MAPENTRY._options = None
+ _TESTNESTEDMAP_INT32MAPENTRY._serialized_options = b'8\001'
+ _TESTNESTEDMAP_INT64MAPENTRY._options = None
+ _TESTNESTEDMAP_INT64MAPENTRY._serialized_options = b'8\001'
+ _TESTNESTEDMAP_UINT32MAPENTRY._options = None
+ _TESTNESTEDMAP_UINT32MAPENTRY._serialized_options = b'8\001'
+ _TESTNESTEDMAP_UINT64MAPENTRY._options = None
+ _TESTNESTEDMAP_UINT64MAPENTRY._serialized_options = b'8\001'
+ _TESTNESTEDMAP_STRINGMAPENTRY._options = None
+ _TESTNESTEDMAP_STRINGMAPENTRY._serialized_options = b'8\001'
+ _TESTNESTEDMAP_MAPMAPENTRY._options = None
+ _TESTNESTEDMAP_MAPMAPENTRY._serialized_options = b'8\001'
+ _TESTSTRINGMAP_STRINGMAPENTRY._options = None
+ _TESTSTRINGMAP_STRINGMAPENTRY._serialized_options = b'8\001'
+ _TESTBOOLVALUE_BOOLMAPENTRY._options = None
+ _TESTBOOLVALUE_BOOLMAPENTRY._serialized_options = b'8\001'
+ _ENUMTYPE._serialized_start=4849
+ _ENUMTYPE._serialized_end=4877
+ _MESSAGETYPE._serialized_start=277
+ _MESSAGETYPE._serialized_end=305
+ _TESTMESSAGE._serialized_start=308
+ _TESTMESSAGE._serialized_end=968
+ _TESTONEOF._serialized_start=971
+ _TESTONEOF._serialized_end=1239
+ _TESTMAP._serialized_start=1242
+ _TESTMAP._serialized_end=1851
+ _TESTMAP_BOOLMAPENTRY._serialized_start=1557
+ _TESTMAP_BOOLMAPENTRY._serialized_end=1603
+ _TESTMAP_INT32MAPENTRY._serialized_start=1605
+ _TESTMAP_INT32MAPENTRY._serialized_end=1652
+ _TESTMAP_INT64MAPENTRY._serialized_start=1654
+ _TESTMAP_INT64MAPENTRY._serialized_end=1701
+ _TESTMAP_UINT32MAPENTRY._serialized_start=1703
+ _TESTMAP_UINT32MAPENTRY._serialized_end=1751
+ _TESTMAP_UINT64MAPENTRY._serialized_start=1753
+ _TESTMAP_UINT64MAPENTRY._serialized_end=1801
+ _TESTMAP_STRINGMAPENTRY._serialized_start=1803
+ _TESTMAP_STRINGMAPENTRY._serialized_end=1851
+ _TESTNESTEDMAP._serialized_start=1854
+ _TESTNESTEDMAP._serialized_end=2627
+ _TESTNESTEDMAP_BOOLMAPENTRY._serialized_start=1557
+ _TESTNESTEDMAP_BOOLMAPENTRY._serialized_end=1603
+ _TESTNESTEDMAP_INT32MAPENTRY._serialized_start=1605
+ _TESTNESTEDMAP_INT32MAPENTRY._serialized_end=1652
+ _TESTNESTEDMAP_INT64MAPENTRY._serialized_start=1654
+ _TESTNESTEDMAP_INT64MAPENTRY._serialized_end=1701
+ _TESTNESTEDMAP_UINT32MAPENTRY._serialized_start=1703
+ _TESTNESTEDMAP_UINT32MAPENTRY._serialized_end=1751
+ _TESTNESTEDMAP_UINT64MAPENTRY._serialized_start=1753
+ _TESTNESTEDMAP_UINT64MAPENTRY._serialized_end=1801
+ _TESTNESTEDMAP_STRINGMAPENTRY._serialized_start=1803
+ _TESTNESTEDMAP_STRINGMAPENTRY._serialized_end=1851
+ _TESTNESTEDMAP_MAPMAPENTRY._serialized_start=2559
+ _TESTNESTEDMAP_MAPMAPENTRY._serialized_end=2627
+ _TESTSTRINGMAP._serialized_start=2629
+ _TESTSTRINGMAP._serialized_end=2752
+ _TESTSTRINGMAP_STRINGMAPENTRY._serialized_start=2704
+ _TESTSTRINGMAP_STRINGMAPENTRY._serialized_end=2752
+ _TESTWRAPPER._serialized_start=2755
+ _TESTWRAPPER._serialized_end=3761
+ _TESTTIMESTAMP._serialized_start=3763
+ _TESTTIMESTAMP._serialized_end=3873
+ _TESTDURATION._serialized_start=3875
+ _TESTDURATION._serialized_end=3982
+ _TESTFIELDMASK._serialized_start=3984
+ _TESTFIELDMASK._serialized_end=4042
+ _TESTSTRUCT._serialized_start=4044
+ _TESTSTRUCT._serialized_end=4145
+ _TESTANY._serialized_start=4147
+ _TESTANY._serialized_end=4239
+ _TESTVALUE._serialized_start=4241
+ _TESTVALUE._serialized_end=4339
+ _TESTLISTVALUE._serialized_start=4341
+ _TESTLISTVALUE._serialized_end=4451
+ _TESTBOOLVALUE._serialized_start=4454
+ _TESTBOOLVALUE._serialized_end=4591
+ _TESTBOOLVALUE_BOOLMAPENTRY._serialized_start=1557
+ _TESTBOOLVALUE_BOOLMAPENTRY._serialized_end=1603
+ _TESTCUSTOMJSONNAME._serialized_start=4593
+ _TESTCUSTOMJSONNAME._serialized_end=4636
+ _TESTEXTENSIONS._serialized_start=4638
+ _TESTEXTENSIONS._serialized_end=4712
+ _TESTENUMVALUE._serialized_start=4715
+ _TESTENUMVALUE._serialized_end=4847
+# @@protoc_insertion_point(module_scope)
diff --git a/monEnvTP/lib/python3.8/site-packages/google/protobuf/wrappers_pb2.py b/monEnvTP/lib/python3.8/site-packages/google/protobuf/wrappers_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..f11512ee2323331d9eb8d969c3e10795c33827ba
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/google/protobuf/wrappers_pb2.py
@@ -0,0 +1,115 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/protobuf/wrappers.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/wrappers.proto\x12\x0fgoogle.protobuf\"\x1c\n\x0b\x44oubleValue\x12\r\n\x05value\x18\x01 \x01(\x01\"\x1b\n\nFloatValue\x12\r\n\x05value\x18\x01 \x01(\x02\"\x1b\n\nInt64Value\x12\r\n\x05value\x18\x01 \x01(\x03\"\x1c\n\x0bUInt64Value\x12\r\n\x05value\x18\x01 \x01(\x04\"\x1b\n\nInt32Value\x12\r\n\x05value\x18\x01 \x01(\x05\"\x1c\n\x0bUInt32Value\x12\r\n\x05value\x18\x01 \x01(\r\"\x1a\n\tBoolValue\x12\r\n\x05value\x18\x01 \x01(\x08\"\x1c\n\x0bStringValue\x12\r\n\x05value\x18\x01 \x01(\t\"\x1b\n\nBytesValue\x12\r\n\x05value\x18\x01 \x01(\x0c\x42\x83\x01\n\x13\x63om.google.protobufB\rWrappersProtoP\x01Z1google.golang.org/protobuf/types/known/wrapperspb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
+
+
+
+_DOUBLEVALUE = DESCRIPTOR.message_types_by_name['DoubleValue']
+_FLOATVALUE = DESCRIPTOR.message_types_by_name['FloatValue']
+_INT64VALUE = DESCRIPTOR.message_types_by_name['Int64Value']
+_UINT64VALUE = DESCRIPTOR.message_types_by_name['UInt64Value']
+_INT32VALUE = DESCRIPTOR.message_types_by_name['Int32Value']
+_UINT32VALUE = DESCRIPTOR.message_types_by_name['UInt32Value']
+_BOOLVALUE = DESCRIPTOR.message_types_by_name['BoolValue']
+_STRINGVALUE = DESCRIPTOR.message_types_by_name['StringValue']
+_BYTESVALUE = DESCRIPTOR.message_types_by_name['BytesValue']
+DoubleValue = _reflection.GeneratedProtocolMessageType('DoubleValue', (_message.Message,), {
+ 'DESCRIPTOR' : _DOUBLEVALUE,
+ '__module__' : 'google.protobuf.wrappers_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.DoubleValue)
+ })
+_sym_db.RegisterMessage(DoubleValue)
+
+FloatValue = _reflection.GeneratedProtocolMessageType('FloatValue', (_message.Message,), {
+ 'DESCRIPTOR' : _FLOATVALUE,
+ '__module__' : 'google.protobuf.wrappers_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.FloatValue)
+ })
+_sym_db.RegisterMessage(FloatValue)
+
+Int64Value = _reflection.GeneratedProtocolMessageType('Int64Value', (_message.Message,), {
+ 'DESCRIPTOR' : _INT64VALUE,
+ '__module__' : 'google.protobuf.wrappers_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.Int64Value)
+ })
+_sym_db.RegisterMessage(Int64Value)
+
+UInt64Value = _reflection.GeneratedProtocolMessageType('UInt64Value', (_message.Message,), {
+ 'DESCRIPTOR' : _UINT64VALUE,
+ '__module__' : 'google.protobuf.wrappers_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.UInt64Value)
+ })
+_sym_db.RegisterMessage(UInt64Value)
+
+Int32Value = _reflection.GeneratedProtocolMessageType('Int32Value', (_message.Message,), {
+ 'DESCRIPTOR' : _INT32VALUE,
+ '__module__' : 'google.protobuf.wrappers_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.Int32Value)
+ })
+_sym_db.RegisterMessage(Int32Value)
+
+UInt32Value = _reflection.GeneratedProtocolMessageType('UInt32Value', (_message.Message,), {
+ 'DESCRIPTOR' : _UINT32VALUE,
+ '__module__' : 'google.protobuf.wrappers_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.UInt32Value)
+ })
+_sym_db.RegisterMessage(UInt32Value)
+
+BoolValue = _reflection.GeneratedProtocolMessageType('BoolValue', (_message.Message,), {
+ 'DESCRIPTOR' : _BOOLVALUE,
+ '__module__' : 'google.protobuf.wrappers_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.BoolValue)
+ })
+_sym_db.RegisterMessage(BoolValue)
+
+StringValue = _reflection.GeneratedProtocolMessageType('StringValue', (_message.Message,), {
+ 'DESCRIPTOR' : _STRINGVALUE,
+ '__module__' : 'google.protobuf.wrappers_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.StringValue)
+ })
+_sym_db.RegisterMessage(StringValue)
+
+BytesValue = _reflection.GeneratedProtocolMessageType('BytesValue', (_message.Message,), {
+ 'DESCRIPTOR' : _BYTESVALUE,
+ '__module__' : 'google.protobuf.wrappers_pb2'
+ # @@protoc_insertion_point(class_scope:google.protobuf.BytesValue)
+ })
+_sym_db.RegisterMessage(BytesValue)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\rWrappersProtoP\001Z1google.golang.org/protobuf/types/known/wrapperspb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
+ _DOUBLEVALUE._serialized_start=51
+ _DOUBLEVALUE._serialized_end=79
+ _FLOATVALUE._serialized_start=81
+ _FLOATVALUE._serialized_end=108
+ _INT64VALUE._serialized_start=110
+ _INT64VALUE._serialized_end=137
+ _UINT64VALUE._serialized_start=139
+ _UINT64VALUE._serialized_end=167
+ _INT32VALUE._serialized_start=169
+ _INT32VALUE._serialized_end=196
+ _UINT32VALUE._serialized_start=198
+ _UINT32VALUE._serialized_end=226
+ _BOOLVALUE._serialized_start=228
+ _BOOLVALUE._serialized_end=254
+ _STRINGVALUE._serialized_start=256
+ _STRINGVALUE._serialized_end=284
+ _BYTESVALUE._serialized_start=286
+ _BYTESVALUE._serialized_end=313
+# @@protoc_insertion_point(module_scope)
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources-5.4.0.dist-info/INSTALLER b/monEnvTP/lib/python3.8/site-packages/importlib_resources-5.4.0.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/importlib_resources-5.4.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources-5.4.0.dist-info/LICENSE b/monEnvTP/lib/python3.8/site-packages/importlib_resources-5.4.0.dist-info/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..378b991a4d94b802250c846aceac1f2e11357eb2
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/importlib_resources-5.4.0.dist-info/LICENSE
@@ -0,0 +1,13 @@
+Copyright 2017-2019 Brett Cannon, Barry Warsaw
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources-5.4.0.dist-info/METADATA b/monEnvTP/lib/python3.8/site-packages/importlib_resources-5.4.0.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..cdb1e78399682ddfcc5b872b27cfb372fadf29c6
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/importlib_resources-5.4.0.dist-info/METADATA
@@ -0,0 +1,86 @@
+Metadata-Version: 2.1
+Name: importlib-resources
+Version: 5.4.0
+Summary: Read resources from Python packages
+Home-page: https://github.com/python/importlib_resources
+Author: Barry Warsaw
+Author-email: barry@python.org
+License: UNKNOWN
+Project-URL: Documentation, https://importlib-resources.readthedocs.io/
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Requires-Python: >=3.6
+License-File: LICENSE
+Requires-Dist: zipp (>=3.1.0) ; python_version < "3.10"
+Provides-Extra: docs
+Requires-Dist: sphinx ; extra == 'docs'
+Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs'
+Requires-Dist: rst.linker (>=1.9) ; extra == 'docs'
+Provides-Extra: testing
+Requires-Dist: pytest (>=6) ; extra == 'testing'
+Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing'
+Requires-Dist: pytest-flake8 ; extra == 'testing'
+Requires-Dist: pytest-cov ; extra == 'testing'
+Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing'
+Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing'
+Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing'
+
+.. image:: https://img.shields.io/pypi/v/importlib_resources.svg
+ :target: `PyPI link`_
+
+.. image:: https://img.shields.io/pypi/pyversions/importlib_resources.svg
+ :target: `PyPI link`_
+
+.. _PyPI link: https://pypi.org/project/importlib_resources
+
+.. image:: https://github.com/python/importlib_resources/workflows/tests/badge.svg
+ :target: https://github.com/python/importlib_resources/actions?query=workflow%3A%22tests%22
+ :alt: tests
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/psf/black
+ :alt: Code style: Black
+
+.. image:: https://readthedocs.org/projects/importlib-resources/badge/?version=latest
+ :target: https://importlib-resources.readthedocs.io/en/latest/?badge=latest
+
+.. image:: https://img.shields.io/badge/skeleton-2021-informational
+ :target: https://blog.jaraco.com/skeleton
+
+``importlib_resources`` is a backport of Python standard library
+`importlib.resources
+<https://docs.python.org/3/library/importlib.html#module-importlib.resources>`_
+module for older Pythons.
+
+The key goal of this module is to replace parts of `pkg_resources
+<https://setuptools.readthedocs.io/en/latest/pkg_resources.html>`_ with a
+solution in Python's stdlib that relies on well-defined APIs. This makes
+reading resources included in packages easier, with more stable and consistent
+semantics.
+
+Compatibility
+=============
+
+New features are introduced in this third-party library and later merged
+into CPython. The following table indicates which versions of this library
+were contributed to different versions in the standard library:
+
+.. list-table::
+ :header-rows: 1
+
+ * - importlib_resources
+ - stdlib
+ * - 5.2
+ - 3.11
+ * - 5.0
+ - 3.10
+ * - 1.3
+ - 3.9
+ * - 0.5 (?)
+ - 3.7
+
+
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources-5.4.0.dist-info/RECORD b/monEnvTP/lib/python3.8/site-packages/importlib_resources-5.4.0.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..8036ceffb539f85325d60dd75f4cddd409e6fcc1
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/importlib_resources-5.4.0.dist-info/RECORD
@@ -0,0 +1,74 @@
+importlib_resources-5.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+importlib_resources-5.4.0.dist-info/LICENSE,sha256=uWRjFdYGataJX2ziXk048ItUglQmjng3GWBALaWA36U,568
+importlib_resources-5.4.0.dist-info/METADATA,sha256=i5jH25IbM0Ls6u6UzSSCOa0c8hpDvePxqgnQwh2T5Io,3135
+importlib_resources-5.4.0.dist-info/RECORD,,
+importlib_resources-5.4.0.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92
+importlib_resources-5.4.0.dist-info/top_level.txt,sha256=fHIjHU1GZwAjvcydpmUnUrTnbvdiWjG4OEVZK8by0TQ,20
+importlib_resources/__init__.py,sha256=zuA0lbRgtVVCcAztM0z5LuBiOCV9L_3qtI6mW2p5xAg,525
+importlib_resources/__pycache__/__init__.cpython-38.pyc,,
+importlib_resources/__pycache__/_adapters.cpython-38.pyc,,
+importlib_resources/__pycache__/_common.cpython-38.pyc,,
+importlib_resources/__pycache__/_compat.cpython-38.pyc,,
+importlib_resources/__pycache__/_itertools.cpython-38.pyc,,
+importlib_resources/__pycache__/_legacy.cpython-38.pyc,,
+importlib_resources/__pycache__/abc.cpython-38.pyc,,
+importlib_resources/__pycache__/readers.cpython-38.pyc,,
+importlib_resources/__pycache__/simple.cpython-38.pyc,,
+importlib_resources/_adapters.py,sha256=o51tP2hpVtohP33gSYyAkGNpLfYDBqxxYsadyiRZi1E,4504
+importlib_resources/_common.py,sha256=iIxAaQhotSh6TLLUEfL_ynU2fzEeyHMz9JcL46mUhLg,2741
+importlib_resources/_compat.py,sha256=3LpkIfeN9x4oXjRea5TxZP5VYhPlzuVRhGe-hEv-S0s,2704
+importlib_resources/_itertools.py,sha256=WCdJ1Gs_kNFwKENyIG7TO0Y434IWCu0zjVVSsSbZwU8,884
+importlib_resources/_legacy.py,sha256=TMLkx6aEM6U8xIREPXqGZrMbUhTiPUuPl6ESD7RdYj4,3494
+importlib_resources/abc.py,sha256=MvTJJXajbl74s36Gyeesf76egtbFnh-TMtzQMVhFWXo,3886
+importlib_resources/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/readers.py,sha256=_9QLGQ5AzrED3PY8S2Zf8V6yLR0-nqqYqtQmgleDJzY,3566
+importlib_resources/simple.py,sha256=xt0qhXbwt3bZ86zuaaKbTiE9A0mDbwu0saRjUq_pcY0,2836
+importlib_resources/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/__pycache__/__init__.cpython-38.pyc,,
+importlib_resources/tests/__pycache__/_compat.cpython-38.pyc,,
+importlib_resources/tests/__pycache__/test_compatibilty_files.cpython-38.pyc,,
+importlib_resources/tests/__pycache__/test_contents.cpython-38.pyc,,
+importlib_resources/tests/__pycache__/test_files.cpython-38.pyc,,
+importlib_resources/tests/__pycache__/test_open.cpython-38.pyc,,
+importlib_resources/tests/__pycache__/test_path.cpython-38.pyc,,
+importlib_resources/tests/__pycache__/test_read.cpython-38.pyc,,
+importlib_resources/tests/__pycache__/test_reader.cpython-38.pyc,,
+importlib_resources/tests/__pycache__/test_resource.cpython-38.pyc,,
+importlib_resources/tests/__pycache__/update-zips.cpython-38.pyc,,
+importlib_resources/tests/__pycache__/util.cpython-38.pyc,,
+importlib_resources/tests/_compat.py,sha256=QGI_4p0DXybypoYvw0kr3jfQqvls3p8u4wy4Wvf0Z_o,435
+importlib_resources/tests/data01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data01/__pycache__/__init__.cpython-38.pyc,,
+importlib_resources/tests/data01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
+importlib_resources/tests/data01/subdirectory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data01/subdirectory/__pycache__/__init__.cpython-38.pyc,,
+importlib_resources/tests/data01/subdirectory/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
+importlib_resources/tests/data01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44
+importlib_resources/tests/data01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20
+importlib_resources/tests/data02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data02/__pycache__/__init__.cpython-38.pyc,,
+importlib_resources/tests/data02/one/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data02/one/__pycache__/__init__.cpython-38.pyc,,
+importlib_resources/tests/data02/one/resource1.txt,sha256=10flKac7c-XXFzJ3t-AB5MJjlBy__dSZvPE_dOm2q6U,13
+importlib_resources/tests/data02/two/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/data02/two/__pycache__/__init__.cpython-38.pyc,,
+importlib_resources/tests/data02/two/resource2.txt,sha256=lt2jbN3TMn9QiFKM832X39bU_62UptDdUkoYzkvEbl0,13
+importlib_resources/tests/namespacedata01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4
+importlib_resources/tests/namespacedata01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44
+importlib_resources/tests/namespacedata01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20
+importlib_resources/tests/test_compatibilty_files.py,sha256=NWkbIsylI8Wz3Dwsxo1quT4ZI6ToXFA2mojCG6Dzuxw,3260
+importlib_resources/tests/test_contents.py,sha256=V1Xfk3lqTDdvUsZuV18Kndf0CT_tkM2oEIwk9Vv0rhg,968
+importlib_resources/tests/test_files.py,sha256=1Nqv6VM_MjfwrmtXYL1a1CMT0QhCxi3hNMqwXlfMQTg,1184
+importlib_resources/tests/test_open.py,sha256=pmEgdrSFdM83L6FxtR8U_RT9BfI3JZ4snGmM_ZZIegY,2565
+importlib_resources/tests/test_path.py,sha256=xvPteNA-UKavDhKgLgrQuXSxKWYH7Q4nSNDVfBX95Gs,2103
+importlib_resources/tests/test_read.py,sha256=EyYvpHJ_7F4LuX2EU_c5EerIBQfRhOFmiIR7LOc5Y5E,2408
+importlib_resources/tests/test_reader.py,sha256=hgXHquqAEnioemv20ZZcDlVaiOrcZKADO37_FkiQ00Y,4286
+importlib_resources/tests/test_resource.py,sha256=DqfLNc9kaN5obqxU8kn0sRUWMf9MygagrpfMV5-QfWg,8145
+importlib_resources/tests/update-zips.py,sha256=x3iJVqWnMM5qp4Oob2Pl3o6Yi03sUjEv_5Wf-UCg3ps,1415
+importlib_resources/tests/util.py,sha256=X1j-0C96pu3_tmtJuLhzfBfcfMenOphDLkxtCt5j7t4,5309
+importlib_resources/tests/zipdata01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/zipdata01/__pycache__/__init__.cpython-38.pyc,,
+importlib_resources/tests/zipdata01/ziptestdata.zip,sha256=z5Of4dsv3T0t-46B0MsVhxlhsPGMz28aUhJDWpj3_oY,876
+importlib_resources/tests/zipdata02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+importlib_resources/tests/zipdata02/__pycache__/__init__.cpython-38.pyc,,
+importlib_resources/tests/zipdata02/ziptestdata.zip,sha256=ydI-_j-xgQ7tDxqBp9cjOqXBGxUp6ZBbwVJu6Xj-nrY,698
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources-5.4.0.dist-info/WHEEL b/monEnvTP/lib/python3.8/site-packages/importlib_resources-5.4.0.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..5bad85fdc1cd08553756d0fb2c7be8b5ad6af7fb
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/importlib_resources-5.4.0.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources-5.4.0.dist-info/top_level.txt b/monEnvTP/lib/python3.8/site-packages/importlib_resources-5.4.0.dist-info/top_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..58ad1bd333593cb544d1a6516e8e2849f094d09b
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/importlib_resources-5.4.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+importlib_resources
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/__init__.py b/monEnvTP/lib/python3.8/site-packages/importlib_resources/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..15f6b26b9b21159c8d7d9cd36c1b85f13c5bc219
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/importlib_resources/__init__.py
@@ -0,0 +1,36 @@
+"""Read resources contained within a package."""
+
+from ._common import (
+ as_file,
+ files,
+ Package,
+)
+
+from ._legacy import (
+ contents,
+ open_binary,
+ read_binary,
+ open_text,
+ read_text,
+ is_resource,
+ path,
+ Resource,
+)
+
+from importlib_resources.abc import ResourceReader
+
+
+__all__ = [
+ 'Package',
+ 'Resource',
+ 'ResourceReader',
+ 'as_file',
+ 'contents',
+ 'files',
+ 'is_resource',
+ 'open_binary',
+ 'open_text',
+ 'path',
+ 'read_binary',
+ 'read_text',
+]
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/importlib_resources/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..797e496196d90d1371f5f433036170fe6124cf20
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/__pycache__/_adapters.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/importlib_resources/__pycache__/_adapters.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7b0a0adb980f593d932da88c5186c177f787bd0d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/__pycache__/_adapters.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/__pycache__/_common.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/importlib_resources/__pycache__/_common.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2c9ea4903a5ca9aa54c261343cb9d5d1bc90f274
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/__pycache__/_common.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/__pycache__/_compat.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/importlib_resources/__pycache__/_compat.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..24887cdee8a3f6249e27d6336b954ae88658c110
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/__pycache__/_compat.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/__pycache__/_itertools.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/importlib_resources/__pycache__/_itertools.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e89217aa5cbfed3c1d7a8006e27fd5ddfe3648ab
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/__pycache__/_itertools.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/__pycache__/_legacy.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/importlib_resources/__pycache__/_legacy.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7cefc1df4765c05b79889aecd2c8c1100c768468
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/__pycache__/_legacy.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/__pycache__/abc.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/importlib_resources/__pycache__/abc.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..644bba88dfac031c364fd3d9e4a2186ac05adab1
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/__pycache__/abc.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/__pycache__/readers.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/importlib_resources/__pycache__/readers.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2f686b901ea2d705977b5d1eded3b6c3272282e0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/__pycache__/readers.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/__pycache__/simple.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/importlib_resources/__pycache__/simple.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..eebd6bc2ea0712a8e690f8394f8c03768d60f987
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/__pycache__/simple.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/_adapters.py b/monEnvTP/lib/python3.8/site-packages/importlib_resources/_adapters.py
new file mode 100644
index 0000000000000000000000000000000000000000..ea363d86a564b5450666aa00aecd46353326a75a
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/importlib_resources/_adapters.py
@@ -0,0 +1,170 @@
+from contextlib import suppress
+from io import TextIOWrapper
+
+from . import abc
+
+
+class SpecLoaderAdapter:
+ """
+ Adapt a package spec to adapt the underlying loader.
+ """
+
+ def __init__(self, spec, adapter=lambda spec: spec.loader):
+ self.spec = spec
+ self.loader = adapter(spec)
+
+ def __getattr__(self, name):
+ return getattr(self.spec, name)
+
+
+class TraversableResourcesLoader:
+ """
+ Adapt a loader to provide TraversableResources.
+ """
+
+ def __init__(self, spec):
+ self.spec = spec
+
+ def get_resource_reader(self, name):
+ return CompatibilityFiles(self.spec)._native()
+
+
+def _io_wrapper(file, mode='r', *args, **kwargs):
+ if mode == 'r':
+ return TextIOWrapper(file, *args, **kwargs)
+ elif mode == 'rb':
+ return file
+ raise ValueError(
+ "Invalid mode value '{}', only 'r' and 'rb' are supported".format(mode)
+ )
+
+
+class CompatibilityFiles:
+ """
+ Adapter for an existing or non-existent resource reader
+ to provide a compatibility .files().
+ """
+
+ class SpecPath(abc.Traversable):
+ """
+ Path tied to a module spec.
+ Can be read and exposes the resource reader children.
+ """
+
+ def __init__(self, spec, reader):
+ self._spec = spec
+ self._reader = reader
+
+ def iterdir(self):
+ if not self._reader:
+ return iter(())
+ return iter(
+ CompatibilityFiles.ChildPath(self._reader, path)
+ for path in self._reader.contents()
+ )
+
+ def is_file(self):
+ return False
+
+ is_dir = is_file
+
+ def joinpath(self, other):
+ if not self._reader:
+ return CompatibilityFiles.OrphanPath(other)
+ return CompatibilityFiles.ChildPath(self._reader, other)
+
+ @property
+ def name(self):
+ return self._spec.name
+
+ def open(self, mode='r', *args, **kwargs):
+ return _io_wrapper(self._reader.open_resource(None), mode, *args, **kwargs)
+
+ class ChildPath(abc.Traversable):
+ """
+ Path tied to a resource reader child.
+ Can be read but doesn't expose any meaningful children.
+ """
+
+ def __init__(self, reader, name):
+ self._reader = reader
+ self._name = name
+
+ def iterdir(self):
+ return iter(())
+
+ def is_file(self):
+ return self._reader.is_resource(self.name)
+
+ def is_dir(self):
+ return not self.is_file()
+
+ def joinpath(self, other):
+ return CompatibilityFiles.OrphanPath(self.name, other)
+
+ @property
+ def name(self):
+ return self._name
+
+ def open(self, mode='r', *args, **kwargs):
+ return _io_wrapper(
+ self._reader.open_resource(self.name), mode, *args, **kwargs
+ )
+
+ class OrphanPath(abc.Traversable):
+ """
+ Orphan path, not tied to a module spec or resource reader.
+ Can't be read and doesn't expose any meaningful children.
+ """
+
+ def __init__(self, *path_parts):
+ if len(path_parts) < 1:
+ raise ValueError('Need at least one path part to construct a path')
+ self._path = path_parts
+
+ def iterdir(self):
+ return iter(())
+
+ def is_file(self):
+ return False
+
+ is_dir = is_file
+
+ def joinpath(self, other):
+ return CompatibilityFiles.OrphanPath(*self._path, other)
+
+ @property
+ def name(self):
+ return self._path[-1]
+
+ def open(self, mode='r', *args, **kwargs):
+ raise FileNotFoundError("Can't open orphan path")
+
+ def __init__(self, spec):
+ self.spec = spec
+
+ @property
+ def _reader(self):
+ with suppress(AttributeError):
+ return self.spec.loader.get_resource_reader(self.spec.name)
+
+ def _native(self):
+ """
+ Return the native reader if it supports files().
+ """
+ reader = self._reader
+ return reader if hasattr(reader, 'files') else self
+
+ def __getattr__(self, attr):
+ return getattr(self._reader, attr)
+
+ def files(self):
+ return CompatibilityFiles.SpecPath(self.spec, self._reader)
+
+
+def wrap_spec(package):
+ """
+ Construct a package spec with traversable compatibility
+ on the spec/loader/reader.
+ """
+ return SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader)
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/_common.py b/monEnvTP/lib/python3.8/site-packages/importlib_resources/_common.py
new file mode 100644
index 0000000000000000000000000000000000000000..a12e2c75d132c73b556702159d535d15ed9abfd2
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/importlib_resources/_common.py
@@ -0,0 +1,104 @@
+import os
+import pathlib
+import tempfile
+import functools
+import contextlib
+import types
+import importlib
+
+from typing import Union, Optional
+from .abc import ResourceReader, Traversable
+
+from ._compat import wrap_spec
+
+Package = Union[types.ModuleType, str]
+
+
+def files(package):
+ # type: (Package) -> Traversable
+ """
+ Get a Traversable resource from a package
+ """
+ return from_package(get_package(package))
+
+
+def get_resource_reader(package):
+ # type: (types.ModuleType) -> Optional[ResourceReader]
+ """
+ Return the package's loader if it's a ResourceReader.
+ """
+ # We can't use
+ # a issubclass() check here because apparently abc.'s __subclasscheck__()
+ # hook wants to create a weak reference to the object, but
+ # zipimport.zipimporter does not support weak references, resulting in a
+ # TypeError. That seems terrible.
+ spec = package.__spec__
+ reader = getattr(spec.loader, 'get_resource_reader', None) # type: ignore
+ if reader is None:
+ return None
+ return reader(spec.name) # type: ignore
+
+
+def resolve(cand):
+ # type: (Package) -> types.ModuleType
+ return cand if isinstance(cand, types.ModuleType) else importlib.import_module(cand)
+
+
+def get_package(package):
+ # type: (Package) -> types.ModuleType
+ """Take a package name or module object and return the module.
+
+ Raise an exception if the resolved module is not a package.
+ """
+ resolved = resolve(package)
+ if wrap_spec(resolved).submodule_search_locations is None:
+ raise TypeError(f'{package!r} is not a package')
+ return resolved
+
+
+def from_package(package):
+ """
+ Return a Traversable object for the given package.
+
+ """
+ spec = wrap_spec(package)
+ reader = spec.loader.get_resource_reader(spec.name)
+ return reader.files()
+
+
+@contextlib.contextmanager
+def _tempfile(reader, suffix=''):
+ # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try'
+ # blocks due to the need to close the temporary file to work on Windows
+ # properly.
+ fd, raw_path = tempfile.mkstemp(suffix=suffix)
+ try:
+ try:
+ os.write(fd, reader())
+ finally:
+ os.close(fd)
+ del reader
+ yield pathlib.Path(raw_path)
+ finally:
+ try:
+ os.remove(raw_path)
+ except FileNotFoundError:
+ pass
+
+
+@functools.singledispatch
+def as_file(path):
+ """
+ Given a Traversable object, return that object as a
+ path on the local file system in a context manager.
+ """
+ return _tempfile(path.read_bytes, suffix=path.name)
+
+
+@as_file.register(pathlib.Path)
+@contextlib.contextmanager
+def _(path):
+ """
+ Degenerate behavior for pathlib.Path objects.
+ """
+ yield path
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/_compat.py b/monEnvTP/lib/python3.8/site-packages/importlib_resources/_compat.py
new file mode 100644
index 0000000000000000000000000000000000000000..61e48d47d3a45e6f5fcc667e1b562cff793fbcc1
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/importlib_resources/_compat.py
@@ -0,0 +1,98 @@
+# flake8: noqa
+
+import abc
+import sys
+import pathlib
+from contextlib import suppress
+
+if sys.version_info >= (3, 10):
+ from zipfile import Path as ZipPath # type: ignore
+else:
+ from zipp import Path as ZipPath # type: ignore
+
+
+try:
+ from typing import runtime_checkable # type: ignore
+except ImportError:
+
+ def runtime_checkable(cls): # type: ignore
+ return cls
+
+
+try:
+ from typing import Protocol # type: ignore
+except ImportError:
+ Protocol = abc.ABC # type: ignore
+
+
+class TraversableResourcesLoader:
+ """
+ Adapt loaders to provide TraversableResources and other
+ compatibility.
+
+ Used primarily for Python 3.9 and earlier where the native
+ loaders do not yet implement TraversableResources.
+ """
+
+ def __init__(self, spec):
+ self.spec = spec
+
+ @property
+ def path(self):
+ return self.spec.origin
+
+ def get_resource_reader(self, name):
+ from . import readers, _adapters
+
+ def _zip_reader(spec):
+ with suppress(AttributeError):
+ return readers.ZipReader(spec.loader, spec.name)
+
+ def _namespace_reader(spec):
+ with suppress(AttributeError, ValueError):
+ return readers.NamespaceReader(spec.submodule_search_locations)
+
+ def _available_reader(spec):
+ with suppress(AttributeError):
+ return spec.loader.get_resource_reader(spec.name)
+
+ def _native_reader(spec):
+ reader = _available_reader(spec)
+ return reader if hasattr(reader, 'files') else None
+
+ def _file_reader(spec):
+ try:
+ path = pathlib.Path(self.path)
+ except TypeError:
+ return None
+ if path.exists():
+ return readers.FileReader(self)
+
+ return (
+ # native reader if it supplies 'files'
+ _native_reader(self.spec)
+ or
+ # local ZipReader if a zip module
+ _zip_reader(self.spec)
+ or
+ # local NamespaceReader if a namespace module
+ _namespace_reader(self.spec)
+ or
+ # local FileReader
+ _file_reader(self.spec)
+ # fallback - adapt the spec ResourceReader to TraversableReader
+ or _adapters.CompatibilityFiles(self.spec)
+ )
+
+
+def wrap_spec(package):
+ """
+ Construct a package spec with traversable compatibility
+ on the spec/loader/reader.
+
+ Supersedes _adapters.wrap_spec to use TraversableResourcesLoader
+ from above for older Python compatibility (<3.10).
+ """
+ from . import _adapters
+
+ return _adapters.SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader)
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/_itertools.py b/monEnvTP/lib/python3.8/site-packages/importlib_resources/_itertools.py
new file mode 100644
index 0000000000000000000000000000000000000000..cce05582ffc6fe6d72027194f4ccc44ee42f1fcd
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/importlib_resources/_itertools.py
@@ -0,0 +1,35 @@
+from itertools import filterfalse
+
+from typing import (
+ Callable,
+ Iterable,
+ Iterator,
+ Optional,
+ Set,
+ TypeVar,
+ Union,
+)
+
+# Type and type variable definitions
+_T = TypeVar('_T')
+_U = TypeVar('_U')
+
+
+def unique_everseen(
+ iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = None
+) -> Iterator[_T]:
+ "List unique elements, preserving order. Remember all elements ever seen."
+ # unique_everseen('AAAABBBCCDAABBB') --> A B C D
+ # unique_everseen('ABBCcAD', str.lower) --> A B C D
+ seen: Set[Union[_T, _U]] = set()
+ seen_add = seen.add
+ if key is None:
+ for element in filterfalse(seen.__contains__, iterable):
+ seen_add(element)
+ yield element
+ else:
+ for element in iterable:
+ k = key(element)
+ if k not in seen:
+ seen_add(k)
+ yield element
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/_legacy.py b/monEnvTP/lib/python3.8/site-packages/importlib_resources/_legacy.py
new file mode 100644
index 0000000000000000000000000000000000000000..1d5d3f1fbb1f6c69d0da2a50e1d4492ad3378f17
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/importlib_resources/_legacy.py
@@ -0,0 +1,121 @@
+import functools
+import os
+import pathlib
+import types
+import warnings
+
+from typing import Union, Iterable, ContextManager, BinaryIO, TextIO, Any
+
+from . import _common
+
+Package = Union[types.ModuleType, str]
+Resource = str
+
+
+def deprecated(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ warnings.warn(
+ f"{func.__name__} is deprecated. Use files() instead. "
+ "Refer to https://importlib-resources.readthedocs.io"
+ "/en/latest/using.html#migrating-from-legacy for migration advice.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return func(*args, **kwargs)
+
+ return wrapper
+
+
+def normalize_path(path):
+ # type: (Any) -> str
+ """Normalize a path by ensuring it is a string.
+
+ If the resulting string contains path separators, an exception is raised.
+ """
+ str_path = str(path)
+ parent, file_name = os.path.split(str_path)
+ if parent:
+ raise ValueError(f'{path!r} must be only a file name')
+ return file_name
+
+
+@deprecated
+def open_binary(package: Package, resource: Resource) -> BinaryIO:
+ """Return a file-like object opened for binary reading of the resource."""
+ return (_common.files(package) / normalize_path(resource)).open('rb')
+
+
+@deprecated
+def read_binary(package: Package, resource: Resource) -> bytes:
+ """Return the binary contents of the resource."""
+ return (_common.files(package) / normalize_path(resource)).read_bytes()
+
+
+@deprecated
+def open_text(
+ package: Package,
+ resource: Resource,
+ encoding: str = 'utf-8',
+ errors: str = 'strict',
+) -> TextIO:
+ """Return a file-like object opened for text reading of the resource."""
+ return (_common.files(package) / normalize_path(resource)).open(
+ 'r', encoding=encoding, errors=errors
+ )
+
+
+@deprecated
+def read_text(
+ package: Package,
+ resource: Resource,
+ encoding: str = 'utf-8',
+ errors: str = 'strict',
+) -> str:
+ """Return the decoded string of the resource.
+
+ The decoding-related arguments have the same semantics as those of
+ bytes.decode().
+ """
+ with open_text(package, resource, encoding, errors) as fp:
+ return fp.read()
+
+
+@deprecated
+def contents(package: Package) -> Iterable[str]:
+ """Return an iterable of entries in `package`.
+
+ Note that not all entries are resources. Specifically, directories are
+ not considered resources. Use `is_resource()` on each entry returned here
+ to check if it is a resource or not.
+ """
+ return [path.name for path in _common.files(package).iterdir()]
+
+
+@deprecated
+def is_resource(package: Package, name: str) -> bool:
+ """True if `name` is a resource inside `package`.
+
+ Directories are *not* resources.
+ """
+ resource = normalize_path(name)
+ return any(
+ traversable.name == resource and traversable.is_file()
+ for traversable in _common.files(package).iterdir()
+ )
+
+
+@deprecated
+def path(
+ package: Package,
+ resource: Resource,
+) -> ContextManager[pathlib.Path]:
+ """A context manager providing a file path object to the resource.
+
+ If the resource does not already exist on its own on the file system,
+ a temporary file will be created. If the file was created, the file
+ will be deleted upon exiting the context manager (no exception is
+ raised if the file was deleted prior to the context manager
+ exiting).
+ """
+ return _common.as_file(_common.files(package) / normalize_path(resource))
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/abc.py b/monEnvTP/lib/python3.8/site-packages/importlib_resources/abc.py
new file mode 100644
index 0000000000000000000000000000000000000000..d39dc1adba0f00d2f7bdf6fa2cd1abcd82475e2e
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/importlib_resources/abc.py
@@ -0,0 +1,137 @@
+import abc
+from typing import BinaryIO, Iterable, Text
+
+from ._compat import runtime_checkable, Protocol
+
+
+class ResourceReader(metaclass=abc.ABCMeta):
+ """Abstract base class for loaders to provide resource reading support."""
+
+ @abc.abstractmethod
+ def open_resource(self, resource: Text) -> BinaryIO:
+ """Return an opened, file-like object for binary reading.
+
+ The 'resource' argument is expected to represent only a file name.
+ If the resource cannot be found, FileNotFoundError is raised.
+ """
+ # This deliberately raises FileNotFoundError instead of
+ # NotImplementedError so that if this method is accidentally called,
+ # it'll still do the right thing.
+ raise FileNotFoundError
+
+ @abc.abstractmethod
+ def resource_path(self, resource: Text) -> Text:
+ """Return the file system path to the specified resource.
+
+ The 'resource' argument is expected to represent only a file name.
+ If the resource does not exist on the file system, raise
+ FileNotFoundError.
+ """
+ # This deliberately raises FileNotFoundError instead of
+ # NotImplementedError so that if this method is accidentally called,
+ # it'll still do the right thing.
+ raise FileNotFoundError
+
+ @abc.abstractmethod
+ def is_resource(self, path: Text) -> bool:
+ """Return True if the named 'path' is a resource.
+
+ Files are resources, directories are not.
+ """
+ raise FileNotFoundError
+
+ @abc.abstractmethod
+ def contents(self) -> Iterable[str]:
+ """Return an iterable of entries in `package`."""
+ raise FileNotFoundError
+
+
+@runtime_checkable
+class Traversable(Protocol):
+ """
+ An object with a subset of pathlib.Path methods suitable for
+ traversing directories and opening files.
+ """
+
+ @abc.abstractmethod
+ def iterdir(self):
+ """
+ Yield Traversable objects in self
+ """
+
+ def read_bytes(self):
+ """
+ Read contents of self as bytes
+ """
+ with self.open('rb') as strm:
+ return strm.read()
+
+ def read_text(self, encoding=None):
+ """
+ Read contents of self as text
+ """
+ with self.open(encoding=encoding) as strm:
+ return strm.read()
+
+ @abc.abstractmethod
+ def is_dir(self) -> bool:
+ """
+ Return True if self is a directory
+ """
+
+ @abc.abstractmethod
+ def is_file(self) -> bool:
+ """
+ Return True if self is a file
+ """
+
+ @abc.abstractmethod
+ def joinpath(self, child):
+ """
+ Return Traversable child in self
+ """
+
+ def __truediv__(self, child):
+ """
+ Return Traversable child in self
+ """
+ return self.joinpath(child)
+
+ @abc.abstractmethod
+ def open(self, mode='r', *args, **kwargs):
+ """
+ mode may be 'r' or 'rb' to open as text or binary. Return a handle
+ suitable for reading (same as pathlib.Path.open).
+
+ When opening as text, accepts encoding parameters such as those
+ accepted by io.TextIOWrapper.
+ """
+
+ @abc.abstractproperty
+ def name(self) -> str:
+ """
+ The base name of this object without any parent references.
+ """
+
+
+class TraversableResources(ResourceReader):
+ """
+ The required interface for providing traversable
+ resources.
+ """
+
+ @abc.abstractmethod
+ def files(self):
+ """Return a Traversable object for the loaded package."""
+
+ def open_resource(self, resource):
+ return self.files().joinpath(resource).open('rb')
+
+ def resource_path(self, resource):
+ raise FileNotFoundError(resource)
+
+ def is_resource(self, path):
+ return self.files().joinpath(path).is_file()
+
+ def contents(self):
+ return (item.name for item in self.files().iterdir())
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/py.typed b/monEnvTP/lib/python3.8/site-packages/importlib_resources/py.typed
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/readers.py b/monEnvTP/lib/python3.8/site-packages/importlib_resources/readers.py
new file mode 100644
index 0000000000000000000000000000000000000000..f1190ca452a1ce22ee9a1b304991d475281df8ca
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/importlib_resources/readers.py
@@ -0,0 +1,122 @@
+import collections
+import pathlib
+import operator
+
+from . import abc
+
+from ._itertools import unique_everseen
+from ._compat import ZipPath
+
+
+def remove_duplicates(items):
+ return iter(collections.OrderedDict.fromkeys(items))
+
+
+class FileReader(abc.TraversableResources):
+ def __init__(self, loader):
+ self.path = pathlib.Path(loader.path).parent
+
+ def resource_path(self, resource):
+ """
+ Return the file system path to prevent
+ `resources.path()` from creating a temporary
+ copy.
+ """
+ return str(self.path.joinpath(resource))
+
+ def files(self):
+ return self.path
+
+
+class ZipReader(abc.TraversableResources):
+ def __init__(self, loader, module):
+ _, _, name = module.rpartition('.')
+ self.prefix = loader.prefix.replace('\\', '/') + name + '/'
+ self.archive = loader.archive
+
+ def open_resource(self, resource):
+ try:
+ return super().open_resource(resource)
+ except KeyError as exc:
+ raise FileNotFoundError(exc.args[0])
+
+ def is_resource(self, path):
+ # workaround for `zipfile.Path.is_file` returning true
+ # for non-existent paths.
+ target = self.files().joinpath(path)
+ return target.is_file() and target.exists()
+
+ def files(self):
+ return ZipPath(self.archive, self.prefix)
+
+
+class MultiplexedPath(abc.Traversable):
+ """
+ Given a series of Traversable objects, implement a merged
+ version of the interface across all objects. Useful for
+ namespace packages which may be multihomed at a single
+ name.
+ """
+
+ def __init__(self, *paths):
+ self._paths = list(map(pathlib.Path, remove_duplicates(paths)))
+ if not self._paths:
+ message = 'MultiplexedPath must contain at least one path'
+ raise FileNotFoundError(message)
+ if not all(path.is_dir() for path in self._paths):
+ raise NotADirectoryError('MultiplexedPath only supports directories')
+
+ def iterdir(self):
+ files = (file for path in self._paths for file in path.iterdir())
+ return unique_everseen(files, key=operator.attrgetter('name'))
+
+ def read_bytes(self):
+ raise FileNotFoundError(f'{self} is not a file')
+
+ def read_text(self, *args, **kwargs):
+ raise FileNotFoundError(f'{self} is not a file')
+
+ def is_dir(self):
+ return True
+
+ def is_file(self):
+ return False
+
+ def joinpath(self, child):
+ # first try to find child in current paths
+ for file in self.iterdir():
+ if file.name == child:
+ return file
+ # if it does not exist, construct it with the first path
+ return self._paths[0] / child
+
+ __truediv__ = joinpath
+
+ def open(self, *args, **kwargs):
+ raise FileNotFoundError(f'{self} is not a file')
+
+ @property
+ def name(self):
+ return self._paths[0].name
+
+ def __repr__(self):
+ paths = ', '.join(f"'{path}'" for path in self._paths)
+ return f'MultiplexedPath({paths})'
+
+
+class NamespaceReader(abc.TraversableResources):
+ def __init__(self, namespace_path):
+ if 'NamespacePath' not in str(namespace_path):
+ raise ValueError('Invalid path')
+ self.path = MultiplexedPath(*list(namespace_path))
+
+ def resource_path(self, resource):
+ """
+ Return the file system path to prevent
+ `resources.path()` from creating a temporary
+ copy.
+ """
+ return str(self.path.joinpath(resource))
+
+ def files(self):
+ return self.path
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/simple.py b/monEnvTP/lib/python3.8/site-packages/importlib_resources/simple.py
new file mode 100644
index 0000000000000000000000000000000000000000..da073cbdb11e6c24c19a2d388c53c8842228595f
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/importlib_resources/simple.py
@@ -0,0 +1,116 @@
+"""
+Interface adapters for low-level readers.
+"""
+
+import abc
+import io
+import itertools
+from typing import BinaryIO, List
+
+from .abc import Traversable, TraversableResources
+
+
+class SimpleReader(abc.ABC):
+ """
+ The minimum, low-level interface required from a resource
+ provider.
+ """
+
+ @abc.abstractproperty
+ def package(self):
+ # type: () -> str
+ """
+ The name of the package for which this reader loads resources.
+ """
+
+ @abc.abstractmethod
+ def children(self):
+ # type: () -> List['SimpleReader']
+ """
+ Obtain an iterable of SimpleReader for available
+ child containers (e.g. directories).
+ """
+
+ @abc.abstractmethod
+ def resources(self):
+ # type: () -> List[str]
+ """
+ Obtain available named resources for this virtual package.
+ """
+
+ @abc.abstractmethod
+ def open_binary(self, resource):
+ # type: (str) -> BinaryIO
+ """
+ Obtain a File-like for a named resource.
+ """
+
+ @property
+ def name(self):
+ return self.package.split('.')[-1]
+
+
+class ResourceHandle(Traversable):
+ """
+ Handle to a named resource in a ResourceReader.
+ """
+
+ def __init__(self, parent, name):
+ # type: (ResourceContainer, str) -> None
+ self.parent = parent
+ self.name = name # type: ignore
+
+ def is_file(self):
+ return True
+
+ def is_dir(self):
+ return False
+
+ def open(self, mode='r', *args, **kwargs):
+ stream = self.parent.reader.open_binary(self.name)
+ if 'b' not in mode:
+ stream = io.TextIOWrapper(*args, **kwargs)
+ return stream
+
+ def joinpath(self, name):
+ raise RuntimeError("Cannot traverse into a resource")
+
+
+class ResourceContainer(Traversable):
+ """
+ Traversable container for a package's resources via its reader.
+ """
+
+ def __init__(self, reader):
+ # type: (SimpleReader) -> None
+ self.reader = reader
+
+ def is_dir(self):
+ return True
+
+ def is_file(self):
+ return False
+
+ def iterdir(self):
+ files = (ResourceHandle(self, name) for name in self.reader.resources)
+ dirs = map(ResourceContainer, self.reader.children())
+ return itertools.chain(files, dirs)
+
+ def open(self, *args, **kwargs):
+ raise IsADirectoryError()
+
+ def joinpath(self, name):
+ return next(
+ traversable for traversable in self.iterdir() if traversable.name == name
+ )
+
+
+class TraversableReader(TraversableResources, SimpleReader):
+ """
+ A TraversableResources based on SimpleReader. Resource providers
+ may derive from this class to provide the TraversableResources
+ interface by supplying the SimpleReader interface.
+ """
+
+ def files(self):
+ return ResourceContainer(self)
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__init__.py b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..fa041162fd9ff7bb4bf74782862d209e194b1886
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/_compat.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/_compat.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..80b0f8d0b9cbb2071e5cf141ee8cec38f99aa6a2
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/_compat.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/test_compatibilty_files.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/test_compatibilty_files.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..23bcc81e0748761eeb65c42faaf16dbda32f09fc
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/test_compatibilty_files.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/test_contents.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/test_contents.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..01cd0e49ab8680ccf65ee95c11b6f537ddc00a10
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/test_contents.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/test_files.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/test_files.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..10d6003c956e5de15e5756ebc6da9389b9b1d9a9
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/test_files.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/test_open.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/test_open.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3dab3018ea6124b26cd26c764e1331940777b85b
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/test_open.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/test_path.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/test_path.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b5d64713187a1647ef050e50c64b5a4015e9dfba
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/test_path.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/test_read.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/test_read.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d5eee83f2980400da04fc2c2f42d7eb1e8be2d28
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/test_read.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/test_reader.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/test_reader.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f7a42c103899d0eb43de64becd49da636cfabe94
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/test_reader.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/test_resource.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/test_resource.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0bbfd8bf49f54030e24c13bfbb778eb05ccd8ba5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/test_resource.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/update-zips.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/update-zips.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e128394e71e6b6a0e54ac7a3d2c5bd29b7781fe3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/update-zips.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/util.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/util.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..04022065d672be663ebaa6b3c8d73726cdd209ef
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/__pycache__/util.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/_compat.py b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/_compat.py
new file mode 100644
index 0000000000000000000000000000000000000000..4c99cffd0282cd577a716e4652166de4ee0ac8b4
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/_compat.py
@@ -0,0 +1,19 @@
+import os
+
+
+try:
+ from test.support import import_helper # type: ignore
+except ImportError:
+ # Python 3.9 and earlier
+ class import_helper: # type: ignore
+ from test.support import modules_setup, modules_cleanup
+
+
+try:
+ # Python 3.10
+ from test.support.os_helper import unlink
+except ImportError:
+ from test.support import unlink as _unlink
+
+ def unlink(target):
+ return _unlink(os.fspath(target))
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data01/__init__.py b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data01/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data01/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data01/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3abb10b288c67526cbb31463105100c9a7663195
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data01/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data01/binary.file b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data01/binary.file
new file mode 100644
index 0000000000000000000000000000000000000000..eaf36c1daccfdf325514461cd1a2ffbc139b5464
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data01/binary.file differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data01/subdirectory/__init__.py b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data01/subdirectory/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data01/subdirectory/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data01/subdirectory/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6489795eef73dacd966d418a3a3093dade45ef3c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data01/subdirectory/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data01/subdirectory/binary.file b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data01/subdirectory/binary.file
new file mode 100644
index 0000000000000000000000000000000000000000..eaf36c1daccfdf325514461cd1a2ffbc139b5464
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data01/subdirectory/binary.file differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data01/utf-16.file b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data01/utf-16.file
new file mode 100644
index 0000000000000000000000000000000000000000..2cb772295ef4b480a8d83725bd5006a0236d8f68
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data01/utf-16.file differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data01/utf-8.file b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data01/utf-8.file
new file mode 100644
index 0000000000000000000000000000000000000000..1c0132ad90a1926b64be56b6fe335de6d727aa17
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data01/utf-8.file
@@ -0,0 +1 @@
+Hello, UTF-8 world!
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data02/__init__.py b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data02/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data02/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data02/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2e501ebe24457de1baff4c8d49d5455245f8ae3b
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data02/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data02/one/__init__.py b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data02/one/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data02/one/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data02/one/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a12d26b9db1df8163b76d66d8b1576e85f70505c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data02/one/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data02/one/resource1.txt b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data02/one/resource1.txt
new file mode 100644
index 0000000000000000000000000000000000000000..61a813e40174a6ffda9ef98820a4f733c3276d68
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data02/one/resource1.txt
@@ -0,0 +1 @@
+one resource
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data02/two/__init__.py b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data02/two/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data02/two/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data02/two/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d847b60ae28f829d94af2e6dd035addee8d42e22
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data02/two/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data02/two/resource2.txt b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data02/two/resource2.txt
new file mode 100644
index 0000000000000000000000000000000000000000..a80ce46ea362e2019c489c6cf7a63e833acb7fbd
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/data02/two/resource2.txt
@@ -0,0 +1 @@
+two resource
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/namespacedata01/binary.file b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/namespacedata01/binary.file
new file mode 100644
index 0000000000000000000000000000000000000000..eaf36c1daccfdf325514461cd1a2ffbc139b5464
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/namespacedata01/binary.file differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/namespacedata01/utf-16.file b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/namespacedata01/utf-16.file
new file mode 100644
index 0000000000000000000000000000000000000000..2cb772295ef4b480a8d83725bd5006a0236d8f68
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/namespacedata01/utf-16.file differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/namespacedata01/utf-8.file b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/namespacedata01/utf-8.file
new file mode 100644
index 0000000000000000000000000000000000000000..1c0132ad90a1926b64be56b6fe335de6d727aa17
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/namespacedata01/utf-8.file
@@ -0,0 +1 @@
+Hello, UTF-8 world!
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/test_compatibilty_files.py b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/test_compatibilty_files.py
new file mode 100644
index 0000000000000000000000000000000000000000..d92c7c56c9c76aa545f4e1ac68574f32e5aeb7f0
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/test_compatibilty_files.py
@@ -0,0 +1,102 @@
+import io
+import unittest
+
+import importlib_resources as resources
+
+from importlib_resources._adapters import (
+ CompatibilityFiles,
+ wrap_spec,
+)
+
+from . import util
+
+
+class CompatibilityFilesTests(unittest.TestCase):
+ @property
+ def package(self):
+ bytes_data = io.BytesIO(b'Hello, world!')
+ return util.create_package(
+ file=bytes_data,
+ path='some_path',
+ contents=('a', 'b', 'c'),
+ )
+
+ @property
+ def files(self):
+ return resources.files(self.package)
+
+ def test_spec_path_iter(self):
+ self.assertEqual(
+ sorted(path.name for path in self.files.iterdir()),
+ ['a', 'b', 'c'],
+ )
+
+ def test_child_path_iter(self):
+ self.assertEqual(list((self.files / 'a').iterdir()), [])
+
+ def test_orphan_path_iter(self):
+ self.assertEqual(list((self.files / 'a' / 'a').iterdir()), [])
+ self.assertEqual(list((self.files / 'a' / 'a' / 'a').iterdir()), [])
+
+ def test_spec_path_is(self):
+ self.assertFalse(self.files.is_file())
+ self.assertFalse(self.files.is_dir())
+
+ def test_child_path_is(self):
+ self.assertTrue((self.files / 'a').is_file())
+ self.assertFalse((self.files / 'a').is_dir())
+
+ def test_orphan_path_is(self):
+ self.assertFalse((self.files / 'a' / 'a').is_file())
+ self.assertFalse((self.files / 'a' / 'a').is_dir())
+ self.assertFalse((self.files / 'a' / 'a' / 'a').is_file())
+ self.assertFalse((self.files / 'a' / 'a' / 'a').is_dir())
+
+ def test_spec_path_name(self):
+ self.assertEqual(self.files.name, 'testingpackage')
+
+ def test_child_path_name(self):
+ self.assertEqual((self.files / 'a').name, 'a')
+
+ def test_orphan_path_name(self):
+ self.assertEqual((self.files / 'a' / 'b').name, 'b')
+ self.assertEqual((self.files / 'a' / 'b' / 'c').name, 'c')
+
+ def test_spec_path_open(self):
+ self.assertEqual(self.files.read_bytes(), b'Hello, world!')
+ self.assertEqual(self.files.read_text(), 'Hello, world!')
+
+ def test_child_path_open(self):
+ self.assertEqual((self.files / 'a').read_bytes(), b'Hello, world!')
+ self.assertEqual((self.files / 'a').read_text(), 'Hello, world!')
+
+ def test_orphan_path_open(self):
+ with self.assertRaises(FileNotFoundError):
+ (self.files / 'a' / 'b').read_bytes()
+ with self.assertRaises(FileNotFoundError):
+ (self.files / 'a' / 'b' / 'c').read_bytes()
+
+ def test_open_invalid_mode(self):
+ with self.assertRaises(ValueError):
+ self.files.open('0')
+
+ def test_orphan_path_invalid(self):
+ with self.assertRaises(ValueError):
+ CompatibilityFiles.OrphanPath()
+
+ def test_wrap_spec(self):
+ spec = wrap_spec(self.package)
+ self.assertIsInstance(spec.loader.get_resource_reader(None), CompatibilityFiles)
+
+
+class CompatibilityFilesNoReaderTests(unittest.TestCase):
+ @property
+ def package(self):
+ return util.create_package_from_loader(None)
+
+ @property
+ def files(self):
+ return resources.files(self.package)
+
+ def test_spec_path_joinpath(self):
+ self.assertIsInstance(self.files / 'a', CompatibilityFiles.OrphanPath)
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/test_contents.py b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/test_contents.py
new file mode 100644
index 0000000000000000000000000000000000000000..525568e8c9fbfa4adf4673d82a35f6b67761f62c
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/test_contents.py
@@ -0,0 +1,43 @@
+import unittest
+import importlib_resources as resources
+
+from . import data01
+from . import util
+
+
+class ContentsTests:
+ expected = {
+ '__init__.py',
+ 'binary.file',
+ 'subdirectory',
+ 'utf-16.file',
+ 'utf-8.file',
+ }
+
+ def test_contents(self):
+ contents = {path.name for path in resources.files(self.data).iterdir()}
+ assert self.expected <= contents
+
+
+class ContentsDiskTests(ContentsTests, unittest.TestCase):
+ def setUp(self):
+ self.data = data01
+
+
+class ContentsZipTests(ContentsTests, util.ZipSetup, unittest.TestCase):
+ pass
+
+
+class ContentsNamespaceTests(ContentsTests, unittest.TestCase):
+ expected = {
+ # no __init__ because of namespace design
+ # no subdirectory as incidental difference in fixture
+ 'binary.file',
+ 'utf-16.file',
+ 'utf-8.file',
+ }
+
+ def setUp(self):
+ from . import namespacedata01
+
+ self.data = namespacedata01
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/test_files.py b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/test_files.py
new file mode 100644
index 0000000000000000000000000000000000000000..2676b49e7deed612a2548acb7866995105d53d3c
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/test_files.py
@@ -0,0 +1,46 @@
+import typing
+import unittest
+
+import importlib_resources as resources
+from importlib_resources.abc import Traversable
+from . import data01
+from . import util
+
+
+class FilesTests:
+ def test_read_bytes(self):
+ files = resources.files(self.data)
+ actual = files.joinpath('utf-8.file').read_bytes()
+ assert actual == b'Hello, UTF-8 world!\n'
+
+ def test_read_text(self):
+ files = resources.files(self.data)
+ actual = files.joinpath('utf-8.file').read_text(encoding='utf-8')
+ assert actual == 'Hello, UTF-8 world!\n'
+
+ @unittest.skipUnless(
+ hasattr(typing, 'runtime_checkable'),
+ "Only suitable when typing supports runtime_checkable",
+ )
+ def test_traversable(self):
+ assert isinstance(resources.files(self.data), Traversable)
+
+
+class OpenDiskTests(FilesTests, unittest.TestCase):
+ def setUp(self):
+ self.data = data01
+
+
+class OpenZipTests(FilesTests, util.ZipSetup, unittest.TestCase):
+ pass
+
+
+class OpenNamespaceTests(FilesTests, unittest.TestCase):
+ def setUp(self):
+ from . import namespacedata01
+
+ self.data = namespacedata01
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/test_open.py b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/test_open.py
new file mode 100644
index 0000000000000000000000000000000000000000..87b42c3d39b6b120db1c2cb13d7cd3fcc40c6335
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/test_open.py
@@ -0,0 +1,81 @@
+import unittest
+
+import importlib_resources as resources
+from . import data01
+from . import util
+
+
+class CommonBinaryTests(util.CommonTests, unittest.TestCase):
+ def execute(self, package, path):
+ target = resources.files(package).joinpath(path)
+ with target.open('rb'):
+ pass
+
+
+class CommonTextTests(util.CommonTests, unittest.TestCase):
+ def execute(self, package, path):
+ target = resources.files(package).joinpath(path)
+ with target.open():
+ pass
+
+
+class OpenTests:
+ def test_open_binary(self):
+ target = resources.files(self.data) / 'binary.file'
+ with target.open('rb') as fp:
+ result = fp.read()
+ self.assertEqual(result, b'\x00\x01\x02\x03')
+
+ def test_open_text_default_encoding(self):
+ target = resources.files(self.data) / 'utf-8.file'
+ with target.open() as fp:
+ result = fp.read()
+ self.assertEqual(result, 'Hello, UTF-8 world!\n')
+
+ def test_open_text_given_encoding(self):
+ target = resources.files(self.data) / 'utf-16.file'
+ with target.open(encoding='utf-16', errors='strict') as fp:
+ result = fp.read()
+ self.assertEqual(result, 'Hello, UTF-16 world!\n')
+
+ def test_open_text_with_errors(self):
+ # Raises UnicodeError without the 'errors' argument.
+ target = resources.files(self.data) / 'utf-16.file'
+ with target.open(encoding='utf-8', errors='strict') as fp:
+ self.assertRaises(UnicodeError, fp.read)
+ with target.open(encoding='utf-8', errors='ignore') as fp:
+ result = fp.read()
+ self.assertEqual(
+ result,
+ 'H\x00e\x00l\x00l\x00o\x00,\x00 '
+ '\x00U\x00T\x00F\x00-\x001\x006\x00 '
+ '\x00w\x00o\x00r\x00l\x00d\x00!\x00\n\x00',
+ )
+
+ def test_open_binary_FileNotFoundError(self):
+ target = resources.files(self.data) / 'does-not-exist'
+ self.assertRaises(FileNotFoundError, target.open, 'rb')
+
+ def test_open_text_FileNotFoundError(self):
+ target = resources.files(self.data) / 'does-not-exist'
+ self.assertRaises(FileNotFoundError, target.open)
+
+
+class OpenDiskTests(OpenTests, unittest.TestCase):
+ def setUp(self):
+ self.data = data01
+
+
+class OpenDiskNamespaceTests(OpenTests, unittest.TestCase):
+ def setUp(self):
+ from . import namespacedata01
+
+ self.data = namespacedata01
+
+
+class OpenZipTests(OpenTests, util.ZipSetup, unittest.TestCase):
+ pass
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/test_path.py b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/test_path.py
new file mode 100644
index 0000000000000000000000000000000000000000..4f4d3943bbaf600e8c2e89cf34fa224f36445a19
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/test_path.py
@@ -0,0 +1,64 @@
+import io
+import unittest
+
+import importlib_resources as resources
+from . import data01
+from . import util
+
+
+class CommonTests(util.CommonTests, unittest.TestCase):
+ def execute(self, package, path):
+ with resources.as_file(resources.files(package).joinpath(path)):
+ pass
+
+
+class PathTests:
+ def test_reading(self):
+ # Path should be readable.
+ # Test also implicitly verifies the returned object is a pathlib.Path
+ # instance.
+ target = resources.files(self.data) / 'utf-8.file'
+ with resources.as_file(target) as path:
+ self.assertTrue(path.name.endswith("utf-8.file"), repr(path))
+ # pathlib.Path.read_text() was introduced in Python 3.5.
+ with path.open('r', encoding='utf-8') as file:
+ text = file.read()
+ self.assertEqual('Hello, UTF-8 world!\n', text)
+
+
+class PathDiskTests(PathTests, unittest.TestCase):
+ data = data01
+
+ def test_natural_path(self):
+ """
+ Guarantee the internal implementation detail that
+ file-system-backed resources do not get the tempdir
+ treatment.
+ """
+ target = resources.files(self.data) / 'utf-8.file'
+ with resources.as_file(target) as path:
+ assert 'data' in str(path)
+
+
+class PathMemoryTests(PathTests, unittest.TestCase):
+ def setUp(self):
+ file = io.BytesIO(b'Hello, UTF-8 world!\n')
+ self.addCleanup(file.close)
+ self.data = util.create_package(
+ file=file, path=FileNotFoundError("package exists only in memory")
+ )
+ self.data.__spec__.origin = None
+ self.data.__spec__.has_location = False
+
+
+class PathZipTests(PathTests, util.ZipSetup, unittest.TestCase):
+ def test_remove_in_context_manager(self):
+ # It is not an error if the file that was temporarily stashed on the
+ # file system is removed inside the `with` stanza.
+ target = resources.files(self.data) / 'utf-8.file'
+ with resources.as_file(target) as path:
+ path.unlink()
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/test_read.py b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/test_read.py
new file mode 100644
index 0000000000000000000000000000000000000000..41dd6db5f3e4814d7331817465aaf65678f83adc
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/test_read.py
@@ -0,0 +1,76 @@
+import unittest
+import importlib_resources as resources
+
+from . import data01
+from . import util
+from importlib import import_module
+
+
+class CommonBinaryTests(util.CommonTests, unittest.TestCase):
+ def execute(self, package, path):
+ resources.files(package).joinpath(path).read_bytes()
+
+
+class CommonTextTests(util.CommonTests, unittest.TestCase):
+ def execute(self, package, path):
+ resources.files(package).joinpath(path).read_text()
+
+
+class ReadTests:
+ def test_read_bytes(self):
+ result = resources.files(self.data).joinpath('binary.file').read_bytes()
+ self.assertEqual(result, b'\0\1\2\3')
+
+ def test_read_text_default_encoding(self):
+ result = resources.files(self.data).joinpath('utf-8.file').read_text()
+ self.assertEqual(result, 'Hello, UTF-8 world!\n')
+
+ def test_read_text_given_encoding(self):
+ result = (
+ resources.files(self.data)
+ .joinpath('utf-16.file')
+ .read_text(encoding='utf-16')
+ )
+ self.assertEqual(result, 'Hello, UTF-16 world!\n')
+
+ def test_read_text_with_errors(self):
+ # Raises UnicodeError without the 'errors' argument.
+ target = resources.files(self.data) / 'utf-16.file'
+ self.assertRaises(UnicodeError, target.read_text, encoding='utf-8')
+ result = target.read_text(encoding='utf-8', errors='ignore')
+ self.assertEqual(
+ result,
+ 'H\x00e\x00l\x00l\x00o\x00,\x00 '
+ '\x00U\x00T\x00F\x00-\x001\x006\x00 '
+ '\x00w\x00o\x00r\x00l\x00d\x00!\x00\n\x00',
+ )
+
+
+class ReadDiskTests(ReadTests, unittest.TestCase):
+ data = data01
+
+
+class ReadZipTests(ReadTests, util.ZipSetup, unittest.TestCase):
+ def test_read_submodule_resource(self):
+ submodule = import_module('ziptestdata.subdirectory')
+ result = resources.files(submodule).joinpath('binary.file').read_bytes()
+ self.assertEqual(result, b'\0\1\2\3')
+
+ def test_read_submodule_resource_by_name(self):
+ result = (
+ resources.files('ziptestdata.subdirectory')
+ .joinpath('binary.file')
+ .read_bytes()
+ )
+ self.assertEqual(result, b'\0\1\2\3')
+
+
+class ReadNamespaceTests(ReadTests, unittest.TestCase):
+ def setUp(self):
+ from . import namespacedata01
+
+ self.data = namespacedata01
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/test_reader.py b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/test_reader.py
new file mode 100644
index 0000000000000000000000000000000000000000..16841a508a0b5b3bac7bad675e3317cdae1a4531
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/test_reader.py
@@ -0,0 +1,128 @@
+import os.path
+import sys
+import pathlib
+import unittest
+
+from importlib import import_module
+from importlib_resources.readers import MultiplexedPath, NamespaceReader
+
+
+class MultiplexedPathTest(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ path = pathlib.Path(__file__).parent / 'namespacedata01'
+ cls.folder = str(path)
+
+ def test_init_no_paths(self):
+ with self.assertRaises(FileNotFoundError):
+ MultiplexedPath()
+
+ def test_init_file(self):
+ with self.assertRaises(NotADirectoryError):
+ MultiplexedPath(os.path.join(self.folder, 'binary.file'))
+
+ def test_iterdir(self):
+ contents = {path.name for path in MultiplexedPath(self.folder).iterdir()}
+ try:
+ contents.remove('__pycache__')
+ except (KeyError, ValueError):
+ pass
+ self.assertEqual(contents, {'binary.file', 'utf-16.file', 'utf-8.file'})
+
+ def test_iterdir_duplicate(self):
+ data01 = os.path.abspath(os.path.join(__file__, '..', 'data01'))
+ contents = {
+ path.name for path in MultiplexedPath(self.folder, data01).iterdir()
+ }
+ for remove in ('__pycache__', '__init__.pyc'):
+ try:
+ contents.remove(remove)
+ except (KeyError, ValueError):
+ pass
+ self.assertEqual(
+ contents,
+ {'__init__.py', 'binary.file', 'subdirectory', 'utf-16.file', 'utf-8.file'},
+ )
+
+ def test_is_dir(self):
+ self.assertEqual(MultiplexedPath(self.folder).is_dir(), True)
+
+ def test_is_file(self):
+ self.assertEqual(MultiplexedPath(self.folder).is_file(), False)
+
+ def test_open_file(self):
+ path = MultiplexedPath(self.folder)
+ with self.assertRaises(FileNotFoundError):
+ path.read_bytes()
+ with self.assertRaises(FileNotFoundError):
+ path.read_text()
+ with self.assertRaises(FileNotFoundError):
+ path.open()
+
+ def test_join_path(self):
+ prefix = os.path.abspath(os.path.join(__file__, '..'))
+ data01 = os.path.join(prefix, 'data01')
+ path = MultiplexedPath(self.folder, data01)
+ self.assertEqual(
+ str(path.joinpath('binary.file'))[len(prefix) + 1 :],
+ os.path.join('namespacedata01', 'binary.file'),
+ )
+ self.assertEqual(
+ str(path.joinpath('subdirectory'))[len(prefix) + 1 :],
+ os.path.join('data01', 'subdirectory'),
+ )
+ self.assertEqual(
+ str(path.joinpath('imaginary'))[len(prefix) + 1 :],
+ os.path.join('namespacedata01', 'imaginary'),
+ )
+
+ def test_repr(self):
+ self.assertEqual(
+ repr(MultiplexedPath(self.folder)),
+ f"MultiplexedPath('{self.folder}')",
+ )
+
+ def test_name(self):
+ self.assertEqual(
+ MultiplexedPath(self.folder).name,
+ os.path.basename(self.folder),
+ )
+
+
+class NamespaceReaderTest(unittest.TestCase):
+ site_dir = str(pathlib.Path(__file__).parent)
+
+ @classmethod
+ def setUpClass(cls):
+ sys.path.append(cls.site_dir)
+
+ @classmethod
+ def tearDownClass(cls):
+ sys.path.remove(cls.site_dir)
+
+ def test_init_error(self):
+ with self.assertRaises(ValueError):
+ NamespaceReader(['path1', 'path2'])
+
+ def test_resource_path(self):
+ namespacedata01 = import_module('namespacedata01')
+ reader = NamespaceReader(namespacedata01.__spec__.submodule_search_locations)
+
+ root = os.path.abspath(os.path.join(__file__, '..', 'namespacedata01'))
+ self.assertEqual(
+ reader.resource_path('binary.file'), os.path.join(root, 'binary.file')
+ )
+ self.assertEqual(
+ reader.resource_path('imaginary'), os.path.join(root, 'imaginary')
+ )
+
+ def test_files(self):
+ namespacedata01 = import_module('namespacedata01')
+ reader = NamespaceReader(namespacedata01.__spec__.submodule_search_locations)
+ root = os.path.abspath(os.path.join(__file__, '..', 'namespacedata01'))
+ self.assertIsInstance(reader.files(), MultiplexedPath)
+ self.assertEqual(repr(reader.files()), f"MultiplexedPath('{root}')")
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/test_resource.py b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/test_resource.py
new file mode 100644
index 0000000000000000000000000000000000000000..5affd8b0de8d5447787ab1b617d9fa72a44bcadd
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/test_resource.py
@@ -0,0 +1,252 @@
+import sys
+import unittest
+import importlib_resources as resources
+import uuid
+import pathlib
+
+from . import data01
+from . import zipdata01, zipdata02
+from . import util
+from importlib import import_module
+from ._compat import import_helper, unlink
+
+
+class ResourceTests:
+ # Subclasses are expected to set the `data` attribute.
+
+ def test_is_file_exists(self):
+ target = resources.files(self.data) / 'binary.file'
+ self.assertTrue(target.is_file())
+
+ def test_is_file_missing(self):
+ target = resources.files(self.data) / 'not-a-file'
+ self.assertFalse(target.is_file())
+
+ def test_is_dir(self):
+ target = resources.files(self.data) / 'subdirectory'
+ self.assertFalse(target.is_file())
+ self.assertTrue(target.is_dir())
+
+
+class ResourceDiskTests(ResourceTests, unittest.TestCase):
+ def setUp(self):
+ self.data = data01
+
+
+class ResourceZipTests(ResourceTests, util.ZipSetup, unittest.TestCase):
+ pass
+
+
+def names(traversable):
+ return {item.name for item in traversable.iterdir()}
+
+
+class ResourceLoaderTests(unittest.TestCase):
+ def test_resource_contents(self):
+ package = util.create_package(
+ file=data01, path=data01.__file__, contents=['A', 'B', 'C']
+ )
+ self.assertEqual(names(resources.files(package)), {'A', 'B', 'C'})
+
+ def test_is_file(self):
+ package = util.create_package(
+ file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F']
+ )
+ self.assertTrue(resources.files(package).joinpath('B').is_file())
+
+ def test_is_dir(self):
+ package = util.create_package(
+ file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F']
+ )
+ self.assertTrue(resources.files(package).joinpath('D').is_dir())
+
+ def test_resource_missing(self):
+ package = util.create_package(
+ file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F']
+ )
+ self.assertFalse(resources.files(package).joinpath('Z').is_file())
+
+
+class ResourceCornerCaseTests(unittest.TestCase):
+ def test_package_has_no_reader_fallback(self):
+ # Test odd ball packages which:
+ # 1. Do not have a ResourceReader as a loader
+ # 2. Are not on the file system
+ # 3. Are not in a zip file
+ module = util.create_package(
+ file=data01, path=data01.__file__, contents=['A', 'B', 'C']
+ )
+ # Give the module a dummy loader.
+ module.__loader__ = object()
+ # Give the module a dummy origin.
+ module.__file__ = '/path/which/shall/not/be/named'
+ module.__spec__.loader = module.__loader__
+ module.__spec__.origin = module.__file__
+ self.assertFalse(resources.files(module).joinpath('A').is_file())
+
+
+class ResourceFromZipsTest01(util.ZipSetupBase, unittest.TestCase):
+ ZIP_MODULE = zipdata01 # type: ignore
+
+ def test_is_submodule_resource(self):
+ submodule = import_module('ziptestdata.subdirectory')
+ self.assertTrue(resources.files(submodule).joinpath('binary.file').is_file())
+
+ def test_read_submodule_resource_by_name(self):
+ self.assertTrue(
+ resources.files('ziptestdata.subdirectory')
+ .joinpath('binary.file')
+ .is_file()
+ )
+
+ def test_submodule_contents(self):
+ submodule = import_module('ziptestdata.subdirectory')
+ self.assertEqual(
+ names(resources.files(submodule)), {'__init__.py', 'binary.file'}
+ )
+
+ def test_submodule_contents_by_name(self):
+ self.assertEqual(
+ names(resources.files('ziptestdata.subdirectory')),
+ {'__init__.py', 'binary.file'},
+ )
+
+
+class ResourceFromZipsTest02(util.ZipSetupBase, unittest.TestCase):
+ ZIP_MODULE = zipdata02 # type: ignore
+
+ def test_unrelated_contents(self):
+ """
+ Test thata zip with two unrelated subpackages return
+ distinct resources. Ref python/importlib_resources#44.
+ """
+ self.assertEqual(
+ names(resources.files('ziptestdata.one')),
+ {'__init__.py', 'resource1.txt'},
+ )
+ self.assertEqual(
+ names(resources.files('ziptestdata.two')),
+ {'__init__.py', 'resource2.txt'},
+ )
+
+
+class DeletingZipsTest(unittest.TestCase):
+ """Having accessed resources in a zip file should not keep an open
+ reference to the zip.
+ """
+
+ ZIP_MODULE = zipdata01
+
+ def setUp(self):
+ modules = import_helper.modules_setup()
+ self.addCleanup(import_helper.modules_cleanup, *modules)
+
+ data_path = pathlib.Path(self.ZIP_MODULE.__file__)
+ data_dir = data_path.parent
+ self.source_zip_path = data_dir / 'ziptestdata.zip'
+ self.zip_path = pathlib.Path(f'{uuid.uuid4()}.zip').absolute()
+ self.zip_path.write_bytes(self.source_zip_path.read_bytes())
+ sys.path.append(str(self.zip_path))
+ self.data = import_module('ziptestdata')
+
+ def tearDown(self):
+ try:
+ sys.path.remove(str(self.zip_path))
+ except ValueError:
+ pass
+
+ try:
+ del sys.path_importer_cache[str(self.zip_path)]
+ del sys.modules[self.data.__name__]
+ except KeyError:
+ pass
+
+ try:
+ unlink(self.zip_path)
+ except OSError:
+ # If the test fails, this will probably fail too
+ pass
+
+ def test_iterdir_does_not_keep_open(self):
+ c = [item.name for item in resources.files('ziptestdata').iterdir()]
+ self.zip_path.unlink()
+ del c
+
+ def test_is_file_does_not_keep_open(self):
+ c = resources.files('ziptestdata').joinpath('binary.file').is_file()
+ self.zip_path.unlink()
+ del c
+
+ def test_is_file_failure_does_not_keep_open(self):
+ c = resources.files('ziptestdata').joinpath('not-present').is_file()
+ self.zip_path.unlink()
+ del c
+
+ @unittest.skip("Desired but not supported.")
+ def test_as_file_does_not_keep_open(self): # pragma: no cover
+ c = resources.as_file(resources.files('ziptestdata') / 'binary.file')
+ self.zip_path.unlink()
+ del c
+
+ def test_entered_path_does_not_keep_open(self):
+ # This is what certifi does on import to make its bundle
+ # available for the process duration.
+ c = resources.as_file(
+ resources.files('ziptestdata') / 'binary.file'
+ ).__enter__()
+ self.zip_path.unlink()
+ del c
+
+ def test_read_binary_does_not_keep_open(self):
+ c = resources.files('ziptestdata').joinpath('binary.file').read_bytes()
+ self.zip_path.unlink()
+ del c
+
+ def test_read_text_does_not_keep_open(self):
+ c = resources.files('ziptestdata').joinpath('utf-8.file').read_text()
+ self.zip_path.unlink()
+ del c
+
+
+class ResourceFromNamespaceTest01(unittest.TestCase):
+ site_dir = str(pathlib.Path(__file__).parent)
+
+ @classmethod
+ def setUpClass(cls):
+ sys.path.append(cls.site_dir)
+
+ @classmethod
+ def tearDownClass(cls):
+ sys.path.remove(cls.site_dir)
+
+ def test_is_submodule_resource(self):
+ self.assertTrue(
+ resources.files(import_module('namespacedata01'))
+ .joinpath('binary.file')
+ .is_file()
+ )
+
+ def test_read_submodule_resource_by_name(self):
+ self.assertTrue(
+ resources.files('namespacedata01').joinpath('binary.file').is_file()
+ )
+
+ def test_submodule_contents(self):
+ contents = names(resources.files(import_module('namespacedata01')))
+ try:
+ contents.remove('__pycache__')
+ except KeyError:
+ pass
+ self.assertEqual(contents, {'binary.file', 'utf-8.file', 'utf-16.file'})
+
+ def test_submodule_contents_by_name(self):
+ contents = names(resources.files('namespacedata01'))
+ try:
+ contents.remove('__pycache__')
+ except KeyError:
+ pass
+ self.assertEqual(contents, {'binary.file', 'utf-8.file', 'utf-16.file'})
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/update-zips.py b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/update-zips.py
new file mode 100644
index 0000000000000000000000000000000000000000..9ef0224ca65ca067be71deeda384c9bce3015a97
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/update-zips.py
@@ -0,0 +1,53 @@
+"""
+Generate the zip test data files.
+
+Run to build the tests/zipdataNN/ziptestdata.zip files from
+files in tests/dataNN.
+
+Replaces the file with the working copy, but does commit anything
+to the source repo.
+"""
+
+import contextlib
+import os
+import pathlib
+import zipfile
+
+
+def main():
+ """
+ >>> from unittest import mock
+ >>> monkeypatch = getfixture('monkeypatch')
+ >>> monkeypatch.setattr(zipfile, 'ZipFile', mock.MagicMock())
+ >>> print(); main() # print workaround for bpo-32509
+ <BLANKLINE>
+ ...data01... -> ziptestdata/...
+ ...
+ ...data02... -> ziptestdata/...
+ ...
+ """
+ suffixes = '01', '02'
+ tuple(map(generate, suffixes))
+
+
+def generate(suffix):
+ root = pathlib.Path(__file__).parent.relative_to(os.getcwd())
+ zfpath = root / f'zipdata{suffix}/ziptestdata.zip'
+ with zipfile.ZipFile(zfpath, 'w') as zf:
+ for src, rel in walk(root / f'data{suffix}'):
+ dst = 'ziptestdata' / pathlib.PurePosixPath(rel.as_posix())
+ print(src, '->', dst)
+ zf.write(src, dst)
+
+
+def walk(datapath):
+ for dirpath, dirnames, filenames in os.walk(datapath):
+ with contextlib.suppress(KeyError):
+ dirnames.remove('__pycache__')
+ for filename in filenames:
+ res = pathlib.Path(dirpath) / filename
+ rel = res.relative_to(datapath)
+ yield res, rel
+
+
+__name__ == '__main__' and main()
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/util.py b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/util.py
new file mode 100644
index 0000000000000000000000000000000000000000..c6d83e4bd3553de6d38556be139977abdb354671
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/util.py
@@ -0,0 +1,178 @@
+import abc
+import importlib
+import io
+import sys
+import types
+from pathlib import Path, PurePath
+
+from . import data01
+from . import zipdata01
+from ..abc import ResourceReader
+from ._compat import import_helper
+
+
+from importlib.machinery import ModuleSpec
+
+
+class Reader(ResourceReader):
+ def __init__(self, **kwargs):
+ vars(self).update(kwargs)
+
+ def get_resource_reader(self, package):
+ return self
+
+ def open_resource(self, path):
+ self._path = path
+ if isinstance(self.file, Exception):
+ raise self.file
+ return self.file
+
+ def resource_path(self, path_):
+ self._path = path_
+ if isinstance(self.path, Exception):
+ raise self.path
+ return self.path
+
+ def is_resource(self, path_):
+ self._path = path_
+ if isinstance(self.path, Exception):
+ raise self.path
+
+ def part(entry):
+ return entry.split('/')
+
+ return any(
+ len(parts) == 1 and parts[0] == path_ for parts in map(part, self._contents)
+ )
+
+ def contents(self):
+ if isinstance(self.path, Exception):
+ raise self.path
+ yield from self._contents
+
+
+def create_package_from_loader(loader, is_package=True):
+ name = 'testingpackage'
+ module = types.ModuleType(name)
+ spec = ModuleSpec(name, loader, origin='does-not-exist', is_package=is_package)
+ module.__spec__ = spec
+ module.__loader__ = loader
+ return module
+
+
+def create_package(file=None, path=None, is_package=True, contents=()):
+ return create_package_from_loader(
+ Reader(file=file, path=path, _contents=contents),
+ is_package,
+ )
+
+
+class CommonTests(metaclass=abc.ABCMeta):
+ """
+ Tests shared by test_open, test_path, and test_read.
+ """
+
+ @abc.abstractmethod
+ def execute(self, package, path):
+ """
+ Call the pertinent legacy API function (e.g. open_text, path)
+ on package and path.
+ """
+
+ def test_package_name(self):
+ # Passing in the package name should succeed.
+ self.execute(data01.__name__, 'utf-8.file')
+
+ def test_package_object(self):
+ # Passing in the package itself should succeed.
+ self.execute(data01, 'utf-8.file')
+
+ def test_string_path(self):
+ # Passing in a string for the path should succeed.
+ path = 'utf-8.file'
+ self.execute(data01, path)
+
+ def test_pathlib_path(self):
+ # Passing in a pathlib.PurePath object for the path should succeed.
+ path = PurePath('utf-8.file')
+ self.execute(data01, path)
+
+ def test_importing_module_as_side_effect(self):
+ # The anchor package can already be imported.
+ del sys.modules[data01.__name__]
+ self.execute(data01.__name__, 'utf-8.file')
+
+ def test_non_package_by_name(self):
+ # The anchor package cannot be a module.
+ with self.assertRaises(TypeError):
+ self.execute(__name__, 'utf-8.file')
+
+ def test_non_package_by_package(self):
+ # The anchor package cannot be a module.
+ with self.assertRaises(TypeError):
+ module = sys.modules['importlib_resources.tests.util']
+ self.execute(module, 'utf-8.file')
+
+ def test_missing_path(self):
+ # Attempting to open or read or request the path for a
+ # non-existent path should succeed if open_resource
+ # can return a viable data stream.
+ bytes_data = io.BytesIO(b'Hello, world!')
+ package = create_package(file=bytes_data, path=FileNotFoundError())
+ self.execute(package, 'utf-8.file')
+ self.assertEqual(package.__loader__._path, 'utf-8.file')
+
+ def test_extant_path(self):
+ # Attempting to open or read or request the path when the
+ # path does exist should still succeed. Does not assert
+ # anything about the result.
+ bytes_data = io.BytesIO(b'Hello, world!')
+ # any path that exists
+ path = __file__
+ package = create_package(file=bytes_data, path=path)
+ self.execute(package, 'utf-8.file')
+ self.assertEqual(package.__loader__._path, 'utf-8.file')
+
+ def test_useless_loader(self):
+ package = create_package(file=FileNotFoundError(), path=FileNotFoundError())
+ with self.assertRaises(FileNotFoundError):
+ self.execute(package, 'utf-8.file')
+
+
+class ZipSetupBase:
+ ZIP_MODULE = None
+
+ @classmethod
+ def setUpClass(cls):
+ data_path = Path(cls.ZIP_MODULE.__file__)
+ data_dir = data_path.parent
+ cls._zip_path = str(data_dir / 'ziptestdata.zip')
+ sys.path.append(cls._zip_path)
+ cls.data = importlib.import_module('ziptestdata')
+
+ @classmethod
+ def tearDownClass(cls):
+ try:
+ sys.path.remove(cls._zip_path)
+ except ValueError:
+ pass
+
+ try:
+ del sys.path_importer_cache[cls._zip_path]
+ del sys.modules[cls.data.__name__]
+ except KeyError:
+ pass
+
+ try:
+ del cls.data
+ del cls._zip_path
+ except AttributeError:
+ pass
+
+ def setUp(self):
+ modules = import_helper.modules_setup()
+ self.addCleanup(import_helper.modules_cleanup, *modules)
+
+
+class ZipSetup(ZipSetupBase):
+ ZIP_MODULE = zipdata01 # type: ignore
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/zipdata01/__init__.py b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/zipdata01/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/zipdata01/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/zipdata01/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ef07480906222837551bc91e9fded422bdd564db
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/zipdata01/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/zipdata01/ziptestdata.zip b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/zipdata01/ziptestdata.zip
new file mode 100644
index 0000000000000000000000000000000000000000..9a3bb0739f87e97c1084b94d7d153680f6727738
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/zipdata01/ziptestdata.zip differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/zipdata02/__init__.py b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/zipdata02/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/zipdata02/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/zipdata02/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7faeb9daabfc95e0de8c6a565aaa4a1d7f78dd06
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/zipdata02/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/zipdata02/ziptestdata.zip b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/zipdata02/ziptestdata.zip
new file mode 100644
index 0000000000000000000000000000000000000000..d63ff512d2807ef2fd259455283b81b02e0e45fb
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/importlib_resources/tests/zipdata02/ziptestdata.zip differ
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco.classes-3.2.1.dist-info/INSTALLER b/monEnvTP/lib/python3.8/site-packages/jaraco.classes-3.2.1.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco.classes-3.2.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco.classes-3.2.1.dist-info/LICENSE b/monEnvTP/lib/python3.8/site-packages/jaraco.classes-3.2.1.dist-info/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..353924be0e59b9ad7e6c22848c2189398481821d
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco.classes-3.2.1.dist-info/LICENSE
@@ -0,0 +1,19 @@
+Copyright Jason R. Coombs
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco.classes-3.2.1.dist-info/METADATA b/monEnvTP/lib/python3.8/site-packages/jaraco.classes-3.2.1.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..6857ef90cd5a6febe2f9b2fd8ce131314df5cdc1
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco.classes-3.2.1.dist-info/METADATA
@@ -0,0 +1,49 @@
+Metadata-Version: 2.1
+Name: jaraco.classes
+Version: 3.2.1
+Summary: Utility functions for Python class constructs
+Home-page: https://github.com/jaraco/jaraco.classes
+Author: Jason R. Coombs
+Author-email: jaraco@jaraco.com
+License: UNKNOWN
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Requires-Python: >=3.6
+Requires-Dist: more-itertools
+Provides-Extra: docs
+Requires-Dist: sphinx ; extra == 'docs'
+Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs'
+Requires-Dist: rst.linker (>=1.9) ; extra == 'docs'
+Provides-Extra: testing
+Requires-Dist: pytest (!=3.7.3,>=3.5) ; extra == 'testing'
+Requires-Dist: pytest-checkdocs (>=1.2.3) ; extra == 'testing'
+Requires-Dist: pytest-flake8 ; extra == 'testing'
+Requires-Dist: pytest-cov ; extra == 'testing'
+Requires-Dist: pytest-enabler ; extra == 'testing'
+Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing'
+Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing'
+
+.. image:: https://img.shields.io/pypi/v/jaraco.classes.svg
+ :target: `PyPI link`_
+
+.. image:: https://img.shields.io/pypi/pyversions/jaraco.classes.svg
+ :target: `PyPI link`_
+
+.. _PyPI link: https://pypi.org/project/jaraco.ui
+
+.. image:: https://github.com/jaraco/jaraco.classes/workflows/tests/badge.svg
+ :target: https://github.com/jaraco/jaraco.classes/actions?query=workflow%3A%22tests%22
+ :alt: tests
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/psf/black
+ :alt: Code style: Black
+
+.. .. image:: https://readthedocs.org/projects/skeleton/badge/?version=latest
+.. :target: https://skeleton.readthedocs.io/en/latest/?badge=latest
+
+
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco.classes-3.2.1.dist-info/RECORD b/monEnvTP/lib/python3.8/site-packages/jaraco.classes-3.2.1.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..33694d87c058174739940f4e5e10c4552b40b9bd
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco.classes-3.2.1.dist-info/RECORD
@@ -0,0 +1,14 @@
+jaraco.classes-3.2.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+jaraco.classes-3.2.1.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050
+jaraco.classes-3.2.1.dist-info/METADATA,sha256=SOrG9GjO8R38zwrbbmFgVtSUl7D2X6wjncpDDZkuq7Y,1937
+jaraco.classes-3.2.1.dist-info/RECORD,,
+jaraco.classes-3.2.1.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQA6lS9xA,92
+jaraco.classes-3.2.1.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
+jaraco/classes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+jaraco/classes/__pycache__/__init__.cpython-38.pyc,,
+jaraco/classes/__pycache__/ancestry.cpython-38.pyc,,
+jaraco/classes/__pycache__/meta.cpython-38.pyc,,
+jaraco/classes/__pycache__/properties.cpython-38.pyc,,
+jaraco/classes/ancestry.py,sha256=92bQatSnIUFqPG8cWzMBWbkAs6Ww7cMOBxfUiyQhWYk,1464
+jaraco/classes/meta.py,sha256=sUETZqDrKKEftYqmmVqrFFWRVCN-v9Sdri4Cb0L8SZA,1853
+jaraco/classes/properties.py,sha256=DWP_Z-QiUi8PdX_vv9ZtazaRr3WcAOSoPReupwgX7Kk,3980
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco.classes-3.2.1.dist-info/WHEEL b/monEnvTP/lib/python3.8/site-packages/jaraco.classes-3.2.1.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..385faab0525ccdbfd1070a8bebcca3ac8617236e
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco.classes-3.2.1.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.36.2)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco.classes-3.2.1.dist-info/top_level.txt b/monEnvTP/lib/python3.8/site-packages/jaraco.classes-3.2.1.dist-info/top_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..f6205a5f19a533fd30f90a433e610325ff02f989
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco.classes-3.2.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+jaraco
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco.collections-3.5.1.dist-info/INSTALLER b/monEnvTP/lib/python3.8/site-packages/jaraco.collections-3.5.1.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco.collections-3.5.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco.collections-3.5.1.dist-info/LICENSE b/monEnvTP/lib/python3.8/site-packages/jaraco.collections-3.5.1.dist-info/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..353924be0e59b9ad7e6c22848c2189398481821d
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco.collections-3.5.1.dist-info/LICENSE
@@ -0,0 +1,19 @@
+Copyright Jason R. Coombs
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco.collections-3.5.1.dist-info/METADATA b/monEnvTP/lib/python3.8/site-packages/jaraco.collections-3.5.1.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..3584355c211492f3c1fde9f274e0a1269d27a1a5
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco.collections-3.5.1.dist-info/METADATA
@@ -0,0 +1,76 @@
+Metadata-Version: 2.1
+Name: jaraco.collections
+Version: 3.5.1
+Summary: Collection objects similar to those in stdlib by jaraco
+Home-page: https://github.com/jaraco/jaraco.collections
+Author: Jason R. Coombs
+Author-email: jaraco@jaraco.com
+License: UNKNOWN
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Requires-Python: >=3.7
+License-File: LICENSE
+Requires-Dist: jaraco.text
+Requires-Dist: jaraco.classes
+Provides-Extra: docs
+Requires-Dist: sphinx ; extra == 'docs'
+Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs'
+Requires-Dist: rst.linker (>=1.9) ; extra == 'docs'
+Provides-Extra: testing
+Requires-Dist: pytest (>=6) ; extra == 'testing'
+Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing'
+Requires-Dist: pytest-flake8 ; extra == 'testing'
+Requires-Dist: pytest-cov ; extra == 'testing'
+Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing'
+Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing'
+Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing'
+
+.. image:: https://img.shields.io/pypi/v/jaraco.collections.svg
+ :target: `PyPI link`_
+
+.. image:: https://img.shields.io/pypi/pyversions/jaraco.collections.svg
+ :target: `PyPI link`_
+
+.. _PyPI link: https://pypi.org/project/jaraco.collections
+
+.. image:: https://github.com/jaraco/jaraco.collections/workflows/tests/badge.svg
+ :target: https://github.com/jaraco/jaraco.collections/actions?query=workflow%3A%22tests%22
+ :alt: tests
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/psf/black
+ :alt: Code style: Black
+
+.. image:: https://readthedocs.org/projects/jaracocollections/badge/?version=latest
+ :target: https://jaracocollections.readthedocs.io/en/latest/?badge=latest
+
+.. image:: https://img.shields.io/badge/skeleton-2021-informational
+ :target: https://blog.jaraco.com/skeleton
+
+Models and classes to supplement the stdlib 'collections' module.
+
+See the docs, linked above, for descriptions and usage examples.
+
+Highlights include:
+
+- RangeMap: A mapping that accepts a range of values for keys.
+- Projection: A subset over an existing mapping.
+- DictFilter: A different implementation of a projection.
+- KeyTransformingDict: Generalized mapping with keys transformed by a function.
+- FoldedCaseKeyedDict: A dict whose string keys are case-insensitive.
+- BijectiveMap: A map where keys map to values and values back to their keys.
+- ItemsAsAttributes: A mapping mix-in exposing items as attributes.
+- IdentityOverrideMap: A map whose keys map by default to themselves unless overridden.
+- FrozenDict: A hashable, immutable map.
+- Enumeration: An object whose keys are enumerated.
+- Everything: A container that contains all things.
+- Least, Greatest: Objects that are always less than or greater than any other.
+- pop_all: Return all items from the mutable sequence and remove them from that sequence.
+- DictStack: A stack of dicts, great for sharing scopes.
+- WeightedLookup: A specialized RangeMap for selecting an item by weights.
+
+
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco.collections-3.5.1.dist-info/RECORD b/monEnvTP/lib/python3.8/site-packages/jaraco.collections-3.5.1.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..9e29a0a15f01ae2756a6c7185d3a9b056764cd93
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco.collections-3.5.1.dist-info/RECORD
@@ -0,0 +1,8 @@
+jaraco.collections-3.5.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+jaraco.collections-3.5.1.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050
+jaraco.collections-3.5.1.dist-info/METADATA,sha256=FfWOxedwQp89keF3PgoC4rkFBCnn1dhWWUMVsZXnKxY,3309
+jaraco.collections-3.5.1.dist-info/RECORD,,
+jaraco.collections-3.5.1.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+jaraco.collections-3.5.1.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
+jaraco/__pycache__/collections.cpython-38.pyc,,
+jaraco/collections.py,sha256=HR2m-xKRRTANhfFngjsW5Z9T3NLtJi5zgxhfH8lWhig,25589
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco.collections-3.5.1.dist-info/WHEEL b/monEnvTP/lib/python3.8/site-packages/jaraco.collections-3.5.1.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..becc9a66ea739ba941d48a749e248761cc6e658a
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco.collections-3.5.1.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.1)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco.collections-3.5.1.dist-info/top_level.txt b/monEnvTP/lib/python3.8/site-packages/jaraco.collections-3.5.1.dist-info/top_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..f6205a5f19a533fd30f90a433e610325ff02f989
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco.collections-3.5.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+jaraco
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco.context-4.1.1.dist-info/INSTALLER b/monEnvTP/lib/python3.8/site-packages/jaraco.context-4.1.1.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco.context-4.1.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco.context-4.1.1.dist-info/LICENSE b/monEnvTP/lib/python3.8/site-packages/jaraco.context-4.1.1.dist-info/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..353924be0e59b9ad7e6c22848c2189398481821d
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco.context-4.1.1.dist-info/LICENSE
@@ -0,0 +1,19 @@
+Copyright Jason R. Coombs
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco.context-4.1.1.dist-info/METADATA b/monEnvTP/lib/python3.8/site-packages/jaraco.context-4.1.1.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..908711b7ca8df68611963815611b17646d950e12
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco.context-4.1.1.dist-info/METADATA
@@ -0,0 +1,52 @@
+Metadata-Version: 2.1
+Name: jaraco.context
+Version: 4.1.1
+Summary: Context managers by jaraco
+Home-page: https://github.com/jaraco/jaraco.context
+Author: Jason R. Coombs
+Author-email: jaraco@jaraco.com
+License: UNKNOWN
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Requires-Python: >=3.6
+License-File: LICENSE
+Provides-Extra: docs
+Requires-Dist: sphinx ; extra == 'docs'
+Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs'
+Requires-Dist: rst.linker (>=1.9) ; extra == 'docs'
+Provides-Extra: testing
+Requires-Dist: pytest (>=6) ; extra == 'testing'
+Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing'
+Requires-Dist: pytest-flake8 ; extra == 'testing'
+Requires-Dist: pytest-cov ; extra == 'testing'
+Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing'
+Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing'
+Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing'
+
+.. image:: https://img.shields.io/pypi/v/jaraco.context.svg
+ :target: `PyPI link`_
+
+.. image:: https://img.shields.io/pypi/pyversions/jaraco.context.svg
+ :target: `PyPI link`_
+
+.. _PyPI link: https://pypi.org/project/jaraco.context
+
+.. image:: https://github.com/jaraco/jaraco.context/workflows/tests/badge.svg
+ :target: https://github.com/jaraco/jaraco.context/actions?query=workflow%3A%22tests%22
+ :alt: tests
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/psf/black
+ :alt: Code style: Black
+
+.. image:: https://readthedocs.org/projects/jaracocontext/badge/?version=latest
+ :target: https://jaracocontext.readthedocs.io/en/latest/?badge=latest
+
+.. image:: https://img.shields.io/badge/skeleton-2021-informational
+ :target: https://blog.jaraco.com/skeleton
+
+
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco.context-4.1.1.dist-info/RECORD b/monEnvTP/lib/python3.8/site-packages/jaraco.context-4.1.1.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..347638e256491ba5aea497bee7901447a9dce30e
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco.context-4.1.1.dist-info/RECORD
@@ -0,0 +1,8 @@
+jaraco.context-4.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+jaraco.context-4.1.1.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050
+jaraco.context-4.1.1.dist-info/METADATA,sha256=bvqDGCk6Z7TkohUqr5XZm19SbF9mVxrtXjN6uF_BAMQ,2031
+jaraco.context-4.1.1.dist-info/RECORD,,
+jaraco.context-4.1.1.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92
+jaraco.context-4.1.1.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
+jaraco/__pycache__/context.cpython-38.pyc,,
+jaraco/context.py,sha256=7X1tpCLc5EN45iWGzGcsH0Unx62REIkvtRvglj0SiUA,5420
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco.context-4.1.1.dist-info/WHEEL b/monEnvTP/lib/python3.8/site-packages/jaraco.context-4.1.1.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..5bad85fdc1cd08553756d0fb2c7be8b5ad6af7fb
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco.context-4.1.1.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco.context-4.1.1.dist-info/top_level.txt b/monEnvTP/lib/python3.8/site-packages/jaraco.context-4.1.1.dist-info/top_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..f6205a5f19a533fd30f90a433e610325ff02f989
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco.context-4.1.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+jaraco
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco.functools-3.5.0.dist-info/INSTALLER b/monEnvTP/lib/python3.8/site-packages/jaraco.functools-3.5.0.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco.functools-3.5.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco.functools-3.5.0.dist-info/LICENSE b/monEnvTP/lib/python3.8/site-packages/jaraco.functools-3.5.0.dist-info/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..353924be0e59b9ad7e6c22848c2189398481821d
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco.functools-3.5.0.dist-info/LICENSE
@@ -0,0 +1,19 @@
+Copyright Jason R. Coombs
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco.functools-3.5.0.dist-info/METADATA b/monEnvTP/lib/python3.8/site-packages/jaraco.functools-3.5.0.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..12dfbdd00c477a34c8a78eec378afe13a8200bef
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco.functools-3.5.0.dist-info/METADATA
@@ -0,0 +1,58 @@
+Metadata-Version: 2.1
+Name: jaraco.functools
+Version: 3.5.0
+Summary: Functools like those found in stdlib
+Home-page: https://github.com/jaraco/jaraco.functools
+Author: Jason R. Coombs
+Author-email: jaraco@jaraco.com
+License: UNKNOWN
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Requires-Python: >=3.7
+License-File: LICENSE
+Requires-Dist: more-itertools
+Provides-Extra: docs
+Requires-Dist: sphinx ; extra == 'docs'
+Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs'
+Requires-Dist: rst.linker (>=1.9) ; extra == 'docs'
+Provides-Extra: testing
+Requires-Dist: pytest (>=6) ; extra == 'testing'
+Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing'
+Requires-Dist: pytest-flake8 ; extra == 'testing'
+Requires-Dist: pytest-cov ; extra == 'testing'
+Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing'
+Requires-Dist: jaraco.classes ; extra == 'testing'
+Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing'
+Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing'
+
+.. image:: https://img.shields.io/pypi/v/jaraco.functools.svg
+ :target: `PyPI link`_
+
+.. image:: https://img.shields.io/pypi/pyversions/jaraco.functools.svg
+
+.. image:: https://img.shields.io/travis/jaraco/jaraco.functools/master.svg
+ :target: `PyPI link`_
+
+.. _PyPI link: https://pypi.org/project/jaraco.functools
+
+.. image:: https://github.com/jaraco/jaraco.functools/workflows/tests/badge.svg
+ :target: https://github.com/jaraco/jaraco.functools/actions?query=workflow%3A%22tests%22
+ :alt: tests
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/psf/black
+ :alt: Code style: Black
+
+.. image:: https://readthedocs.org/projects/jaracofunctools/badge/?version=latest
+ :target: https://jaracofunctools.readthedocs.io/en/latest/?badge=latest
+
+.. image:: https://img.shields.io/badge/skeleton-2021-informational
+ :target: https://blog.jaraco.com/skeleton
+
+Additional functools in the spirit of stdlib's functools.
+
+
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco.functools-3.5.0.dist-info/RECORD b/monEnvTP/lib/python3.8/site-packages/jaraco.functools-3.5.0.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..cfa66c8cc7da41cb4f1b4dfbb51b4ad13ad153d3
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco.functools-3.5.0.dist-info/RECORD
@@ -0,0 +1,8 @@
+jaraco.functools-3.5.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+jaraco.functools-3.5.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050
+jaraco.functools-3.5.0.dist-info/METADATA,sha256=cE9C7u9bo_GjLAuw4nML67a25kUaPDiHn4j03lG4jd0,2276
+jaraco.functools-3.5.0.dist-info/RECORD,,
+jaraco.functools-3.5.0.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92
+jaraco.functools-3.5.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
+jaraco/__pycache__/functools.cpython-38.pyc,,
+jaraco/functools.py,sha256=PtEHbXZstgVJrwje4GvJOsz5pEbgslOcgEn2EJNpr2c,13494
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco.functools-3.5.0.dist-info/WHEEL b/monEnvTP/lib/python3.8/site-packages/jaraco.functools-3.5.0.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..5bad85fdc1cd08553756d0fb2c7be8b5ad6af7fb
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco.functools-3.5.0.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco.functools-3.5.0.dist-info/top_level.txt b/monEnvTP/lib/python3.8/site-packages/jaraco.functools-3.5.0.dist-info/top_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..f6205a5f19a533fd30f90a433e610325ff02f989
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco.functools-3.5.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+jaraco
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco.text-3.7.0.dist-info/INSTALLER b/monEnvTP/lib/python3.8/site-packages/jaraco.text-3.7.0.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco.text-3.7.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco.text-3.7.0.dist-info/LICENSE b/monEnvTP/lib/python3.8/site-packages/jaraco.text-3.7.0.dist-info/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..353924be0e59b9ad7e6c22848c2189398481821d
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco.text-3.7.0.dist-info/LICENSE
@@ -0,0 +1,19 @@
+Copyright Jason R. Coombs
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco.text-3.7.0.dist-info/METADATA b/monEnvTP/lib/python3.8/site-packages/jaraco.text-3.7.0.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..615a50a4ae51e74bedd497300b31b10ecefaacce
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco.text-3.7.0.dist-info/METADATA
@@ -0,0 +1,55 @@
+Metadata-Version: 2.1
+Name: jaraco.text
+Version: 3.7.0
+Summary: Module for text manipulation
+Home-page: https://github.com/jaraco/jaraco.text
+Author: Jason R. Coombs
+Author-email: jaraco@jaraco.com
+License: UNKNOWN
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Requires-Python: >=3.6
+License-File: LICENSE
+Requires-Dist: jaraco.functools
+Requires-Dist: jaraco.context (>=4.1)
+Requires-Dist: importlib-resources ; python_version < "3.9"
+Provides-Extra: docs
+Requires-Dist: sphinx ; extra == 'docs'
+Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs'
+Requires-Dist: rst.linker (>=1.9) ; extra == 'docs'
+Provides-Extra: testing
+Requires-Dist: pytest (>=6) ; extra == 'testing'
+Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing'
+Requires-Dist: pytest-flake8 ; extra == 'testing'
+Requires-Dist: pytest-cov ; extra == 'testing'
+Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing'
+Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing'
+Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing'
+
+.. image:: https://img.shields.io/pypi/v/jaraco.text.svg
+ :target: `PyPI link`_
+
+.. image:: https://img.shields.io/pypi/pyversions/jaraco.text.svg
+ :target: `PyPI link`_
+
+.. _PyPI link: https://pypi.org/project/jaraco.text
+
+.. image:: https://github.com/jaraco/jaraco.text/workflows/tests/badge.svg
+ :target: https://github.com/jaraco/jaraco.text/actions?query=workflow%3A%22tests%22
+ :alt: tests
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/psf/black
+ :alt: Code style: Black
+
+.. image:: https://readthedocs.org/projects/jaracotext/badge/?version=latest
+ :target: https://jaracotext.readthedocs.io/en/latest/?badge=latest
+
+.. image:: https://img.shields.io/badge/skeleton-2021-informational
+ :target: https://blog.jaraco.com/skeleton
+
+
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco.text-3.7.0.dist-info/RECORD b/monEnvTP/lib/python3.8/site-packages/jaraco.text-3.7.0.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..53b2c5121ea893546aadc68cc2a55f9508c7c633
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco.text-3.7.0.dist-info/RECORD
@@ -0,0 +1,9 @@
+jaraco.text-3.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+jaraco.text-3.7.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050
+jaraco.text-3.7.0.dist-info/METADATA,sha256=5mcR1dY0cJNrM-VIkAFkpjOgvgzmq6nM1GfD0gwTIhs,2136
+jaraco.text-3.7.0.dist-info/RECORD,,
+jaraco.text-3.7.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+jaraco.text-3.7.0.dist-info/top_level.txt,sha256=0JnN3LfXH4LIRfXL-QFOGCJzQWZO3ELx4R1d_louoQM,7
+jaraco/text/Lorem ipsum.txt,sha256=N_7c_79zxOufBY9HZ3yzMgOkNv-TkOTTio4BydrSjgs,1335
+jaraco/text/__init__.py,sha256=I56MW2ZFwPrYXIxzqxMBe2A1t-T4uZBgEgAKe9-JoqM,15538
+jaraco/text/__pycache__/__init__.cpython-38.pyc,,
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco.text-3.7.0.dist-info/WHEEL b/monEnvTP/lib/python3.8/site-packages/jaraco.text-3.7.0.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..becc9a66ea739ba941d48a749e248761cc6e658a
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco.text-3.7.0.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.1)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco.text-3.7.0.dist-info/top_level.txt b/monEnvTP/lib/python3.8/site-packages/jaraco.text-3.7.0.dist-info/top_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..f6205a5f19a533fd30f90a433e610325ff02f989
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco.text-3.7.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+jaraco
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco/__pycache__/collections.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/jaraco/__pycache__/collections.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c28ebf84f74d68d00117626151fbede1bddba550
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/jaraco/__pycache__/collections.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco/__pycache__/context.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/jaraco/__pycache__/context.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5ae3b8c8315dec2da7d8fd432b1a1674f6bfba7a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/jaraco/__pycache__/context.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco/__pycache__/functools.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/jaraco/__pycache__/functools.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..10ccff3b70769e5dce6646fa66daef0d7e9308be
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/jaraco/__pycache__/functools.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco/classes/__init__.py b/monEnvTP/lib/python3.8/site-packages/jaraco/classes/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco/classes/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/jaraco/classes/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..00df795776f512b58e4240bf5d0ad8b02f9d43e0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/jaraco/classes/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco/classes/__pycache__/ancestry.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/jaraco/classes/__pycache__/ancestry.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..167864cdc3f3877a8fa26dad9a2bcd005543c2e5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/jaraco/classes/__pycache__/ancestry.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco/classes/__pycache__/meta.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/jaraco/classes/__pycache__/meta.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b49981f1164c3fac0314210e45dcf27686d26ead
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/jaraco/classes/__pycache__/meta.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco/classes/__pycache__/properties.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/jaraco/classes/__pycache__/properties.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..913e195811f258caf386a66aee0a177d2d71e5ac
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/jaraco/classes/__pycache__/properties.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco/classes/ancestry.py b/monEnvTP/lib/python3.8/site-packages/jaraco/classes/ancestry.py
new file mode 100644
index 0000000000000000000000000000000000000000..dd9b2e92cb641044ab7eff39637feeb23709508c
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco/classes/ancestry.py
@@ -0,0 +1,68 @@
+"""
+Routines for obtaining the class names
+of an object and its parent classes.
+"""
+
+from more_itertools import unique_everseen
+
+
+def all_bases(c):
+ """
+ return a tuple of all base classes the class c has as a parent.
+ >>> object in all_bases(list)
+ True
+ """
+ return c.mro()[1:]
+
+
+def all_classes(c):
+ """
+ return a tuple of all classes to which c belongs
+ >>> list in all_classes(list)
+ True
+ """
+ return c.mro()
+
+
+# borrowed from
+# http://code.activestate.com/recipes/576949-find-all-subclasses-of-a-given-class/
+
+
+def iter_subclasses(cls):
+ """
+ Generator over all subclasses of a given class, in depth-first order.
+
+ >>> bool in list(iter_subclasses(int))
+ True
+ >>> class A(object): pass
+ >>> class B(A): pass
+ >>> class C(A): pass
+ >>> class D(B,C): pass
+ >>> class E(D): pass
+ >>>
+ >>> for cls in iter_subclasses(A):
+ ... print(cls.__name__)
+ B
+ D
+ E
+ C
+ >>> # get ALL classes currently defined
+ >>> res = [cls.__name__ for cls in iter_subclasses(object)]
+ >>> 'type' in res
+ True
+ >>> 'tuple' in res
+ True
+ >>> len(res) > 100
+ True
+ """
+ return unique_everseen(_iter_all_subclasses(cls))
+
+
+def _iter_all_subclasses(cls):
+ try:
+ subs = cls.__subclasses__()
+ except TypeError: # fails only when cls is type
+ subs = cls.__subclasses__(cls)
+ for sub in subs:
+ yield sub
+ yield from iter_subclasses(sub)
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco/classes/meta.py b/monEnvTP/lib/python3.8/site-packages/jaraco/classes/meta.py
new file mode 100644
index 0000000000000000000000000000000000000000..bd41a1d97b4cebd2d28b00c6d025ff4b80200f4e
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco/classes/meta.py
@@ -0,0 +1,66 @@
+"""
+meta.py
+
+Some useful metaclasses.
+"""
+
+
+class LeafClassesMeta(type):
+ """
+ A metaclass for classes that keeps track of all of them that
+ aren't base classes.
+
+ >>> Parent = LeafClassesMeta('MyParentClass', (), {})
+ >>> Parent in Parent._leaf_classes
+ True
+ >>> Child = LeafClassesMeta('MyChildClass', (Parent,), {})
+ >>> Child in Parent._leaf_classes
+ True
+ >>> Parent in Parent._leaf_classes
+ False
+
+ >>> Other = LeafClassesMeta('OtherClass', (), {})
+ >>> Parent in Other._leaf_classes
+ False
+ >>> len(Other._leaf_classes)
+ 1
+ """
+
+ def __init__(cls, name, bases, attrs):
+ if not hasattr(cls, '_leaf_classes'):
+ cls._leaf_classes = set()
+ leaf_classes = getattr(cls, '_leaf_classes')
+ leaf_classes.add(cls)
+ # remove any base classes
+ leaf_classes -= set(bases)
+
+
+class TagRegistered(type):
+ """
+ As classes of this metaclass are created, they keep a registry in the
+ base class of all classes by a class attribute, indicated by attr_name.
+
+ >>> FooObject = TagRegistered('FooObject', (), dict(tag='foo'))
+ >>> FooObject._registry['foo'] is FooObject
+ True
+ >>> BarObject = TagRegistered('Barobject', (FooObject,), dict(tag='bar'))
+ >>> FooObject._registry is BarObject._registry
+ True
+ >>> len(FooObject._registry)
+ 2
+
+ '...' below should be 'jaraco.classes' but for pytest-dev/pytest#3396
+ >>> FooObject._registry['bar']
+ <class '....meta.Barobject'>
+ """
+
+ attr_name = 'tag'
+
+ def __init__(cls, name, bases, namespace):
+ super(TagRegistered, cls).__init__(name, bases, namespace)
+ if not hasattr(cls, '_registry'):
+ cls._registry = {}
+ meta = cls.__class__
+ attr = getattr(cls, meta.attr_name, None)
+ if attr:
+ cls._registry[attr] = cls
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco/classes/properties.py b/monEnvTP/lib/python3.8/site-packages/jaraco/classes/properties.py
new file mode 100644
index 0000000000000000000000000000000000000000..4fda4cb6bc30ee4b6993c5b944f26eb619231dc1
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco/classes/properties.py
@@ -0,0 +1,171 @@
+class NonDataProperty:
+ """Much like the property builtin, but only implements __get__,
+ making it a non-data property, and can be subsequently reset.
+
+ See http://users.rcn.com/python/download/Descriptor.htm for more
+ information.
+
+ >>> class X(object):
+ ... @NonDataProperty
+ ... def foo(self):
+ ... return 3
+ >>> x = X()
+ >>> x.foo
+ 3
+ >>> x.foo = 4
+ >>> x.foo
+ 4
+
+ '...' below should be 'jaraco.classes' but for pytest-dev/pytest#3396
+ >>> X.foo
+ <....properties.NonDataProperty object at ...>
+ """
+
+ def __init__(self, fget):
+ assert fget is not None, "fget cannot be none"
+ assert callable(fget), "fget must be callable"
+ self.fget = fget
+
+ def __get__(self, obj, objtype=None):
+ if obj is None:
+ return self
+ return self.fget(obj)
+
+
+class classproperty:
+ """
+ Like @property but applies at the class level.
+
+
+ >>> class X(metaclass=classproperty.Meta):
+ ... val = None
+ ... @classproperty
+ ... def foo(cls):
+ ... return cls.val
+ ... @foo.setter
+ ... def foo(cls, val):
+ ... cls.val = val
+ >>> X.foo
+ >>> X.foo = 3
+ >>> X.foo
+ 3
+ >>> x = X()
+ >>> x.foo
+ 3
+ >>> X.foo = 4
+ >>> x.foo
+ 4
+
+ Setting the property on an instance affects the class.
+
+ >>> x.foo = 5
+ >>> x.foo
+ 5
+ >>> X.foo
+ 5
+ >>> vars(x)
+ {}
+ >>> X().foo
+ 5
+
+ Attempting to set an attribute where no setter was defined
+ results in an AttributeError:
+
+ >>> class GetOnly(metaclass=classproperty.Meta):
+ ... @classproperty
+ ... def foo(cls):
+ ... return 'bar'
+ >>> GetOnly.foo = 3
+ Traceback (most recent call last):
+ ...
+ AttributeError: can't set attribute
+
+ It is also possible to wrap a classmethod or staticmethod in
+ a classproperty.
+
+ >>> class Static(metaclass=classproperty.Meta):
+ ... @classproperty
+ ... @classmethod
+ ... def foo(cls):
+ ... return 'foo'
+ ... @classproperty
+ ... @staticmethod
+ ... def bar():
+ ... return 'bar'
+ >>> Static.foo
+ 'foo'
+ >>> Static.bar
+ 'bar'
+
+ *Legacy*
+
+ For compatibility, if the metaclass isn't specified, the
+ legacy behavior will be invoked.
+
+ >>> class X:
+ ... val = None
+ ... @classproperty
+ ... def foo(cls):
+ ... return cls.val
+ ... @foo.setter
+ ... def foo(cls, val):
+ ... cls.val = val
+ >>> X.foo
+ >>> X.foo = 3
+ >>> X.foo
+ 3
+ >>> x = X()
+ >>> x.foo
+ 3
+ >>> X.foo = 4
+ >>> x.foo
+ 4
+
+ Note, because the metaclass was not specified, setting
+ a value on an instance does not have the intended effect.
+
+ >>> x.foo = 5
+ >>> x.foo
+ 5
+ >>> X.foo # should be 5
+ 4
+ >>> vars(x) # should be empty
+ {'foo': 5}
+ >>> X().foo # should be 5
+ 4
+ """
+
+ class Meta(type):
+ def __setattr__(self, key, value):
+ obj = self.__dict__.get(key, None)
+ if type(obj) is classproperty:
+ return obj.__set__(self, value)
+ return super().__setattr__(key, value)
+
+ def __init__(self, fget, fset=None):
+ self.fget = self._fix_function(fget)
+ self.fset = fset
+ fset and self.setter(fset)
+
+ def __get__(self, instance, owner=None):
+ return self.fget.__get__(None, owner)()
+
+ def __set__(self, owner, value):
+ if not self.fset:
+ raise AttributeError("can't set attribute")
+ if type(owner) is not classproperty.Meta:
+ owner = type(owner)
+ return self.fset.__get__(None, owner)(value)
+
+ def setter(self, fset):
+ self.fset = self._fix_function(fset)
+ return self
+
+ @classmethod
+ def _fix_function(cls, fn):
+ """
+ Ensure fn is a classmethod or staticmethod.
+ """
+ if not isinstance(fn, (classmethod, staticmethod)):
+ return classmethod(fn)
+ return fn
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco/collections.py b/monEnvTP/lib/python3.8/site-packages/jaraco/collections.py
new file mode 100644
index 0000000000000000000000000000000000000000..8323db78a08bc4343ca44d67abe25b5dec19287b
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco/collections.py
@@ -0,0 +1,1056 @@
+import re
+import operator
+import collections.abc
+import itertools
+import copy
+import functools
+import random
+
+from jaraco.classes.properties import NonDataProperty
+import jaraco.text
+
+
+class Projection(collections.abc.Mapping):
+ """
+ Project a set of keys over a mapping
+
+ >>> sample = {'a': 1, 'b': 2, 'c': 3}
+ >>> prj = Projection(['a', 'c', 'd'], sample)
+ >>> prj == {'a': 1, 'c': 3}
+ True
+
+ Keys should only appear if they were specified and exist in the space.
+
+ >>> sorted(list(prj.keys()))
+ ['a', 'c']
+
+ Attempting to access a key not in the projection
+ results in a KeyError.
+
+ >>> prj['b']
+ Traceback (most recent call last):
+ ...
+ KeyError: 'b'
+
+ Use the projection to update another dict.
+
+ >>> target = {'a': 2, 'b': 2}
+ >>> target.update(prj)
+ >>> target == {'a': 1, 'b': 2, 'c': 3}
+ True
+
+ Also note that Projection keeps a reference to the original dict, so
+ if you modify the original dict, that could modify the Projection.
+
+ >>> del sample['a']
+ >>> dict(prj)
+ {'c': 3}
+ """
+
+ def __init__(self, keys, space):
+ self._keys = tuple(keys)
+ self._space = space
+
+ def __getitem__(self, key):
+ if key not in self._keys:
+ raise KeyError(key)
+ return self._space[key]
+
+ def __iter__(self):
+ return iter(set(self._keys).intersection(self._space))
+
+ def __len__(self):
+ return len(tuple(iter(self)))
+
+
+class DictFilter(object):
+ """
+ Takes a dict, and simulates a sub-dict based on the keys.
+
+ >>> sample = {'a': 1, 'b': 2, 'c': 3}
+ >>> filtered = DictFilter(sample, ['a', 'c'])
+ >>> filtered == {'a': 1, 'c': 3}
+ True
+ >>> set(filtered.values()) == {1, 3}
+ True
+ >>> set(filtered.items()) == {('a', 1), ('c', 3)}
+ True
+
+ One can also filter by a regular expression pattern
+
+ >>> sample['d'] = 4
+ >>> sample['ef'] = 5
+
+ Here we filter for only single-character keys
+
+ >>> filtered = DictFilter(sample, include_pattern='.$')
+ >>> filtered == {'a': 1, 'b': 2, 'c': 3, 'd': 4}
+ True
+
+ >>> filtered['e']
+ Traceback (most recent call last):
+ ...
+ KeyError: 'e'
+
+ Also note that DictFilter keeps a reference to the original dict, so
+ if you modify the original dict, that could modify the filtered dict.
+
+ >>> del sample['d']
+ >>> del sample['a']
+ >>> filtered == {'b': 2, 'c': 3}
+ True
+ >>> filtered != {'b': 2, 'c': 3}
+ False
+ """
+
+ def __init__(self, dict, include_keys=[], include_pattern=None):
+ self.dict = dict
+ self.specified_keys = set(include_keys)
+ if include_pattern is not None:
+ self.include_pattern = re.compile(include_pattern)
+ else:
+ # for performance, replace the pattern_keys property
+ self.pattern_keys = set()
+
+ def get_pattern_keys(self):
+ keys = filter(self.include_pattern.match, self.dict.keys())
+ return set(keys)
+
+ pattern_keys = NonDataProperty(get_pattern_keys)
+
+ @property
+ def include_keys(self):
+ return self.specified_keys.union(self.pattern_keys)
+
+ def keys(self):
+ return self.include_keys.intersection(self.dict.keys())
+
+ def values(self):
+ return map(self.dict.get, self.keys())
+
+ def __getitem__(self, i):
+ if i not in self.include_keys:
+ raise KeyError(i)
+ return self.dict[i]
+
+ def items(self):
+ keys = self.keys()
+ values = map(self.dict.get, keys)
+ return zip(keys, values)
+
+ def __eq__(self, other):
+ return dict(self) == other
+
+ def __ne__(self, other):
+ return dict(self) != other
+
+
+def dict_map(function, dictionary):
+ """
+ dict_map is much like the built-in function map. It takes a dictionary
+ and applys a function to the values of that dictionary, returning a
+ new dictionary with the mapped values in the original keys.
+
+ >>> d = dict_map(lambda x:x+1, dict(a=1, b=2))
+ >>> d == dict(a=2,b=3)
+ True
+ """
+ return dict((key, function(value)) for key, value in dictionary.items())
+
+
+class RangeMap(dict):
+ """
+ A dictionary-like object that uses the keys as bounds for a range.
+ Inclusion of the value for that range is determined by the
+ key_match_comparator, which defaults to less-than-or-equal.
+ A value is returned for a key if it is the first key that matches in
+ the sorted list of keys.
+
+ One may supply keyword parameters to be passed to the sort function used
+ to sort keys (i.e. cmp [python 2 only], keys, reverse) as sort_params.
+
+ Let's create a map that maps 1-3 -> 'a', 4-6 -> 'b'
+
+ >>> r = RangeMap({3: 'a', 6: 'b'}) # boy, that was easy
+ >>> r[1], r[2], r[3], r[4], r[5], r[6]
+ ('a', 'a', 'a', 'b', 'b', 'b')
+
+ Even float values should work so long as the comparison operator
+ supports it.
+
+ >>> r[4.5]
+ 'b'
+
+ But you'll notice that the way rangemap is defined, it must be open-ended
+ on one side.
+
+ >>> r[0]
+ 'a'
+ >>> r[-1]
+ 'a'
+
+ One can close the open-end of the RangeMap by using undefined_value
+
+ >>> r = RangeMap({0: RangeMap.undefined_value, 3: 'a', 6: 'b'})
+ >>> r[0]
+ Traceback (most recent call last):
+ ...
+ KeyError: 0
+
+ One can get the first or last elements in the range by using RangeMap.Item
+
+ >>> last_item = RangeMap.Item(-1)
+ >>> r[last_item]
+ 'b'
+
+ .last_item is a shortcut for Item(-1)
+
+ >>> r[RangeMap.last_item]
+ 'b'
+
+ Sometimes it's useful to find the bounds for a RangeMap
+
+ >>> r.bounds()
+ (0, 6)
+
+ RangeMap supports .get(key, default)
+
+ >>> r.get(0, 'not found')
+ 'not found'
+
+ >>> r.get(7, 'not found')
+ 'not found'
+ """
+
+ def __init__(self, source, sort_params={}, key_match_comparator=operator.le):
+ dict.__init__(self, source)
+ self.sort_params = sort_params
+ self.match = key_match_comparator
+
+ def __getitem__(self, item):
+ sorted_keys = sorted(self.keys(), **self.sort_params)
+ if isinstance(item, RangeMap.Item):
+ result = self.__getitem__(sorted_keys[item])
+ else:
+ key = self._find_first_match_(sorted_keys, item)
+ result = dict.__getitem__(self, key)
+ if result is RangeMap.undefined_value:
+ raise KeyError(key)
+ return result
+
+ def get(self, key, default=None):
+ """
+ Return the value for key if key is in the dictionary, else default.
+ If default is not given, it defaults to None, so that this method
+ never raises a KeyError.
+ """
+ try:
+ return self[key]
+ except KeyError:
+ return default
+
+ def _find_first_match_(self, keys, item):
+ is_match = functools.partial(self.match, item)
+ matches = list(filter(is_match, keys))
+ if matches:
+ return matches[0]
+ raise KeyError(item)
+
+ def bounds(self):
+ sorted_keys = sorted(self.keys(), **self.sort_params)
+ return (sorted_keys[RangeMap.first_item], sorted_keys[RangeMap.last_item])
+
+ # some special values for the RangeMap
+ undefined_value = type(str('RangeValueUndefined'), (object,), {})()
+
+ class Item(int):
+ "RangeMap Item"
+
+ first_item = Item(0)
+ last_item = Item(-1)
+
+
+def __identity(x):
+ return x
+
+
+def sorted_items(d, key=__identity, reverse=False):
+ """
+ Return the items of the dictionary sorted by the keys
+
+ >>> sample = dict(foo=20, bar=42, baz=10)
+ >>> tuple(sorted_items(sample))
+ (('bar', 42), ('baz', 10), ('foo', 20))
+
+ >>> reverse_string = lambda s: ''.join(reversed(s))
+ >>> tuple(sorted_items(sample, key=reverse_string))
+ (('foo', 20), ('bar', 42), ('baz', 10))
+
+ >>> tuple(sorted_items(sample, reverse=True))
+ (('foo', 20), ('baz', 10), ('bar', 42))
+ """
+ # wrap the key func so it operates on the first element of each item
+ def pairkey_key(item):
+ return key(item[0])
+
+ return sorted(d.items(), key=pairkey_key, reverse=reverse)
+
+
+class KeyTransformingDict(dict):
+ """
+ A dict subclass that transforms the keys before they're used.
+ Subclasses may override the default transform_key to customize behavior.
+ """
+
+ @staticmethod
+ def transform_key(key): # pragma: nocover
+ return key
+
+ def __init__(self, *args, **kargs):
+ super(KeyTransformingDict, self).__init__()
+ # build a dictionary using the default constructs
+ d = dict(*args, **kargs)
+ # build this dictionary using transformed keys.
+ for item in d.items():
+ self.__setitem__(*item)
+
+ def __setitem__(self, key, val):
+ key = self.transform_key(key)
+ super(KeyTransformingDict, self).__setitem__(key, val)
+
+ def __getitem__(self, key):
+ key = self.transform_key(key)
+ return super(KeyTransformingDict, self).__getitem__(key)
+
+ def __contains__(self, key):
+ key = self.transform_key(key)
+ return super(KeyTransformingDict, self).__contains__(key)
+
+ def __delitem__(self, key):
+ key = self.transform_key(key)
+ return super(KeyTransformingDict, self).__delitem__(key)
+
+ def get(self, key, *args, **kwargs):
+ key = self.transform_key(key)
+ return super(KeyTransformingDict, self).get(key, *args, **kwargs)
+
+ def setdefault(self, key, *args, **kwargs):
+ key = self.transform_key(key)
+ return super(KeyTransformingDict, self).setdefault(key, *args, **kwargs)
+
+ def pop(self, key, *args, **kwargs):
+ key = self.transform_key(key)
+ return super(KeyTransformingDict, self).pop(key, *args, **kwargs)
+
+ def matching_key_for(self, key):
+ """
+ Given a key, return the actual key stored in self that matches.
+ Raise KeyError if the key isn't found.
+ """
+ try:
+ return next(e_key for e_key in self.keys() if e_key == key)
+ except StopIteration:
+ raise KeyError(key)
+
+
+class FoldedCaseKeyedDict(KeyTransformingDict):
+ """
+ A case-insensitive dictionary (keys are compared as insensitive
+ if they are strings).
+
+ >>> d = FoldedCaseKeyedDict()
+ >>> d['heLlo'] = 'world'
+ >>> list(d.keys()) == ['heLlo']
+ True
+ >>> list(d.values()) == ['world']
+ True
+ >>> d['hello'] == 'world'
+ True
+ >>> 'hello' in d
+ True
+ >>> 'HELLO' in d
+ True
+ >>> print(repr(FoldedCaseKeyedDict({'heLlo': 'world'})).replace("u'", "'"))
+ {'heLlo': 'world'}
+ >>> d = FoldedCaseKeyedDict({'heLlo': 'world'})
+ >>> print(d['hello'])
+ world
+ >>> print(d['Hello'])
+ world
+ >>> list(d.keys())
+ ['heLlo']
+ >>> d = FoldedCaseKeyedDict({'heLlo': 'world', 'Hello': 'world'})
+ >>> list(d.values())
+ ['world']
+ >>> key, = d.keys()
+ >>> key in ['heLlo', 'Hello']
+ True
+ >>> del d['HELLO']
+ >>> d
+ {}
+
+ get should work
+
+ >>> d['Sumthin'] = 'else'
+ >>> d.get('SUMTHIN')
+ 'else'
+ >>> d.get('OTHER', 'thing')
+ 'thing'
+ >>> del d['sumthin']
+
+ setdefault should also work
+
+ >>> d['This'] = 'that'
+ >>> print(d.setdefault('this', 'other'))
+ that
+ >>> len(d)
+ 1
+ >>> print(d['this'])
+ that
+ >>> print(d.setdefault('That', 'other'))
+ other
+ >>> print(d['THAT'])
+ other
+
+ Make it pop!
+
+ >>> print(d.pop('THAT'))
+ other
+
+ To retrieve the key in its originally-supplied form, use matching_key_for
+
+ >>> print(d.matching_key_for('this'))
+ This
+
+ >>> d.matching_key_for('missing')
+ Traceback (most recent call last):
+ ...
+ KeyError: 'missing'
+ """
+
+ @staticmethod
+ def transform_key(key):
+ return jaraco.text.FoldedCase(key)
+
+
+class DictAdapter(object):
+ """
+ Provide a getitem interface for attributes of an object.
+
+ Let's say you want to get at the string.lowercase property in a formatted
+ string. It's easy with DictAdapter.
+
+ >>> import string
+ >>> print("lowercase is %(ascii_lowercase)s" % DictAdapter(string))
+ lowercase is abcdefghijklmnopqrstuvwxyz
+ """
+
+ def __init__(self, wrapped_ob):
+ self.object = wrapped_ob
+
+ def __getitem__(self, name):
+ return getattr(self.object, name)
+
+
+class ItemsAsAttributes(object):
+ """
+ Mix-in class to enable a mapping object to provide items as
+ attributes.
+
+ >>> C = type(str('C'), (dict, ItemsAsAttributes), dict())
+ >>> i = C()
+ >>> i['foo'] = 'bar'
+ >>> i.foo
+ 'bar'
+
+ Natural attribute access takes precedence
+
+ >>> i.foo = 'henry'
+ >>> i.foo
+ 'henry'
+
+ But as you might expect, the mapping functionality is preserved.
+
+ >>> i['foo']
+ 'bar'
+
+ A normal attribute error should be raised if an attribute is
+ requested that doesn't exist.
+
+ >>> i.missing
+ Traceback (most recent call last):
+ ...
+ AttributeError: 'C' object has no attribute 'missing'
+
+ It also works on dicts that customize __getitem__
+
+ >>> missing_func = lambda self, key: 'missing item'
+ >>> C = type(
+ ... str('C'),
+ ... (dict, ItemsAsAttributes),
+ ... dict(__missing__ = missing_func),
+ ... )
+ >>> i = C()
+ >>> i.missing
+ 'missing item'
+ >>> i.foo
+ 'missing item'
+ """
+
+ def __getattr__(self, key):
+ try:
+ return getattr(super(ItemsAsAttributes, self), key)
+ except AttributeError as e:
+ # attempt to get the value from the mapping (return self[key])
+ # but be careful not to lose the original exception context.
+ noval = object()
+
+ def _safe_getitem(cont, key, missing_result):
+ try:
+ return cont[key]
+ except KeyError:
+ return missing_result
+
+ result = _safe_getitem(self, key, noval)
+ if result is not noval:
+ return result
+ # raise the original exception, but use the original class
+ # name, not 'super'.
+ (message,) = e.args
+ message = message.replace('super', self.__class__.__name__, 1)
+ e.args = (message,)
+ raise
+
+
+def invert_map(map):
+ """
+ Given a dictionary, return another dictionary with keys and values
+ switched. If any of the values resolve to the same key, raises
+ a ValueError.
+
+ >>> numbers = dict(a=1, b=2, c=3)
+ >>> letters = invert_map(numbers)
+ >>> letters[1]
+ 'a'
+ >>> numbers['d'] = 3
+ >>> invert_map(numbers)
+ Traceback (most recent call last):
+ ...
+ ValueError: Key conflict in inverted mapping
+ """
+ res = dict((v, k) for k, v in map.items())
+ if not len(res) == len(map):
+ raise ValueError('Key conflict in inverted mapping')
+ return res
+
+
+class IdentityOverrideMap(dict):
+ """
+ A dictionary that by default maps each key to itself, but otherwise
+ acts like a normal dictionary.
+
+ >>> d = IdentityOverrideMap()
+ >>> d[42]
+ 42
+ >>> d['speed'] = 'speedo'
+ >>> print(d['speed'])
+ speedo
+ """
+
+ def __missing__(self, key):
+ return key
+
+
+class DictStack(list, collections.abc.Mapping):
+ """
+ A stack of dictionaries that behaves as a view on those dictionaries,
+ giving preference to the last.
+
+ >>> stack = DictStack([dict(a=1, c=2), dict(b=2, a=2)])
+ >>> stack['a']
+ 2
+ >>> stack['b']
+ 2
+ >>> stack['c']
+ 2
+ >>> len(stack)
+ 3
+ >>> stack.push(dict(a=3))
+ >>> stack['a']
+ 3
+ >>> set(stack.keys()) == set(['a', 'b', 'c'])
+ True
+ >>> set(stack.items()) == set([('a', 3), ('b', 2), ('c', 2)])
+ True
+ >>> dict(**stack) == dict(stack) == dict(a=3, c=2, b=2)
+ True
+ >>> d = stack.pop()
+ >>> stack['a']
+ 2
+ >>> d = stack.pop()
+ >>> stack['a']
+ 1
+ >>> stack.get('b', None)
+ >>> 'c' in stack
+ True
+ """
+
+ def __iter__(self):
+ dicts = list.__iter__(self)
+ return iter(set(itertools.chain.from_iterable(c.keys() for c in dicts)))
+
+ def __getitem__(self, key):
+ for scope in reversed(tuple(list.__iter__(self))):
+ if key in scope:
+ return scope[key]
+ raise KeyError(key)
+
+ push = list.append
+
+ def __contains__(self, other):
+ return collections.abc.Mapping.__contains__(self, other)
+
+ def __len__(self):
+ return len(list(iter(self)))
+
+
+class BijectiveMap(dict):
+ """
+ A Bijective Map (two-way mapping).
+
+ Implemented as a simple dictionary of 2x the size, mapping values back
+ to keys.
+
+ Note, this implementation may be incomplete. If there's not a test for
+ your use case below, it's likely to fail, so please test and send pull
+ requests or patches for additional functionality needed.
+
+
+ >>> m = BijectiveMap()
+ >>> m['a'] = 'b'
+ >>> m == {'a': 'b', 'b': 'a'}
+ True
+ >>> print(m['b'])
+ a
+
+ >>> m['c'] = 'd'
+ >>> len(m)
+ 2
+
+ Some weird things happen if you map an item to itself or overwrite a
+ single key of a pair, so it's disallowed.
+
+ >>> m['e'] = 'e'
+ Traceback (most recent call last):
+ ValueError: Key cannot map to itself
+
+ >>> m['d'] = 'e'
+ Traceback (most recent call last):
+ ValueError: Key/Value pairs may not overlap
+
+ >>> m['e'] = 'd'
+ Traceback (most recent call last):
+ ValueError: Key/Value pairs may not overlap
+
+ >>> print(m.pop('d'))
+ c
+
+ >>> 'c' in m
+ False
+
+ >>> m = BijectiveMap(dict(a='b'))
+ >>> len(m)
+ 1
+ >>> print(m['b'])
+ a
+
+ >>> m = BijectiveMap()
+ >>> m.update(a='b')
+ >>> m['b']
+ 'a'
+
+ >>> del m['b']
+ >>> len(m)
+ 0
+ >>> 'a' in m
+ False
+ """
+
+ def __init__(self, *args, **kwargs):
+ super(BijectiveMap, self).__init__()
+ self.update(*args, **kwargs)
+
+ def __setitem__(self, item, value):
+ if item == value:
+ raise ValueError("Key cannot map to itself")
+ overlap = (
+ item in self
+ and self[item] != value
+ or value in self
+ and self[value] != item
+ )
+ if overlap:
+ raise ValueError("Key/Value pairs may not overlap")
+ super(BijectiveMap, self).__setitem__(item, value)
+ super(BijectiveMap, self).__setitem__(value, item)
+
+ def __delitem__(self, item):
+ self.pop(item)
+
+ def __len__(self):
+ return super(BijectiveMap, self).__len__() // 2
+
+ def pop(self, key, *args, **kwargs):
+ mirror = self[key]
+ super(BijectiveMap, self).__delitem__(mirror)
+ return super(BijectiveMap, self).pop(key, *args, **kwargs)
+
+ def update(self, *args, **kwargs):
+ # build a dictionary using the default constructs
+ d = dict(*args, **kwargs)
+ # build this dictionary using transformed keys.
+ for item in d.items():
+ self.__setitem__(*item)
+
+
+class FrozenDict(collections.abc.Mapping, collections.abc.Hashable):
+ """
+ An immutable mapping.
+
+ >>> a = FrozenDict(a=1, b=2)
+ >>> b = FrozenDict(a=1, b=2)
+ >>> a == b
+ True
+
+ >>> a == dict(a=1, b=2)
+ True
+ >>> dict(a=1, b=2) == a
+ True
+ >>> 'a' in a
+ True
+ >>> type(hash(a)) is type(0)
+ True
+ >>> set(iter(a)) == {'a', 'b'}
+ True
+ >>> len(a)
+ 2
+ >>> a['a'] == a.get('a') == 1
+ True
+
+ >>> a['c'] = 3
+ Traceback (most recent call last):
+ ...
+ TypeError: 'FrozenDict' object does not support item assignment
+
+ >>> a.update(y=3)
+ Traceback (most recent call last):
+ ...
+ AttributeError: 'FrozenDict' object has no attribute 'update'
+
+ Copies should compare equal
+
+ >>> copy.copy(a) == a
+ True
+
+ Copies should be the same type
+
+ >>> isinstance(copy.copy(a), FrozenDict)
+ True
+
+ FrozenDict supplies .copy(), even though
+ collections.abc.Mapping doesn't demand it.
+
+ >>> a.copy() == a
+ True
+ >>> a.copy() is not a
+ True
+ """
+
+ __slots__ = ['__data']
+
+ def __new__(cls, *args, **kwargs):
+ self = super(FrozenDict, cls).__new__(cls)
+ self.__data = dict(*args, **kwargs)
+ return self
+
+ # Container
+ def __contains__(self, key):
+ return key in self.__data
+
+ # Hashable
+ def __hash__(self):
+ return hash(tuple(sorted(self.__data.items())))
+
+ # Mapping
+ def __iter__(self):
+ return iter(self.__data)
+
+ def __len__(self):
+ return len(self.__data)
+
+ def __getitem__(self, key):
+ return self.__data[key]
+
+ # override get for efficiency provided by dict
+ def get(self, *args, **kwargs):
+ return self.__data.get(*args, **kwargs)
+
+ # override eq to recognize underlying implementation
+ def __eq__(self, other):
+ if isinstance(other, FrozenDict):
+ other = other.__data
+ return self.__data.__eq__(other)
+
+ def copy(self):
+ "Return a shallow copy of self"
+ return copy.copy(self)
+
+
+class Enumeration(ItemsAsAttributes, BijectiveMap):
+ """
+ A convenient way to provide enumerated values
+
+ >>> e = Enumeration('a b c')
+ >>> e['a']
+ 0
+
+ >>> e.a
+ 0
+
+ >>> e[1]
+ 'b'
+
+ >>> set(e.names) == set('abc')
+ True
+
+ >>> set(e.codes) == set(range(3))
+ True
+
+ >>> e.get('d') is None
+ True
+
+ Codes need not start with 0
+
+ >>> e = Enumeration('a b c', range(1, 4))
+ >>> e['a']
+ 1
+
+ >>> e[3]
+ 'c'
+ """
+
+ def __init__(self, names, codes=None):
+ if isinstance(names, str):
+ names = names.split()
+ if codes is None:
+ codes = itertools.count()
+ super(Enumeration, self).__init__(zip(names, codes))
+
+ @property
+ def names(self):
+ return (key for key in self if isinstance(key, str))
+
+ @property
+ def codes(self):
+ return (self[name] for name in self.names)
+
+
+class Everything(object):
+ """
+ A collection "containing" every possible thing.
+
+ >>> 'foo' in Everything()
+ True
+
+ >>> import random
+ >>> random.randint(1, 999) in Everything()
+ True
+
+ >>> random.choice([None, 'foo', 42, ('a', 'b', 'c')]) in Everything()
+ True
+ """
+
+ def __contains__(self, other):
+ return True
+
+
+class InstrumentedDict(collections.UserDict): # type: ignore # buggy mypy
+ """
+ Instrument an existing dictionary with additional
+ functionality, but always reference and mutate
+ the original dictionary.
+
+ >>> orig = {'a': 1, 'b': 2}
+ >>> inst = InstrumentedDict(orig)
+ >>> inst['a']
+ 1
+ >>> inst['c'] = 3
+ >>> orig['c']
+ 3
+ >>> inst.keys() == orig.keys()
+ True
+ """
+
+ def __init__(self, data):
+ super().__init__()
+ self.data = data
+
+
+class Least(object):
+ """
+ A value that is always lesser than any other
+
+ >>> least = Least()
+ >>> 3 < least
+ False
+ >>> 3 > least
+ True
+ >>> least < 3
+ True
+ >>> least <= 3
+ True
+ >>> least > 3
+ False
+ >>> 'x' > least
+ True
+ >>> None > least
+ True
+ """
+
+ def __le__(self, other):
+ return True
+
+ __lt__ = __le__
+
+ def __ge__(self, other):
+ return False
+
+ __gt__ = __ge__
+
+
+class Greatest(object):
+ """
+ A value that is always greater than any other
+
+ >>> greatest = Greatest()
+ >>> 3 < greatest
+ True
+ >>> 3 > greatest
+ False
+ >>> greatest < 3
+ False
+ >>> greatest > 3
+ True
+ >>> greatest >= 3
+ True
+ >>> 'x' > greatest
+ False
+ >>> None > greatest
+ False
+ """
+
+ def __ge__(self, other):
+ return True
+
+ __gt__ = __ge__
+
+ def __le__(self, other):
+ return False
+
+ __lt__ = __le__
+
+
+def pop_all(items):
+ """
+ Clear items in place and return a copy of items.
+
+ >>> items = [1, 2, 3]
+ >>> popped = pop_all(items)
+ >>> popped is items
+ False
+ >>> popped
+ [1, 2, 3]
+ >>> items
+ []
+ """
+ result, items[:] = items[:], []
+ return result
+
+
+# mypy disabled for pytest-dev/pytest#8332
+class FreezableDefaultDict(collections.defaultdict): # type: ignore
+ """
+ Often it is desirable to prevent the mutation of
+ a default dict after its initial construction, such
+ as to prevent mutation during iteration.
+
+ >>> dd = FreezableDefaultDict(list)
+ >>> dd[0].append('1')
+ >>> dd.freeze()
+ >>> dd[1]
+ []
+ >>> len(dd)
+ 1
+ """
+
+ def __missing__(self, key):
+ return getattr(self, '_frozen', super().__missing__)(key)
+
+ def freeze(self):
+ self._frozen = lambda key: self.default_factory()
+
+
+class Accumulator:
+ def __init__(self, initial=0):
+ self.val = initial
+
+ def __call__(self, val):
+ self.val += val
+ return self.val
+
+
+class WeightedLookup(RangeMap):
+ """
+ Given parameters suitable for a dict representing keys
+ and a weighted proportion, return a RangeMap representing
+ spans of values proportional to the weights:
+
+ >>> even = WeightedLookup(a=1, b=1)
+
+ [0, 1) -> a
+ [1, 2) -> b
+
+ >>> lk = WeightedLookup(a=1, b=2)
+
+ [0, 1) -> a
+ [1, 3) -> b
+
+ >>> lk[.5]
+ 'a'
+ >>> lk[1.5]
+ 'b'
+
+ Adds ``.random()`` to select a random weighted value:
+
+ >>> lk.random() in ['a', 'b']
+ True
+
+ >>> choices = [lk.random() for x in range(1000)]
+
+ Statistically speaking, choices should be .5 a:b
+ >>> ratio = choices.count('a') / choices.count('b')
+ >>> .4 < ratio < .6
+ True
+ """
+
+ def __init__(self, *args, **kwargs):
+ raw = dict(*args, **kwargs)
+
+ # allocate keys by weight
+ indexes = map(Accumulator(), raw.values())
+ super().__init__(zip(indexes, raw.keys()), key_match_comparator=operator.lt)
+
+ def random(self):
+ lower, upper = self.bounds()
+ selector = random.random() * upper
+ return self[selector]
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco/context.py b/monEnvTP/lib/python3.8/site-packages/jaraco/context.py
new file mode 100644
index 0000000000000000000000000000000000000000..87a4e3dca299c4201ac50f6ef589dc73f1c45576
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco/context.py
@@ -0,0 +1,213 @@
+import os
+import subprocess
+import contextlib
+import functools
+import tempfile
+import shutil
+import operator
+
+
+@contextlib.contextmanager
+def pushd(dir):
+ orig = os.getcwd()
+ os.chdir(dir)
+ try:
+ yield dir
+ finally:
+ os.chdir(orig)
+
+
+@contextlib.contextmanager
+def tarball_context(url, target_dir=None, runner=None, pushd=pushd):
+ """
+ Get a tarball, extract it, change to that directory, yield, then
+ clean up.
+ `runner` is the function to invoke commands.
+ `pushd` is a context manager for changing the directory.
+ """
+ if target_dir is None:
+ target_dir = os.path.basename(url).replace('.tar.gz', '').replace('.tgz', '')
+ if runner is None:
+ runner = functools.partial(subprocess.check_call, shell=True)
+ # In the tar command, use --strip-components=1 to strip the first path and
+ # then
+ # use -C to cause the files to be extracted to {target_dir}. This ensures
+ # that we always know where the files were extracted.
+ runner('mkdir {target_dir}'.format(**vars()))
+ try:
+ getter = 'wget {url} -O -'
+ extract = 'tar x{compression} --strip-components=1 -C {target_dir}'
+ cmd = ' | '.join((getter, extract))
+ runner(cmd.format(compression=infer_compression(url), **vars()))
+ with pushd(target_dir):
+ yield target_dir
+ finally:
+ runner('rm -Rf {target_dir}'.format(**vars()))
+
+
+def infer_compression(url):
+ """
+ Given a URL or filename, infer the compression code for tar.
+ """
+ # cheat and just assume it's the last two characters
+ compression_indicator = url[-2:]
+ mapping = dict(gz='z', bz='j', xz='J')
+ # Assume 'z' (gzip) if no match
+ return mapping.get(compression_indicator, 'z')
+
+
+@contextlib.contextmanager
+def temp_dir(remover=shutil.rmtree):
+ """
+ Create a temporary directory context. Pass a custom remover
+ to override the removal behavior.
+ """
+ temp_dir = tempfile.mkdtemp()
+ try:
+ yield temp_dir
+ finally:
+ remover(temp_dir)
+
+
+@contextlib.contextmanager
+def repo_context(url, branch=None, quiet=True, dest_ctx=temp_dir):
+ """
+ Check out the repo indicated by url.
+
+ If dest_ctx is supplied, it should be a context manager
+ to yield the target directory for the check out.
+ """
+ exe = 'git' if 'git' in url else 'hg'
+ with dest_ctx() as repo_dir:
+ cmd = [exe, 'clone', url, repo_dir]
+ if branch:
+ cmd.extend(['--branch', branch])
+ devnull = open(os.path.devnull, 'w')
+ stdout = devnull if quiet else None
+ subprocess.check_call(cmd, stdout=stdout)
+ yield repo_dir
+
+
+@contextlib.contextmanager
+def null():
+ yield
+
+
+class ExceptionTrap:
+ """
+ A context manager that will catch certain exceptions and provide an
+ indication they occurred.
+
+ >>> with ExceptionTrap() as trap:
+ ... raise Exception()
+ >>> bool(trap)
+ True
+
+ >>> with ExceptionTrap() as trap:
+ ... pass
+ >>> bool(trap)
+ False
+
+ >>> with ExceptionTrap(ValueError) as trap:
+ ... raise ValueError("1 + 1 is not 3")
+ >>> bool(trap)
+ True
+
+ >>> with ExceptionTrap(ValueError) as trap:
+ ... raise Exception()
+ Traceback (most recent call last):
+ ...
+ Exception
+
+ >>> bool(trap)
+ False
+ """
+
+ exc_info = None, None, None
+
+ def __init__(self, exceptions=(Exception,)):
+ self.exceptions = exceptions
+
+ def __enter__(self):
+ return self
+
+ @property
+ def type(self):
+ return self.exc_info[0]
+
+ @property
+ def value(self):
+ return self.exc_info[1]
+
+ @property
+ def tb(self):
+ return self.exc_info[2]
+
+ def __exit__(self, *exc_info):
+ type = exc_info[0]
+ matches = type and issubclass(type, self.exceptions)
+ if matches:
+ self.exc_info = exc_info
+ return matches
+
+ def __bool__(self):
+ return bool(self.type)
+
+ def raises(self, func, *, _test=bool):
+ """
+ Wrap func and replace the result with the truth
+ value of the trap (True if an exception occurred).
+
+ First, give the decorator an alias to support Python 3.8
+ syntax.
+
+ >>> raises = ExceptionTrap(ValueError).raises
+
+ Now decorate a function that always fails.
+
+ >>> @raises
+ ... def fail():
+ ... raise ValueError('failed')
+ >>> fail()
+ True
+ """
+
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ with ExceptionTrap(self.exceptions) as trap:
+ func(*args, **kwargs)
+ return _test(trap)
+
+ return wrapper
+
+ def passes(self, func):
+ """
+ Wrap func and replace the result with the truth
+ value of the trap (True if no exception).
+
+ First, give the decorator an alias to support Python 3.8
+ syntax.
+
+ >>> passes = ExceptionTrap(ValueError).passes
+
+ Now decorate a function that always fails.
+
+ >>> @passes
+ ... def fail():
+ ... raise ValueError('failed')
+
+ >>> fail()
+ False
+ """
+ return self.raises(func, _test=operator.not_)
+
+
+class suppress(contextlib.suppress, contextlib.ContextDecorator):
+ """
+ A version of contextlib.suppress with decorator support.
+
+ >>> @suppress(KeyError)
+ ... def key_error():
+ ... {}['']
+ >>> key_error()
+ """
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco/functools.py b/monEnvTP/lib/python3.8/site-packages/jaraco/functools.py
new file mode 100644
index 0000000000000000000000000000000000000000..fcdbb4f9bfa72125b2db27c830418502c575c558
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco/functools.py
@@ -0,0 +1,525 @@
+import functools
+import time
+import inspect
+import collections
+import types
+import itertools
+
+import more_itertools
+
+from typing import Callable, TypeVar
+
+
+CallableT = TypeVar("CallableT", bound=Callable[..., object])
+
+
+def compose(*funcs):
+ """
+ Compose any number of unary functions into a single unary function.
+
+ >>> import textwrap
+ >>> expected = str.strip(textwrap.dedent(compose.__doc__))
+ >>> strip_and_dedent = compose(str.strip, textwrap.dedent)
+ >>> strip_and_dedent(compose.__doc__) == expected
+ True
+
+ Compose also allows the innermost function to take arbitrary arguments.
+
+ >>> round_three = lambda x: round(x, ndigits=3)
+ >>> f = compose(round_three, int.__truediv__)
+ >>> [f(3*x, x+1) for x in range(1,10)]
+ [1.5, 2.0, 2.25, 2.4, 2.5, 2.571, 2.625, 2.667, 2.7]
+ """
+
+ def compose_two(f1, f2):
+ return lambda *args, **kwargs: f1(f2(*args, **kwargs))
+
+ return functools.reduce(compose_two, funcs)
+
+
+def method_caller(method_name, *args, **kwargs):
+ """
+ Return a function that will call a named method on the
+ target object with optional positional and keyword
+ arguments.
+
+ >>> lower = method_caller('lower')
+ >>> lower('MyString')
+ 'mystring'
+ """
+
+ def call_method(target):
+ func = getattr(target, method_name)
+ return func(*args, **kwargs)
+
+ return call_method
+
+
+def once(func):
+ """
+ Decorate func so it's only ever called the first time.
+
+ This decorator can ensure that an expensive or non-idempotent function
+ will not be expensive on subsequent calls and is idempotent.
+
+ >>> add_three = once(lambda a: a+3)
+ >>> add_three(3)
+ 6
+ >>> add_three(9)
+ 6
+ >>> add_three('12')
+ 6
+
+ To reset the stored value, simply clear the property ``saved_result``.
+
+ >>> del add_three.saved_result
+ >>> add_three(9)
+ 12
+ >>> add_three(8)
+ 12
+
+ Or invoke 'reset()' on it.
+
+ >>> add_three.reset()
+ >>> add_three(-3)
+ 0
+ >>> add_three(0)
+ 0
+ """
+
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ if not hasattr(wrapper, 'saved_result'):
+ wrapper.saved_result = func(*args, **kwargs)
+ return wrapper.saved_result
+
+ wrapper.reset = lambda: vars(wrapper).__delitem__('saved_result')
+ return wrapper
+
+
+def method_cache(
+ method: CallableT,
+ cache_wrapper: Callable[
+ [CallableT], CallableT
+ ] = functools.lru_cache(), # type: ignore[assignment]
+) -> CallableT:
+ """
+ Wrap lru_cache to support storing the cache data in the object instances.
+
+ Abstracts the common paradigm where the method explicitly saves an
+ underscore-prefixed protected property on first call and returns that
+ subsequently.
+
+ >>> class MyClass:
+ ... calls = 0
+ ...
+ ... @method_cache
+ ... def method(self, value):
+ ... self.calls += 1
+ ... return value
+
+ >>> a = MyClass()
+ >>> a.method(3)
+ 3
+ >>> for x in range(75):
+ ... res = a.method(x)
+ >>> a.calls
+ 75
+
+ Note that the apparent behavior will be exactly like that of lru_cache
+ except that the cache is stored on each instance, so values in one
+ instance will not flush values from another, and when an instance is
+ deleted, so are the cached values for that instance.
+
+ >>> b = MyClass()
+ >>> for x in range(35):
+ ... res = b.method(x)
+ >>> b.calls
+ 35
+ >>> a.method(0)
+ 0
+ >>> a.calls
+ 75
+
+ Note that if method had been decorated with ``functools.lru_cache()``,
+ a.calls would have been 76 (due to the cached value of 0 having been
+ flushed by the 'b' instance).
+
+ Clear the cache with ``.cache_clear()``
+
+ >>> a.method.cache_clear()
+
+ Same for a method that hasn't yet been called.
+
+ >>> c = MyClass()
+ >>> c.method.cache_clear()
+
+ Another cache wrapper may be supplied:
+
+ >>> cache = functools.lru_cache(maxsize=2)
+ >>> MyClass.method2 = method_cache(lambda self: 3, cache_wrapper=cache)
+ >>> a = MyClass()
+ >>> a.method2()
+ 3
+
+ Caution - do not subsequently wrap the method with another decorator, such
+ as ``@property``, which changes the semantics of the function.
+
+ See also
+ http://code.activestate.com/recipes/577452-a-memoize-decorator-for-instance-methods/
+ for another implementation and additional justification.
+ """
+
+ def wrapper(self: object, *args: object, **kwargs: object) -> object:
+ # it's the first call, replace the method with a cached, bound method
+ bound_method: CallableT = types.MethodType( # type: ignore[assignment]
+ method, self
+ )
+ cached_method = cache_wrapper(bound_method)
+ setattr(self, method.__name__, cached_method)
+ return cached_method(*args, **kwargs)
+
+ # Support cache clear even before cache has been created.
+ wrapper.cache_clear = lambda: None # type: ignore[attr-defined]
+
+ return ( # type: ignore[return-value]
+ _special_method_cache(method, cache_wrapper) or wrapper
+ )
+
+
+def _special_method_cache(method, cache_wrapper):
+ """
+ Because Python treats special methods differently, it's not
+ possible to use instance attributes to implement the cached
+ methods.
+
+ Instead, install the wrapper method under a different name
+ and return a simple proxy to that wrapper.
+
+ https://github.com/jaraco/jaraco.functools/issues/5
+ """
+ name = method.__name__
+ special_names = '__getattr__', '__getitem__'
+ if name not in special_names:
+ return
+
+ wrapper_name = '__cached' + name
+
+ def proxy(self, *args, **kwargs):
+ if wrapper_name not in vars(self):
+ bound = types.MethodType(method, self)
+ cache = cache_wrapper(bound)
+ setattr(self, wrapper_name, cache)
+ else:
+ cache = getattr(self, wrapper_name)
+ return cache(*args, **kwargs)
+
+ return proxy
+
+
+def apply(transform):
+ """
+ Decorate a function with a transform function that is
+ invoked on results returned from the decorated function.
+
+ >>> @apply(reversed)
+ ... def get_numbers(start):
+ ... "doc for get_numbers"
+ ... return range(start, start+3)
+ >>> list(get_numbers(4))
+ [6, 5, 4]
+ >>> get_numbers.__doc__
+ 'doc for get_numbers'
+ """
+
+ def wrap(func):
+ return functools.wraps(func)(compose(transform, func))
+
+ return wrap
+
+
+def result_invoke(action):
+ r"""
+ Decorate a function with an action function that is
+ invoked on the results returned from the decorated
+ function (for its side-effect), then return the original
+ result.
+
+ >>> @result_invoke(print)
+ ... def add_two(a, b):
+ ... return a + b
+ >>> x = add_two(2, 3)
+ 5
+ >>> x
+ 5
+ """
+
+ def wrap(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ result = func(*args, **kwargs)
+ action(result)
+ return result
+
+ return wrapper
+
+ return wrap
+
+
+def call_aside(f, *args, **kwargs):
+ """
+ Call a function for its side effect after initialization.
+
+ >>> @call_aside
+ ... def func(): print("called")
+ called
+ >>> func()
+ called
+
+ Use functools.partial to pass parameters to the initial call
+
+ >>> @functools.partial(call_aside, name='bingo')
+ ... def func(name): print("called with", name)
+ called with bingo
+ """
+ f(*args, **kwargs)
+ return f
+
+
+class Throttler:
+ """
+ Rate-limit a function (or other callable)
+ """
+
+ def __init__(self, func, max_rate=float('Inf')):
+ if isinstance(func, Throttler):
+ func = func.func
+ self.func = func
+ self.max_rate = max_rate
+ self.reset()
+
+ def reset(self):
+ self.last_called = 0
+
+ def __call__(self, *args, **kwargs):
+ self._wait()
+ return self.func(*args, **kwargs)
+
+ def _wait(self):
+ "ensure at least 1/max_rate seconds from last call"
+ elapsed = time.time() - self.last_called
+ must_wait = 1 / self.max_rate - elapsed
+ time.sleep(max(0, must_wait))
+ self.last_called = time.time()
+
+ def __get__(self, obj, type=None):
+ return first_invoke(self._wait, functools.partial(self.func, obj))
+
+
+def first_invoke(func1, func2):
+ """
+ Return a function that when invoked will invoke func1 without
+ any parameters (for its side-effect) and then invoke func2
+ with whatever parameters were passed, returning its result.
+ """
+
+ def wrapper(*args, **kwargs):
+ func1()
+ return func2(*args, **kwargs)
+
+ return wrapper
+
+
+def retry_call(func, cleanup=lambda: None, retries=0, trap=()):
+ """
+ Given a callable func, trap the indicated exceptions
+ for up to 'retries' times, invoking cleanup on the
+ exception. On the final attempt, allow any exceptions
+ to propagate.
+ """
+ attempts = itertools.count() if retries == float('inf') else range(retries)
+ for attempt in attempts:
+ try:
+ return func()
+ except trap:
+ cleanup()
+
+ return func()
+
+
+def retry(*r_args, **r_kwargs):
+ """
+ Decorator wrapper for retry_call. Accepts arguments to retry_call
+ except func and then returns a decorator for the decorated function.
+
+ Ex:
+
+ >>> @retry(retries=3)
+ ... def my_func(a, b):
+ ... "this is my funk"
+ ... print(a, b)
+ >>> my_func.__doc__
+ 'this is my funk'
+ """
+
+ def decorate(func):
+ @functools.wraps(func)
+ def wrapper(*f_args, **f_kwargs):
+ bound = functools.partial(func, *f_args, **f_kwargs)
+ return retry_call(bound, *r_args, **r_kwargs)
+
+ return wrapper
+
+ return decorate
+
+
+def print_yielded(func):
+ """
+ Convert a generator into a function that prints all yielded elements
+
+ >>> @print_yielded
+ ... def x():
+ ... yield 3; yield None
+ >>> x()
+ 3
+ None
+ """
+ print_all = functools.partial(map, print)
+ print_results = compose(more_itertools.consume, print_all, func)
+ return functools.wraps(func)(print_results)
+
+
+def pass_none(func):
+ """
+ Wrap func so it's not called if its first param is None
+
+ >>> print_text = pass_none(print)
+ >>> print_text('text')
+ text
+ >>> print_text(None)
+ """
+
+ @functools.wraps(func)
+ def wrapper(param, *args, **kwargs):
+ if param is not None:
+ return func(param, *args, **kwargs)
+
+ return wrapper
+
+
+def assign_params(func, namespace):
+ """
+ Assign parameters from namespace where func solicits.
+
+ >>> def func(x, y=3):
+ ... print(x, y)
+ >>> assigned = assign_params(func, dict(x=2, z=4))
+ >>> assigned()
+ 2 3
+
+ The usual errors are raised if a function doesn't receive
+ its required parameters:
+
+ >>> assigned = assign_params(func, dict(y=3, z=4))
+ >>> assigned()
+ Traceback (most recent call last):
+ TypeError: func() ...argument...
+
+ It even works on methods:
+
+ >>> class Handler:
+ ... def meth(self, arg):
+ ... print(arg)
+ >>> assign_params(Handler().meth, dict(arg='crystal', foo='clear'))()
+ crystal
+ """
+ sig = inspect.signature(func)
+ params = sig.parameters.keys()
+ call_ns = {k: namespace[k] for k in params if k in namespace}
+ return functools.partial(func, **call_ns)
+
+
+def save_method_args(method):
+ """
+ Wrap a method such that when it is called, the args and kwargs are
+ saved on the method.
+
+ >>> class MyClass:
+ ... @save_method_args
+ ... def method(self, a, b):
+ ... print(a, b)
+ >>> my_ob = MyClass()
+ >>> my_ob.method(1, 2)
+ 1 2
+ >>> my_ob._saved_method.args
+ (1, 2)
+ >>> my_ob._saved_method.kwargs
+ {}
+ >>> my_ob.method(a=3, b='foo')
+ 3 foo
+ >>> my_ob._saved_method.args
+ ()
+ >>> my_ob._saved_method.kwargs == dict(a=3, b='foo')
+ True
+
+ The arguments are stored on the instance, allowing for
+ different instances to save different args.
+
+ >>> your_ob = MyClass()
+ >>> your_ob.method({str('x'): 3}, b=[4])
+ {'x': 3} [4]
+ >>> your_ob._saved_method.args
+ ({'x': 3},)
+ >>> my_ob._saved_method.args
+ ()
+ """
+ args_and_kwargs = collections.namedtuple('args_and_kwargs', 'args kwargs')
+
+ @functools.wraps(method)
+ def wrapper(self, *args, **kwargs):
+ attr_name = '_saved_' + method.__name__
+ attr = args_and_kwargs(args, kwargs)
+ setattr(self, attr_name, attr)
+ return method(self, *args, **kwargs)
+
+ return wrapper
+
+
+def except_(*exceptions, replace=None, use=None):
+ """
+ Replace the indicated exceptions, if raised, with the indicated
+ literal replacement or evaluated expression (if present).
+
+ >>> safe_int = except_(ValueError)(int)
+ >>> safe_int('five')
+ >>> safe_int('5')
+ 5
+
+ Specify a literal replacement with ``replace``.
+
+ >>> safe_int_r = except_(ValueError, replace=0)(int)
+ >>> safe_int_r('five')
+ 0
+
+ Provide an expression to ``use`` to pass through particular parameters.
+
+ >>> safe_int_pt = except_(ValueError, use='args[0]')(int)
+ >>> safe_int_pt('five')
+ 'five'
+
+ """
+
+ def decorate(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ try:
+ return func(*args, **kwargs)
+ except exceptions:
+ try:
+ return eval(use)
+ except TypeError:
+ return replace
+
+ return wrapper
+
+ return decorate
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco/text/Lorem ipsum.txt b/monEnvTP/lib/python3.8/site-packages/jaraco/text/Lorem ipsum.txt
new file mode 100644
index 0000000000000000000000000000000000000000..986f944b60b9900c22464a0c027d713854cc204e
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco/text/Lorem ipsum.txt
@@ -0,0 +1,2 @@
+Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
+Curabitur pretium tincidunt lacus. Nulla gravida orci a odio. Nullam varius, turpis et commodo pharetra, est eros bibendum elit, nec luctus magna felis sollicitudin mauris. Integer in mauris eu nibh euismod gravida. Duis ac tellus et risus vulputate vehicula. Donec lobortis risus a elit. Etiam tempor. Ut ullamcorper, ligula eu tempor congue, eros est euismod turpis, id tincidunt sapien risus a quam. Maecenas fermentum consequat mi. Donec fermentum. Pellentesque malesuada nulla a mi. Duis sapien sem, aliquet nec, commodo eget, consequat quis, neque. Aliquam faucibus, elit ut dictum aliquet, felis nisl adipiscing sapien, sed malesuada diam lacus eget erat. Cras mollis scelerisque nunc. Nullam arcu. Aliquam consequat. Curabitur augue lorem, dapibus quis, laoreet et, pretium ac, nisi. Aenean magna nisl, mollis quis, molestie eu, feugiat in, orci. In hac habitasse platea dictumst.
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco/text/__init__.py b/monEnvTP/lib/python3.8/site-packages/jaraco/text/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..5f75519a281b7a9ee013a1080063dab637965af2
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/jaraco/text/__init__.py
@@ -0,0 +1,600 @@
+import re
+import itertools
+import textwrap
+import functools
+
+try:
+ from importlib.resources import files # type: ignore
+except ImportError: # pragma: nocover
+ from importlib_resources import files # type: ignore
+
+from jaraco.functools import compose, method_cache
+from jaraco.context import ExceptionTrap
+
+
def substitution(old, new):
    """
    Return a function that will perform a substitution on a string
    """

    def substitute(text):
        return text.replace(old, new)

    return substitute
+
+
def multi_substitution(*substitutions):
    """
    Take a sequence of pairs specifying substitutions, and create
    a function that performs those substitutions.

    >>> multi_substitution(('foo', 'bar'), ('bar', 'baz'))('foo')
    'baz'
    """
    # ``compose`` applies its last function first, so build the individual
    # substitution steps and reverse them to apply in the order given.
    steps = [substitution(old, new) for old, new in substitutions]
    return compose(*reversed(steps))
+
+
class FoldedCase(str):
    """
    A case insensitive string class; behaves just like str
    except compares equal when the only variation is case.

    >>> s = FoldedCase('hello world')

    >>> s == 'Hello World'
    True

    >>> 'Hello World' == s
    True

    >>> s != 'Hello World'
    False

    >>> s.index('O')
    4

    >>> s.split('O')
    ['hell', ' w', 'rld']

    >>> sorted(map(FoldedCase, ['GAMMA', 'alpha', 'Beta']))
    ['alpha', 'Beta', 'GAMMA']

    Sequence membership is straightforward.

    >>> "Hello World" in [s]
    True
    >>> s in ["Hello World"]
    True

    You may test for set inclusion, but candidate and elements
    must both be folded.

    >>> FoldedCase("Hello World") in {s}
    True
    >>> s in {FoldedCase("Hello World")}
    True

    String inclusion works as long as the FoldedCase object
    is on the right.

    >>> "hello" in FoldedCase("Hello World")
    True

    But not if the FoldedCase object is on the left:

    >>> FoldedCase('hello') in 'Hello World'
    False

    In that case, use ``in_``:

    >>> FoldedCase('hello').in_('Hello World')
    True

    >>> FoldedCase('hello') > FoldedCase('Hello')
    False
    """

    # All comparisons (and the hash) delegate to the case-folded form.
    def __eq__(self, other):
        return self.lower() == other.lower()

    def __ne__(self, other):
        return not self == other

    def __lt__(self, other):
        return self.lower() < other.lower()

    def __gt__(self, other):
        return self.lower() > other.lower()

    def __hash__(self):
        return hash(self.lower())

    def __contains__(self, other):
        # Deliberately bypass the cached ``lower`` below by calling
        # str.lower directly.
        return other.lower() in str.lower(self)

    def in_(self, other):
        "Does self appear in other?"
        return self in FoldedCase(other)

    # lower() is called by every comparison, so cache its result.
    @method_cache
    def lower(self):
        return str.lower(self)

    def index(self, sub):
        # Case-insensitive index: fold both sides before searching.
        return self.lower().index(sub.lower())

    def split(self, splitter=' ', maxsplit=0):
        # Case-insensitive split via a regex with the IGNORECASE flag.
        return re.compile(re.escape(splitter), re.I).split(self, maxsplit)
+
+
# Python 3.8 compatibility
# ExceptionTrap comes from jaraco.context; its ``.passes`` wrapper turns a
# function that may raise into a predicate returning True iff the trapped
# exception (here UnicodeDecodeError) was NOT raised.
_unicode_trap = ExceptionTrap(UnicodeDecodeError)


@_unicode_trap.passes
def is_decodable(value):
    r"""
    Return True if the supplied value is decodable (using the default
    encoding).

    >>> is_decodable(b'\xff')
    False
    >>> is_decodable(b'\x32')
    True
    """
    # The boolean result is produced by the ``passes`` decorator; the body
    # only attempts the decode and lets UnicodeDecodeError propagate.
    value.decode()
+
+
def is_binary(value):
    r"""
    Return True if the value appears to be binary (that is, it's a byte
    string and isn't decodable).

    >>> is_binary(b'\xff')
    True
    >>> is_binary('\xff')
    False
    """
    # Non-bytes values are never "binary"; for bytes, binary means
    # not decodable with the default encoding.
    if not isinstance(value, bytes):
        return False
    return not is_decodable(value)
+
+
def trim(s):
    r"""
    Trim something like a docstring to remove the whitespace that
    is common due to indentation and formatting.

    >>> trim("\n\tfoo = bar\n\t\tbar = baz\n")
    'foo = bar\n\tbar = baz'
    """
    # Dedent removes the indentation common to all lines; strip then
    # drops the surrounding blank lines.
    dedented = textwrap.dedent(s)
    return dedented.strip()
+
+
def wrap(s):
    """
    Wrap lines of text, retaining existing newlines as paragraph
    markers: each input line is re-wrapped as its own paragraph,
    and the resulting paragraphs are separated by blank lines.
    """
    rewrapped = (
        '\n'.join(textwrap.wrap(paragraph)) for paragraph in s.splitlines()
    )
    return '\n\n'.join(rewrapped)
+
+
def unwrap(s):
    r"""
    Given a multi-line string, return an unwrapped version: paragraphs
    (separated by one or more blank lines) are each collapsed onto a
    single line, and joined by single newlines.
    """
    # Paragraph boundary is one or more blank lines; within a paragraph,
    # newlines become spaces.
    flattened = (
        paragraph.replace('\n', ' ') for paragraph in re.split(r'\n\n+', s)
    )
    return '\n'.join(flattened)
+
+
# Sample text bundled alongside this module ('Lorem ipsum.txt'); used by
# the wrap/unwrap doctests above.
lorem_ipsum: str = files(__name__).joinpath('Lorem ipsum.txt').read_text()
+
+
class Splitter(object):
    """
    Object that will split a string with the given arguments for each call.

    >>> s = Splitter(',')
    >>> s('hello, world, this is your, master calling')
    ['hello', ' world', ' this is your', ' master calling']
    """

    def __init__(self, *args):
        # Stored verbatim; forwarded to str.split on every call.
        self.args = args

    def __call__(self, s):
        return s.split(*self.args)
+
+
def indent(string, prefix=' ' * 4):
    """
    Return ``string`` with ``prefix`` prepended (four spaces by default).

    >>> indent('foo')
    '    foo'
    """
    return ''.join((prefix, string))
+
+
class WordSet(tuple):
    """
    Given an identifier, return the words that identifier represents,
    whether in camel case, underscore-separated, etc.

    >>> WordSet.parse("camelCase")
    ('camel', 'Case')

    >>> WordSet.parse("under_sep")
    ('under', 'sep')

    Acronyms should be retained

    >>> WordSet.parse("firstSNL")
    ('first', 'SNL')

    >>> WordSet.parse("you_and_I")
    ('you', 'and', 'I')

    >>> WordSet.parse("A simple test")
    ('A', 'simple', 'test')

    Multiple caps should not interfere with the first cap of another word.

    >>> WordSet.parse("myABCClass")
    ('my', 'ABC', 'Class')

    The result is a WordSet, so you can get the form you need.

    >>> WordSet.parse("myABCClass").underscore_separated()
    'my_ABC_Class'

    >>> WordSet.parse('a-command').camel_case()
    'ACommand'

    >>> WordSet.parse('someIdentifier').lowered().space_separated()
    'some identifier'

    Slices of the result should return another WordSet.

    >>> WordSet.parse('taken-out-of-context')[1:].underscore_separated()
    'out_of_context'

    >>> WordSet.from_class_name(WordSet()).lowered().space_separated()
    'word set'

    >>> example = WordSet.parse('figured it out')
    >>> example.headless_camel_case()
    'figuredItOut'
    >>> example.dash_separated()
    'figured-it-out'

    """

    # A word is either a capitalized/lowercase run or an all-caps run
    # (an acronym) not followed by a lowercase letter.
    _pattern = re.compile('([A-Z]?[a-z]+)|([A-Z]+(?![a-z]))')

    @classmethod
    def parse(cls, identifier):
        return WordSet(m.group(0) for m in cls._pattern.finditer(identifier))

    @classmethod
    def from_class_name(cls, subject):
        return cls.parse(subject.__class__.__name__)

    def capitalized(self):
        return WordSet(map(str.capitalize, self))

    def lowered(self):
        return WordSet(map(str.lower, self))

    def camel_case(self):
        return ''.join(self.capitalized())

    def headless_camel_case(self):
        # First word is lowercased; the remainder is camel-cased.
        rest = iter(self)
        leading = next(rest).lower()
        return leading + WordSet(rest).camel_case()

    def underscore_separated(self):
        return '_'.join(self)

    def dash_separated(self):
        return '-'.join(self)

    def space_separated(self):
        return ' '.join(self)

    def trim_left(self, item):
        """
        Remove the item from the beginning of the set.

        >>> WordSet.parse('foo bar').trim_left('foo')
        ('bar',)
        >>> WordSet.parse('foo bar').trim_left('bar')
        ('foo', 'bar')
        >>> WordSet.parse('').trim_left('bar')
        ()
        """
        matched = bool(self) and self[0] == item
        return self[1:] if matched else self

    def trim_right(self, item):
        """
        Remove the item from the end of the set.

        >>> WordSet.parse('foo bar').trim_right('foo')
        ('foo', 'bar')
        >>> WordSet.parse('foo bar').trim_right('bar')
        ('foo',)
        >>> WordSet.parse('').trim_right('bar')
        ()
        """
        matched = bool(self) and self[-1] == item
        return self[:-1] if matched else self

    def trim(self, item):
        """
        Remove the item from both ends of the set.

        >>> WordSet.parse('foo bar').trim('foo')
        ('bar',)
        """
        return self.trim_left(item).trim_right(item)

    def __getitem__(self, item):
        # Slicing yields another WordSet so the *_separated helpers
        # remain available on the result.
        result = super(WordSet, self).__getitem__(item)
        return WordSet(result) if isinstance(item, slice) else result
+
+
# Backward-compatible alias: ``words(identifier)`` predates WordSet.parse.
words = WordSet.parse
+
+
def simple_html_strip(s):
    r"""
    Remove HTML from the string `s`.

    >>> str(simple_html_strip(''))
    ''

    >>> print(simple_html_strip('A <bold>stormy</bold> day in paradise'))
    A stormy day in paradise

    >>> print(simple_html_strip('Somebody <!-- do not --> tell the truth.'))
    Somebody  tell the truth.

    >>> print(simple_html_strip('What about<br/>\nmultiple lines?'))
    What about
    multiple lines?
    """
    # Three alternatives: comments, tags, and runs of plain text; only the
    # text group (3) is kept.
    matcher = re.compile('(<!--.*?-->)|(<[^>]*>)|([^<]+)', re.DOTALL)
    return ''.join(m.group(3) or '' for m in matcher.finditer(s))
+
+
class SeparatedValues(str):
    """
    A string separated by a separator. Overrides __iter__ for getting
    the values.

    >>> list(SeparatedValues('a,b,c'))
    ['a', 'b', 'c']

    Whitespace is stripped and empty values are discarded.

    >>> list(SeparatedValues(' a, b , c, '))
    ['a', 'b', 'c']
    """

    separator = ','

    def __iter__(self):
        # Strip whitespace around each value; filter(None, ...) drops
        # values that are empty after stripping.
        stripped = (part.strip() for part in self.split(self.separator))
        return filter(None, stripped)
+
+
class Stripper:
    r"""
    Given a series of lines, find the common prefix and strip it from them.

    >>> lines = [
    ...     'abcdefg\n',
    ...     'abc\n',
    ...     'abcde\n',
    ... ]
    >>> res = Stripper.strip_prefix(lines)
    >>> res.prefix
    'abc'
    >>> list(res.lines)
    ['defg\n', '\n', 'de\n']

    If no prefix is common, nothing should be stripped.

    >>> lines = [
    ...     'abcd\n',
    ...     '1234\n',
    ... ]
    >>> res = Stripper.strip_prefix(lines)
    >>> res.prefix
    ''
    >>> list(res.lines)
    ['abcd\n', '1234\n']
    """

    def __init__(self, prefix, lines):
        self.prefix = prefix
        # Lines are stripped lazily: ``self.lines`` is a map over the
        # (possibly one-shot) ``lines`` iterable.
        self.lines = map(self, lines)

    @classmethod
    def strip_prefix(cls, lines):
        """
        Compute the common prefix of ``lines`` and return a Stripper
        whose ``lines`` yields each line with that prefix removed.
        """
        # tee() lets us compute the prefix without exhausting ``lines``.
        prefix_lines, lines = itertools.tee(lines)
        prefix = functools.reduce(cls.common_prefix, prefix_lines)
        return cls(prefix, lines)

    def __call__(self, line):
        # Strip the prefix from a single line (no-op when there is none).
        if not self.prefix:
            return line
        null, prefix, rest = line.partition(self.prefix)
        return rest

    @staticmethod
    def common_prefix(s1, s2):
        """
        Return the common prefix of two lines.
        """
        index = min(len(s1), len(s2))
        while s1[:index] != s2[:index]:
            index -= 1
        return s1[:index]
+
+
def remove_prefix(text, prefix):
    """
    Remove the prefix from the text if it exists.

    >>> remove_prefix('underwhelming performance', 'underwhelming ')
    'performance'

    >>> remove_prefix('something special', 'sample')
    'something special'

    Only a leading occurrence is removed; later occurrences are kept.

    >>> remove_prefix('abcabc', 'abc')
    'abc'
    """
    # The previous rpartition-based implementation removed everything up
    # to the *last* occurrence of ``prefix`` and raised on an empty
    # prefix; match str.removeprefix semantics instead.
    if prefix and text.startswith(prefix):
        return text[len(prefix):]
    return text
+
+
def remove_suffix(text, suffix):
    """
    Remove the suffix from the text if it exists.

    >>> remove_suffix('name.git', '.git')
    'name'

    >>> remove_suffix('something special', 'sample')
    'something special'

    Only a trailing occurrence is removed; earlier occurrences are kept.

    >>> remove_suffix('a.git.git', '.git')
    'a.git'
    """
    # The previous partition-based implementation truncated at the *first*
    # occurrence of ``suffix`` and raised on an empty suffix; match
    # str.removesuffix semantics instead.
    if suffix and text.endswith(suffix):
        return text[: -len(suffix)]
    return text
+
+
def normalize_newlines(text):
    r"""
    Replace alternate newlines with the canonical newline.

    >>> normalize_newlines('Lorem Ipsum\u2029')
    'Lorem Ipsum\n'
    >>> normalize_newlines('Lorem Ipsum\r\n')
    'Lorem Ipsum\n'
    >>> normalize_newlines('Lorem Ipsum\x85')
    'Lorem Ipsum\n'
    """
    # '\r\n' must come first so it is consumed as a single unit rather
    # than being replaced as two separate newlines.
    alternates = ['\r\n', '\r', '\n', '\u0085', '\u2028', '\u2029']
    return re.sub('|'.join(alternates), '\n', text)
+
+
def _nonblank(line):
    # Truthy for lines that are non-empty and not comments.
    # (Previously the parameter was named ``str``, shadowing the builtin.)
    return line and not line.startswith('#')


@functools.singledispatch
def yield_lines(iterable):
    r"""
    Yield valid lines of a string or iterable.

    Blank lines and comment lines (starting with ``#``) are skipped;
    leading/trailing whitespace is stripped from each line.

    >>> list(yield_lines(''))
    []
    >>> list(yield_lines(['foo', 'bar']))
    ['foo', 'bar']
    >>> list(yield_lines('foo\nbar'))
    ['foo', 'bar']
    >>> list(yield_lines('\nfoo\n#bar\nbaz #comment'))
    ['foo', 'baz #comment']
    >>> list(yield_lines(['foo\nbar', 'baz', 'bing\n\n\n']))
    ['foo', 'bar', 'baz', 'bing']
    """
    # Generic case: flatten each element recursively (each element may be
    # a multi-line string, dispatched to the str overload below).
    return itertools.chain.from_iterable(map(yield_lines, iterable))


@yield_lines.register(str)
def _(text):
    return filter(_nonblank, map(str.strip, text.splitlines()))
+
+
def drop_comment(line):
    """
    Drop comments.

    >>> drop_comment('foo # bar')
    'foo'

    A hash without a space may be in a URL.

    >>> drop_comment('http://example.com/foo#bar')
    'http://example.com/foo#bar'
    """
    # Only ' #' (space then hash) starts a comment; a bare '#' may be a
    # URL fragment.
    before, _, _ = line.partition(' #')
    return before
+
+
def join_continuation(lines):
    r"""
    Join lines continued by a trailing backslash.

    >>> list(join_continuation(['foo \\', 'bar', 'baz']))
    ['foobar', 'baz']
    >>> list(join_continuation(['foo \\', 'bar \\', 'baz']))
    ['foobarbaz']

    Not sure why, but...
    The character preceding the backslash is also elided.

    >>> list(join_continuation(['goo\\', 'dly']))
    ['godly']

    A terrible idea, but...
    If no line is available to continue, suppress the lines.

    >>> list(join_continuation(['foo', 'bar\\', 'baz\\']))
    ['foo']
    """
    stream = iter(lines)
    for current in stream:
        # Keep consuming continuation lines while the accumulated line
        # still ends with a backslash.
        while current.endswith('\\'):
            try:
                continuation = next(stream)
            except StopIteration:
                # Dangling continuation at end of input: drop it entirely.
                return
            # NB: [:-2] intentionally elides the char before the backslash
            # as well (doctest-pinned legacy behavior).
            current = current[:-2].strip() + continuation
        yield current
diff --git a/monEnvTP/lib/python3.8/site-packages/jaraco/text/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/jaraco/text/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9c47dc35ba8dc392df7ce85c97b9a496f995f49e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/jaraco/text/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/more_itertools-8.12.0.dist-info/INSTALLER b/monEnvTP/lib/python3.8/site-packages/more_itertools-8.12.0.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/more_itertools-8.12.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/monEnvTP/lib/python3.8/site-packages/more_itertools-8.12.0.dist-info/LICENSE b/monEnvTP/lib/python3.8/site-packages/more_itertools-8.12.0.dist-info/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..0a523bece3e50519653c4d7a38399baa487fefa1
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/more_itertools-8.12.0.dist-info/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2012 Erik Rose
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/monEnvTP/lib/python3.8/site-packages/more_itertools-8.12.0.dist-info/METADATA b/monEnvTP/lib/python3.8/site-packages/more_itertools-8.12.0.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..9efacdd7454e55f0a3a3701add727f2c0cd5ca46
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/more_itertools-8.12.0.dist-info/METADATA
@@ -0,0 +1,521 @@
+Metadata-Version: 2.1
+Name: more-itertools
+Version: 8.12.0
+Summary: More routines for operating on iterables, beyond itertools
+Home-page: https://github.com/more-itertools/more-itertools
+Author: Erik Rose
+Author-email: erikrose@grinchcentral.com
+License: MIT
+Keywords: itertools,iterator,iteration,filter,peek,peekable,collate,chunk,chunked
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Natural Language :: English
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Software Development :: Libraries
+Requires-Python: >=3.5
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+
+==============
+More Itertools
+==============
+
+.. image:: https://readthedocs.org/projects/more-itertools/badge/?version=latest
+ :target: https://more-itertools.readthedocs.io/en/stable/
+
+Python's ``itertools`` library is a gem - you can compose elegant solutions
+for a variety of problems with the functions it provides. In ``more-itertools``
+we collect additional building blocks, recipes, and routines for working with
+Python iterables.
+
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Grouping | `chunked <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.chunked>`_, |
+| | `ichunked <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.ichunked>`_, |
+| | `sliced <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sliced>`_, |
+| | `distribute <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.distribute>`_, |
+| | `divide <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.divide>`_, |
+| | `split_at <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_at>`_, |
+| | `split_before <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_before>`_, |
+| | `split_after <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_after>`_, |
+| | `split_into <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_into>`_, |
+| | `split_when <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.split_when>`_, |
+| | `bucket <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.bucket>`_, |
+| | `unzip <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unzip>`_, |
+| | `grouper <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.grouper>`_, |
+| | `partition <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.partition>`_ |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Lookahead and lookback | `spy <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.spy>`_, |
+| | `peekable <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.peekable>`_, |
+| | `seekable <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.seekable>`_ |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Windowing | `windowed <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.windowed>`_, |
+| | `substrings <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.substrings>`_, |
+| | `substrings_indexes <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.substrings_indexes>`_, |
+| | `stagger <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.stagger>`_, |
+| | `windowed_complete <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.windowed_complete>`_, |
+| | `pairwise <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.pairwise>`_, |
+| | `triplewise <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.triplewise>`_, |
+| | `sliding_window <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sliding_window>`_ |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Augmenting | `count_cycle <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.count_cycle>`_, |
+| | `intersperse <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.intersperse>`_, |
+| | `padded <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.padded>`_, |
+| | `mark_ends <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.mark_ends>`_, |
+| | `repeat_last <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.repeat_last>`_, |
+| | `adjacent <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.adjacent>`_, |
+| | `groupby_transform <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.groupby_transform>`_, |
+| | `pad_none <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.pad_none>`_, |
+| | `ncycles <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.ncycles>`_ |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Combining | `collapse <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.collapse>`_, |
+| | `sort_together <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sort_together>`_, |
+| | `interleave <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.interleave>`_, |
+| | `interleave_longest <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.interleave_longest>`_, |
+| | `interleave_evenly <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.interleave_evenly>`_, |
+| | `zip_offset <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.zip_offset>`_, |
+| | `zip_equal <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.zip_equal>`_, |
+| | `zip_broadcast <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.zip_broadcast>`_, |
+| | `dotproduct <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.dotproduct>`_, |
+| | `convolve <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.convolve>`_, |
+| | `flatten <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.flatten>`_, |
+| | `roundrobin <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.roundrobin>`_, |
+| | `prepend <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.prepend>`_, |
+| | `value_chain <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.value_chain>`_ |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Summarizing | `ilen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.ilen>`_, |
+| | `unique_to_each <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique_to_each>`_, |
+| | `sample <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.sample>`_, |
+| | `consecutive_groups <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.consecutive_groups>`_, |
+| | `run_length <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.run_length>`_, |
+| | `map_reduce <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.map_reduce>`_, |
+| | `exactly_n <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.exactly_n>`_, |
+| | `is_sorted <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.is_sorted>`_, |
+| | `all_equal <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.all_equal>`_, |
+| | `all_unique <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.all_unique>`_, |
+| | `minmax <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.minmax>`_, |
+| | `first_true <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.first_true>`_, |
+| | `quantify <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.quantify>`_ |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Selecting | `islice_extended <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.islice_extended>`_, |
+| | `first <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.first>`_, |
+| | `last <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.last>`_, |
+| | `one <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.one>`_, |
+| | `only <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.only>`_, |
+| | `strictly_n <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.strictly_n>`_, |
+| | `strip <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.strip>`_, |
+| | `lstrip <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.lstrip>`_, |
+| | `rstrip <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.rstrip>`_, |
+| | `filter_except <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.filter_except>`_, |
+| | `map_except <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.map_except>`_, |
+| | `nth_or_last <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.nth_or_last>`_, |
+| | `unique_in_window <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique_in_window>`_, |
+| | `before_and_after <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.before_and_after>`_, |
+| | `nth <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.nth>`_, |
+| | `take <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.take>`_, |
+| | `tail <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.tail>`_, |
+|                        | `unique_everseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique_everseen>`_,                                                              |
+| | `unique_justseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.unique_justseen>`_, |
+| | `duplicates_everseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.duplicates_everseen>`_, |
+| | `duplicates_justseen <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.duplicates_justseen>`_ |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Combinatorics | `distinct_permutations <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.distinct_permutations>`_, |
+| | `distinct_combinations <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.distinct_combinations>`_, |
+| | `circular_shifts <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.circular_shifts>`_, |
+| | `partitions <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.partitions>`_, |
+| | `set_partitions <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.set_partitions>`_, |
+| | `product_index <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.product_index>`_, |
+| | `combination_index <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.combination_index>`_, |
+| | `permutation_index <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.permutation_index>`_, |
+| | `powerset <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.powerset>`_, |
+| | `random_product <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_product>`_, |
+| | `random_permutation <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_permutation>`_, |
+| | `random_combination <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_combination>`_, |
+| | `random_combination_with_replacement <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.random_combination_with_replacement>`_, |
+| | `nth_product <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.nth_product>`_, |
+| | `nth_permutation <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.nth_permutation>`_, |
+| | `nth_combination <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.nth_combination>`_ |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Wrapping | `always_iterable <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.always_iterable>`_, |
+| | `always_reversible <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.always_reversible>`_, |
+| | `countable <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.countable>`_, |
+| | `consumer <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.consumer>`_, |
+| | `with_iter <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.with_iter>`_, |
+| | `iter_except <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.iter_except>`_ |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| Others | `locate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.locate>`_, |
+| | `rlocate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.rlocate>`_, |
+| | `replace <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.replace>`_, |
+| | `numeric_range <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.numeric_range>`_, |
+| | `side_effect <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.side_effect>`_, |
+| | `iterate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.iterate>`_, |
+| | `difference <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.difference>`_, |
+| | `make_decorator <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.make_decorator>`_, |
+| | `SequenceView <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.SequenceView>`_, |
+| | `time_limited <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.time_limited>`_, |
+| | `consume <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.consume>`_, |
+| | `tabulate <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.tabulate>`_, |
+| | `repeatfunc <https://more-itertools.readthedocs.io/en/stable/api.html#more_itertools.repeatfunc>`_ |
++------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+
+
+Getting started
+===============
+
+To get started, install the library with `pip <https://pip.pypa.io/en/stable/>`_:
+
+.. code-block:: shell
+
+ pip install more-itertools
+
+The recipes from the `itertools docs <https://docs.python.org/3/library/itertools.html#itertools-recipes>`_
+are included in the top-level package:
+
+.. code-block:: python
+
+ >>> from more_itertools import flatten
+ >>> iterable = [(0, 1), (2, 3)]
+ >>> list(flatten(iterable))
+ [0, 1, 2, 3]
+
+Several new recipes are available as well:
+
+.. code-block:: python
+
+ >>> from more_itertools import chunked
+ >>> iterable = [0, 1, 2, 3, 4, 5, 6, 7, 8]
+ >>> list(chunked(iterable, 3))
+ [[0, 1, 2], [3, 4, 5], [6, 7, 8]]
+
+ >>> from more_itertools import spy
+ >>> iterable = (x * x for x in range(1, 6))
+ >>> head, iterable = spy(iterable, n=3)
+ >>> list(head)
+ [1, 4, 9]
+ >>> list(iterable)
+ [1, 4, 9, 16, 25]
+
+
+
+For the full listing of functions, see the `API documentation <https://more-itertools.readthedocs.io/en/stable/api.html>`_.
+
+
+Links elsewhere
+===============
+
+Blog posts about ``more-itertools``:
+
+* `Yo, I heard you like decorators <https://www.bbayles.com/index/decorator_factory>`__
+* `Tour of Python Itertools <https://martinheinz.dev/blog/16>`__ (`Alternate <https://dev.to/martinheinz/tour-of-python-itertools-4122>`__)
+* `Real-World Python More Itertools <https://www.gidware.com/real-world-more-itertools/>`_
+
+
+Development
+===========
+
+``more-itertools`` is maintained by `@erikrose <https://github.com/erikrose>`_
+and `@bbayles <https://github.com/bbayles>`_, with help from `many others <https://github.com/more-itertools/more-itertools/graphs/contributors>`_.
+If you have a problem or suggestion, please file a bug or pull request in this
+repository. Thanks for contributing!
+
+
+Version History
+===============
+
+
+ :noindex:
+
+8.12.0
+------
+
+* Bug fixes
+ * Some documentation issues were fixed (thanks to Masynchin, spookylukey, astrojuanlu, and stephengmatthews)
+ * Python 3.5 support was temporarily restored (thanks to mattbonnell)
+
+8.11.0
+------
+
+* New functions
+ * The before_and_after, sliding_window, and triplewise recipes from the Python 3.10 docs were added
+ * duplicates_everseen and duplicates_justseen (thanks to OrBin and DavidPratt512)
+ * minmax (thanks to Ricocotam, MSeifert04, and ruancomelli)
+ * strictly_n (thanks to hwalinga and NotWearingPants)
+ * unique_in_window
+
+* Changes to existing functions
+ * groupby_transform had its type stub improved (thanks to mjk4 and ruancomelli)
+ * is_sorted now accepts a ``strict`` parameter (thanks to Dutcho and ruancomelli)
+ * zip_broadcast was updated to fix a bug (thanks to kalekundert)
+
+8.10.0
+------
+
+* Changes to existing functions
+ * The type stub for iter_except was improved (thanks to MarcinKonowalczyk)
+
+* Other changes:
+ * Type stubs now ship with the source release (thanks to saaketp)
+ * The Sphinx docs were improved (thanks to MarcinKonowalczyk)
+
+8.9.0
+-----
+
+* New functions
+ * interleave_evenly (thanks to mbugert)
+ * repeat_each (thanks to FinalSh4re)
+ * chunked_even (thanks to valtron)
+ * map_if (thanks to sassbalint)
+ * zip_broadcast (thanks to kalekundert)
+
+* Changes to existing functions
+ * The type stub for chunked was improved (thanks to PhilMacKay)
+ * The type stubs for zip_equal and `zip_offset` were improved (thanks to maffoo)
+ * Building Sphinx docs locally was improved (thanks to MarcinKonowalczyk)
+
+8.8.0
+-----
+
+* New functions
+ * countable (thanks to krzysieq)
+
+* Changes to existing functions
+  * split_before was updated to handle empty collections (thanks to TiunovNN)
+ * unique_everseen got a performance boost (thanks to Numerlor)
+ * The type hint for value_chain was corrected (thanks to vr2262)
+
+8.7.0
+-----
+
+* New functions
+ * convolve (from the Python itertools docs)
+ * product_index, combination_index, and permutation_index (thanks to N8Brooks)
+ * value_chain (thanks to jenstroeger)
+
+* Changes to existing functions
+ * distinct_combinations now uses a non-recursive algorithm (thanks to knutdrand)
+ * pad_none is now the preferred name for padnone, though the latter remains available.
+ * pairwise will now use the Python standard library implementation on Python 3.10+
+ * sort_together now accepts a ``key`` argument (thanks to brianmaissy)
+ * seekable now has a ``peek`` method, and can indicate whether the iterator it's wrapping is exhausted (thanks to gsakkis)
+ * time_limited can now indicate whether its iterator has expired (thanks to roysmith)
+ * The implementation of unique_everseen was improved (thanks to plammens)
+
+* Other changes:
+ * Various documentation updates (thanks to cthoyt, Evantm, and cyphase)
+
+8.6.0
+-----
+
+* New itertools
+ * all_unique (thanks to brianmaissy)
+ * nth_product and nth_permutation (thanks to N8Brooks)
+
+* Changes to existing itertools
+ * chunked and sliced now accept a ``strict`` parameter (thanks to shlomif and jtwool)
+
+* Other changes
+ * Python 3.5 has reached its end of life and is no longer supported.
+ * Python 3.9 is officially supported.
+ * Various documentation fixes (thanks to timgates42)
+
+8.5.0
+-----
+
+* New itertools
+ * windowed_complete (thanks to MarcinKonowalczyk)
+
+* Changes to existing itertools:
+ * The is_sorted implementation was improved (thanks to cool-RR)
+ * The groupby_transform now accepts a ``reducefunc`` parameter.
+ * The last implementation was improved (thanks to brianmaissy)
+
+* Other changes
+ * Various documentation fixes (thanks to craigrosie, samuelstjean, PiCT0)
+ * The tests for distinct_combinations were improved (thanks to Minabsapi)
+ * Automated tests now run on GitHub Actions. All commits now check:
+ * That unit tests pass
+ * That the examples in docstrings work
+ * That test coverage remains high (using `coverage`)
+ * For linting errors (using `flake8`)
+ * For consistent style (using `black`)
+ * That the type stubs work (using `mypy`)
+ * That the docs build correctly (using `sphinx`)
+ * That packages build correctly (using `twine`)
+
+8.4.0
+-----
+
+* New itertools
+ * mark_ends (thanks to kalekundert)
+ * is_sorted
+
+* Changes to existing itertools:
+ * islice_extended can now be used with real slices (thanks to cool-RR)
+ * The implementations for filter_except and map_except were improved (thanks to SergBobrovsky)
+
+* Other changes
+ * Automated tests now enforce code style (using `black <https://github.com/psf/black>`__)
+ * The various signatures of islice_extended and numeric_range now appear in the docs (thanks to dsfulf)
+ * The test configuration for mypy was updated (thanks to blueyed)
+
+
+8.3.0
+-----
+
+* New itertools
+ * zip_equal (thanks to frankier and alexmojaki)
+
+* Changes to existing itertools:
+  * split_at, split_before, split_after, and split_when all got a ``maxsplit`` parameter (thanks to jferard and ilai-deutel)
+ * split_at now accepts a ``keep_separator`` parameter (thanks to jferard)
+ * distinct_permutations can now generate ``r``-length permutations (thanks to SergBobrovsky and ilai-deutel)
+ * The windowed implementation was improved (thanks to SergBobrovsky)
+ * The spy implementation was improved (thanks to has2k1)
+
+* Other changes
+ * Type stubs are now tested with ``stubtest`` (thanks to ilai-deutel)
+ * Tests now run with ``python -m unittest`` instead of ``python setup.py test`` (thanks to jdufresne)
+
+8.2.0
+-----
+
+* Bug fixes
+ * The .pyi files for typing were updated. (thanks to blueyed and ilai-deutel)
+
+* Changes to existing itertools:
+ * numeric_range now behaves more like the built-in range. (thanks to jferard)
+ * bucket now allows for enumerating keys. (thanks to alexchandel)
+ * sliced now should now work for numpy arrays. (thanks to sswingle)
+ * seekable now has a ``maxlen`` parameter.
+
+8.1.0
+-----
+
+* Bug fixes
+ * partition works with ``pred=None`` again. (thanks to MSeifert04)
+
+* New itertools
+ * sample (thanks to tommyod)
+ * nth_or_last (thanks to d-ryzhikov)
+
+* Changes to existing itertools:
+ * The implementation for divide was improved. (thanks to jferard)
+
+8.0.2
+-----
+
+* Bug fixes
+ * The type stub files are now part of the wheel distribution (thanks to keisheiled)
+
+8.0.1
+-----
+
+* Bug fixes
+ * The type stub files now work for functions imported from the
+ root package (thanks to keisheiled)
+
+8.0.0
+-----
+
+* New itertools and other additions
+ * This library now ships type hints for use with mypy.
+ (thanks to ilai-deutel for the implementation, and to gabbard and fmagin for assistance)
+ * split_when (thanks to jferard)
+ * repeat_last (thanks to d-ryzhikov)
+
+* Changes to existing itertools:
+ * The implementation for set_partitions was improved. (thanks to jferard)
+ * partition was optimized for expensive predicates. (thanks to stevecj)
+ * unique_everseen and groupby_transform were re-factored. (thanks to SergBobrovsky)
+ * The implementation for difference was improved. (thanks to Jabbey92)
+
+* Other changes
+ * Python 3.4 has reached its end of life and is no longer supported.
+ * Python 3.8 is officially supported. (thanks to jdufresne)
+ * The ``collate`` function has been deprecated.
+ It raises a ``DeprecationWarning`` if used, and will be removed in a future release.
+ * one and only now provide more informative error messages. (thanks to gabbard)
+ * Unit tests were moved outside of the main package (thanks to jdufresne)
+ * Various documentation fixes (thanks to kriomant, gabbard, jdufresne)
+
+
+7.2.0
+-----
+
+* New itertools
+ * distinct_combinations
+ * set_partitions (thanks to kbarrett)
+ * filter_except
+ * map_except
+
+7.1.0
+-----
+
+* New itertools
+ * ichunked (thanks davebelais and youtux)
+ * only (thanks jaraco)
+
+* Changes to existing itertools:
+ * numeric_range now supports ranges specified by
+ ``datetime.datetime`` and ``datetime.timedelta`` objects (thanks to MSeifert04 for tests).
+ * difference now supports an *initial* keyword argument.
+
+
+* Other changes
+ * Various documentation fixes (thanks raimon49, pylang)
+
+7.0.0
+-----
+
+* New itertools:
+ * time_limited
+ * partitions (thanks to rominf and Saluev)
+ * substrings_indexes (thanks to rominf)
+
+* Changes to existing itertools:
+ * collapse now treats ``bytes`` objects the same as ``str`` objects. (thanks to Sweenpet)
+
+The major version update is due to the change in the default behavior of
+collapse. It now treats ``bytes`` objects the same as ``str`` objects.
+This aligns its behavior with always_iterable.
+
+.. code-block:: python
+
+ >>> from more_itertools import collapse
+ >>> iterable = [[1, 2], b'345', [6]]
+ >>> print(list(collapse(iterable)))
+ [1, 2, b'345', 6]
+
+6.0.0
+-----
+
+* Major changes:
+ * Python 2.7 is no longer supported. The 5.0.0 release will be the last
+ version targeting Python 2.7.
+ * All future releases will target the active versions of Python 3.
+ As of 2019, those are Python 3.4 and above.
+ * The ``six`` library is no longer a dependency.
+ * The accumulate function is no longer part of this library. You
+ may import a better version from the standard ``itertools`` module.
+
+* Changes to existing itertools:
+ * The order of the parameters in grouper have changed to match
+ the latest recipe in the itertools documentation. Use of the old order
+ will be supported in this release, but emit a ``DeprecationWarning``.
+ The legacy behavior will be dropped in a future release. (thanks to jaraco)
+ * distinct_permutations was improved (thanks to jferard - see also `permutations with unique values <https://stackoverflow.com/questions/6284396/permutations-with-unique-values>`_ at StackOverflow.)
+ * An unused parameter was removed from substrings. (thanks to pylang)
+
+* Other changes:
+ * The docs for unique_everseen were improved. (thanks to jferard and MSeifert04)
+ * Several Python 2-isms were removed. (thanks to jaraco, MSeifert04, and hugovk)
+
+
diff --git a/monEnvTP/lib/python3.8/site-packages/more_itertools-8.12.0.dist-info/RECORD b/monEnvTP/lib/python3.8/site-packages/more_itertools-8.12.0.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..314c477d602c5f303c0a5c2a3f1b024817e946a0
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/more_itertools-8.12.0.dist-info/RECORD
@@ -0,0 +1,16 @@
+more_itertools-8.12.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+more_itertools-8.12.0.dist-info/LICENSE,sha256=CfHIyelBrz5YTVlkHqm4fYPAyw_QB-te85Gn4mQ8GkY,1053
+more_itertools-8.12.0.dist-info/METADATA,sha256=QCCEcisEPr7iSfBIKCukhP-FbG9ehMK8tDIliZ3FBDc,39405
+more_itertools-8.12.0.dist-info/RECORD,,
+more_itertools-8.12.0.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92
+more_itertools-8.12.0.dist-info/top_level.txt,sha256=fAuqRXu9LPhxdB9ujJowcFOu1rZ8wzSpOW9_jlKis6M,15
+more_itertools/__init__.py,sha256=ZQYu_9H6stSG7viUgT32TFqslqcZwq82kWRZooKiI8Y,83
+more_itertools/__init__.pyi,sha256=5B3eTzON1BBuOLob1vCflyEb2lSd6usXQQ-Cv-hXkeA,43
+more_itertools/__pycache__/__init__.cpython-38.pyc,,
+more_itertools/__pycache__/more.cpython-38.pyc,,
+more_itertools/__pycache__/recipes.cpython-38.pyc,,
+more_itertools/more.py,sha256=jSrvV9BK-XKa4x7MPPp9yWYRDtRgR5h7yryEqHMU4mg,132578
+more_itertools/more.pyi,sha256=kWOkRKx0V8ZwC1D2j0c0DUfy56dazzpmRcm5ZuY_aqo,20006
+more_itertools/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+more_itertools/recipes.py,sha256=N6aCDwoIPvE-aiqpGU-nbFwqiM3X8MKRcxBM84naW88,18410
+more_itertools/recipes.pyi,sha256=Lx3vb0p_vY7rF8MQuguvOcVaS9qd1WRL8JO_qVo7hiY,3925
diff --git a/monEnvTP/lib/python3.8/site-packages/more_itertools-8.12.0.dist-info/WHEEL b/monEnvTP/lib/python3.8/site-packages/more_itertools-8.12.0.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..5bad85fdc1cd08553756d0fb2c7be8b5ad6af7fb
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/more_itertools-8.12.0.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/monEnvTP/lib/python3.8/site-packages/more_itertools-8.12.0.dist-info/top_level.txt b/monEnvTP/lib/python3.8/site-packages/more_itertools-8.12.0.dist-info/top_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..a5035befb3b2eff88c51a6d4d62142ecb10aba8b
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/more_itertools-8.12.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+more_itertools
diff --git a/monEnvTP/lib/python3.8/site-packages/more_itertools/__init__.py b/monEnvTP/lib/python3.8/site-packages/more_itertools/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..ea38bef1f661e62d577b3c2207386d901d851c72
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/more_itertools/__init__.py
@@ -0,0 +1,4 @@
+from .more import * # noqa
+from .recipes import * # noqa
+
+__version__ = '8.12.0'
diff --git a/monEnvTP/lib/python3.8/site-packages/more_itertools/__init__.pyi b/monEnvTP/lib/python3.8/site-packages/more_itertools/__init__.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..96f6e36c7f4ac9ea0aebdcd9e11b8d1ff092d2ef
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/more_itertools/__init__.pyi
@@ -0,0 +1,2 @@
+from .more import *
+from .recipes import *
diff --git a/monEnvTP/lib/python3.8/site-packages/more_itertools/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/more_itertools/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e9cd9772e8987b92d67e56c6ee43b159537d6b1e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/more_itertools/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/more_itertools/__pycache__/more.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/more_itertools/__pycache__/more.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..745c61d6631f4a65f3667ee10835cf81e46f11d5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/more_itertools/__pycache__/more.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/more_itertools/__pycache__/recipes.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/more_itertools/__pycache__/recipes.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0b9fcb55d60e03a46ae82f93e38b842b4ed8f37f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/more_itertools/__pycache__/recipes.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/more_itertools/more.py b/monEnvTP/lib/python3.8/site-packages/more_itertools/more.py
new file mode 100644
index 0000000000000000000000000000000000000000..630af973f25214d1f34e09a113f69dfb1c944269
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/more_itertools/more.py
@@ -0,0 +1,4317 @@
+import warnings
+
+from collections import Counter, defaultdict, deque, abc
+from collections.abc import Sequence
+from concurrent.futures import ThreadPoolExecutor
+from functools import partial, reduce, wraps
+from heapq import merge, heapify, heapreplace, heappop
+from itertools import (
+ chain,
+ compress,
+ count,
+ cycle,
+ dropwhile,
+ groupby,
+ islice,
+ repeat,
+ starmap,
+ takewhile,
+ tee,
+ zip_longest,
+)
+from math import exp, factorial, floor, log
+from queue import Empty, Queue
+from random import random, randrange, uniform
+from operator import itemgetter, mul, sub, gt, lt, ge, le
+from sys import hexversion, maxsize
+from time import monotonic
+
+from .recipes import (
+ consume,
+ flatten,
+ pairwise,
+ powerset,
+ take,
+ unique_everseen,
+)
+
+__all__ = [
+ 'AbortThread',
+ 'SequenceView',
+ 'UnequalIterablesError',
+ 'adjacent',
+ 'all_unique',
+ 'always_iterable',
+ 'always_reversible',
+ 'bucket',
+ 'callback_iter',
+ 'chunked',
+ 'chunked_even',
+ 'circular_shifts',
+ 'collapse',
+ 'collate',
+ 'combination_index',
+ 'consecutive_groups',
+ 'consumer',
+ 'count_cycle',
+ 'countable',
+ 'difference',
+ 'distinct_combinations',
+ 'distinct_permutations',
+ 'distribute',
+ 'divide',
+ 'duplicates_everseen',
+ 'duplicates_justseen',
+ 'exactly_n',
+ 'filter_except',
+ 'first',
+ 'groupby_transform',
+ 'ichunked',
+ 'ilen',
+ 'interleave',
+ 'interleave_evenly',
+ 'interleave_longest',
+ 'intersperse',
+ 'is_sorted',
+ 'islice_extended',
+ 'iterate',
+ 'last',
+ 'locate',
+ 'lstrip',
+ 'make_decorator',
+ 'map_except',
+ 'map_if',
+ 'map_reduce',
+ 'mark_ends',
+ 'minmax',
+ 'nth_or_last',
+ 'nth_permutation',
+ 'nth_product',
+ 'numeric_range',
+ 'one',
+ 'only',
+ 'padded',
+ 'partitions',
+ 'peekable',
+ 'permutation_index',
+ 'product_index',
+ 'raise_',
+ 'repeat_each',
+ 'repeat_last',
+ 'replace',
+ 'rlocate',
+ 'rstrip',
+ 'run_length',
+ 'sample',
+ 'seekable',
+ 'set_partitions',
+ 'side_effect',
+ 'sliced',
+ 'sort_together',
+ 'split_after',
+ 'split_at',
+ 'split_before',
+ 'split_into',
+ 'split_when',
+ 'spy',
+ 'stagger',
+ 'strip',
+ 'strictly_n',
+ 'substrings',
+ 'substrings_indexes',
+ 'time_limited',
+ 'unique_in_window',
+ 'unique_to_each',
+ 'unzip',
+ 'value_chain',
+ 'windowed',
+ 'windowed_complete',
+ 'with_iter',
+ 'zip_broadcast',
+ 'zip_equal',
+ 'zip_offset',
+]
+
+
+_marker = object()
+
+
+def chunked(iterable, n, strict=False):
+ """Break *iterable* into lists of length *n*:
+
+ >>> list(chunked([1, 2, 3, 4, 5, 6], 3))
+ [[1, 2, 3], [4, 5, 6]]
+
+ By the default, the last yielded list will have fewer than *n* elements
+ if the length of *iterable* is not divisible by *n*:
+
+ >>> list(chunked([1, 2, 3, 4, 5, 6, 7, 8], 3))
+ [[1, 2, 3], [4, 5, 6], [7, 8]]
+
+ To use a fill-in value instead, see the :func:`grouper` recipe.
+
+ If the length of *iterable* is not divisible by *n* and *strict* is
+ ``True``, then ``ValueError`` will be raised before the last
+ list is yielded.
+
+ """
+ iterator = iter(partial(take, n, iter(iterable)), [])
+ if strict:
+ if n is None:
+ raise ValueError('n must not be None when using strict mode.')
+
+ def ret():
+ for chunk in iterator:
+ if len(chunk) != n:
+ raise ValueError('iterable is not divisible by n.')
+ yield chunk
+
+ return iter(ret())
+ else:
+ return iterator
+
+
+def first(iterable, default=_marker):
+ """Return the first item of *iterable*, or *default* if *iterable* is
+ empty.
+
+ >>> first([0, 1, 2, 3])
+ 0
+ >>> first([], 'some default')
+ 'some default'
+
+ If *default* is not provided and there are no items in the iterable,
+ raise ``ValueError``.
+
+ :func:`first` is useful when you have a generator of expensive-to-retrieve
+ values and want any arbitrary one. It is marginally shorter than
+ ``next(iter(iterable), default)``.
+
+ """
+ try:
+ return next(iter(iterable))
+ except StopIteration as e:
+ if default is _marker:
+ raise ValueError(
+ 'first() was called on an empty iterable, and no '
+ 'default value was provided.'
+ ) from e
+ return default
+
+
+def last(iterable, default=_marker):
+ """Return the last item of *iterable*, or *default* if *iterable* is
+ empty.
+
+ >>> last([0, 1, 2, 3])
+ 3
+ >>> last([], 'some default')
+ 'some default'
+
+ If *default* is not provided and there are no items in the iterable,
+ raise ``ValueError``.
+ """
+ try:
+ if isinstance(iterable, Sequence):
+ return iterable[-1]
+ # Work around https://bugs.python.org/issue38525
+ elif hasattr(iterable, '__reversed__') and (hexversion != 0x030800F0):
+ return next(reversed(iterable))
+ else:
+ return deque(iterable, maxlen=1)[-1]
+ except (IndexError, TypeError, StopIteration):
+ if default is _marker:
+ raise ValueError(
+ 'last() was called on an empty iterable, and no default was '
+ 'provided.'
+ )
+ return default
+
+
+def nth_or_last(iterable, n, default=_marker):
+ """Return the nth or the last item of *iterable*,
+ or *default* if *iterable* is empty.
+
+ >>> nth_or_last([0, 1, 2, 3], 2)
+ 2
+ >>> nth_or_last([0, 1], 2)
+ 1
+ >>> nth_or_last([], 0, 'some default')
+ 'some default'
+
+ If *default* is not provided and there are no items in the iterable,
+ raise ``ValueError``.
+ """
+ return last(islice(iterable, n + 1), default=default)
+
+
+class peekable:
+ """Wrap an iterator to allow lookahead and prepending elements.
+
+ Call :meth:`peek` on the result to get the value that will be returned
+ by :func:`next`. This won't advance the iterator:
+
+ >>> p = peekable(['a', 'b'])
+ >>> p.peek()
+ 'a'
+ >>> next(p)
+ 'a'
+
+ Pass :meth:`peek` a default value to return that instead of raising
+ ``StopIteration`` when the iterator is exhausted.
+
+ >>> p = peekable([])
+ >>> p.peek('hi')
+ 'hi'
+
+ peekables also offer a :meth:`prepend` method, which "inserts" items
+ at the head of the iterable:
+
+ >>> p = peekable([1, 2, 3])
+ >>> p.prepend(10, 11, 12)
+ >>> next(p)
+ 10
+ >>> p.peek()
+ 11
+ >>> list(p)
+ [11, 12, 1, 2, 3]
+
+ peekables can be indexed. Index 0 is the item that will be returned by
+ :func:`next`, index 1 is the item after that, and so on:
+ The values up to the given index will be cached.
+
+ >>> p = peekable(['a', 'b', 'c', 'd'])
+ >>> p[0]
+ 'a'
+ >>> p[1]
+ 'b'
+ >>> next(p)
+ 'a'
+
+ Negative indexes are supported, but be aware that they will cache the
+ remaining items in the source iterator, which may require significant
+ storage.
+
+ To check whether a peekable is exhausted, check its truth value:
+
+ >>> p = peekable(['a', 'b'])
+ >>> if p: # peekable has items
+ ... list(p)
+ ['a', 'b']
+ >>> if not p: # peekable is exhausted
+ ... list(p)
+ []
+
+ """
+
+ def __init__(self, iterable):
+ self._it = iter(iterable)
+ self._cache = deque()
+
+ def __iter__(self):
+ return self
+
+ def __bool__(self):
+ try:
+ self.peek()
+ except StopIteration:
+ return False
+ return True
+
+ def peek(self, default=_marker):
+ """Return the item that will be next returned from ``next()``.
+
+ Return ``default`` if there are no items left. If ``default`` is not
+ provided, raise ``StopIteration``.
+
+ """
+ if not self._cache:
+ try:
+ self._cache.append(next(self._it))
+ except StopIteration:
+ if default is _marker:
+ raise
+ return default
+ return self._cache[0]
+
+ def prepend(self, *items):
+ """Stack up items to be the next ones returned from ``next()`` or
+ ``self.peek()``. The items will be returned in
+ first in, first out order::
+
+ >>> p = peekable([1, 2, 3])
+ >>> p.prepend(10, 11, 12)
+ >>> next(p)
+ 10
+ >>> list(p)
+ [11, 12, 1, 2, 3]
+
+ It is possible, by prepending items, to "resurrect" a peekable that
+ previously raised ``StopIteration``.
+
+ >>> p = peekable([])
+ >>> next(p)
+ Traceback (most recent call last):
+ ...
+ StopIteration
+ >>> p.prepend(1)
+ >>> next(p)
+ 1
+ >>> next(p)
+ Traceback (most recent call last):
+ ...
+ StopIteration
+
+ """
+ self._cache.extendleft(reversed(items))
+
+ def __next__(self):
+ if self._cache:
+ return self._cache.popleft()
+
+ return next(self._it)
+
+ def _get_slice(self, index):
+ # Normalize the slice's arguments
+ step = 1 if (index.step is None) else index.step
+ if step > 0:
+ start = 0 if (index.start is None) else index.start
+ stop = maxsize if (index.stop is None) else index.stop
+ elif step < 0:
+ start = -1 if (index.start is None) else index.start
+ stop = (-maxsize - 1) if (index.stop is None) else index.stop
+ else:
+ raise ValueError('slice step cannot be zero')
+
+ # If either the start or stop index is negative, we'll need to cache
+ # the rest of the iterable in order to slice from the right side.
+ if (start < 0) or (stop < 0):
+ self._cache.extend(self._it)
+ # Otherwise we'll need to find the rightmost index and cache to that
+ # point.
+ else:
+ n = min(max(start, stop) + 1, maxsize)
+ cache_len = len(self._cache)
+ if n >= cache_len:
+ self._cache.extend(islice(self._it, n - cache_len))
+
+ return list(self._cache)[index]
+
+ def __getitem__(self, index):
+ if isinstance(index, slice):
+ return self._get_slice(index)
+
+ cache_len = len(self._cache)
+ if index < 0:
+ self._cache.extend(self._it)
+ elif index >= cache_len:
+ self._cache.extend(islice(self._it, index + 1 - cache_len))
+
+ return self._cache[index]
+
+
+def collate(*iterables, **kwargs):
+ """Return a sorted merge of the items from each of several already-sorted
+ *iterables*.
+
+ >>> list(collate('ACDZ', 'AZ', 'JKL'))
+ ['A', 'A', 'C', 'D', 'J', 'K', 'L', 'Z', 'Z']
+
+ Works lazily, keeping only the next value from each iterable in memory. Use
+ :func:`collate` to, for example, perform a n-way mergesort of items that
+ don't fit in memory.
+
+ If a *key* function is specified, the iterables will be sorted according
+ to its result:
+
+ >>> key = lambda s: int(s) # Sort by numeric value, not by string
+ >>> list(collate(['1', '10'], ['2', '11'], key=key))
+ ['1', '2', '10', '11']
+
+
+ If the *iterables* are sorted in descending order, set *reverse* to
+ ``True``:
+
+ >>> list(collate([5, 3, 1], [4, 2, 0], reverse=True))
+ [5, 4, 3, 2, 1, 0]
+
+ If the elements of the passed-in iterables are out of order, you might get
+ unexpected results.
+
+ On Python 3.5+, this function is an alias for :func:`heapq.merge`.
+
+ """
+ warnings.warn(
+ "collate is no longer part of more_itertools, use heapq.merge",
+ DeprecationWarning,
+ )
+ return merge(*iterables, **kwargs)
+
+
+def consumer(func):
+ """Decorator that automatically advances a PEP-342-style "reverse iterator"
+ to its first yield point so you don't have to call ``next()`` on it
+ manually.
+
+ >>> @consumer
+ ... def tally():
+ ... i = 0
+ ... while True:
+ ... print('Thing number %s is %s.' % (i, (yield)))
+ ... i += 1
+ ...
+ >>> t = tally()
+ >>> t.send('red')
+ Thing number 0 is red.
+ >>> t.send('fish')
+ Thing number 1 is fish.
+
+ Without the decorator, you would have to call ``next(t)`` before
+ ``t.send()`` could be used.
+
+ """
+
+ @wraps(func)
+ def wrapper(*args, **kwargs):
+ gen = func(*args, **kwargs)
+ next(gen)
+ return gen
+
+ return wrapper
+
+
+def ilen(iterable):
+ """Return the number of items in *iterable*.
+
+ >>> ilen(x for x in range(1000000) if x % 3 == 0)
+ 333334
+
+ This consumes the iterable, so handle with care.
+
+ """
+ # This approach was selected because benchmarks showed it's likely the
+ # fastest of the known implementations at the time of writing.
+ # See GitHub tracker: #236, #230.
+ counter = count()
+ deque(zip(iterable, counter), maxlen=0)
+ return next(counter)
+
+
+def iterate(func, start):
+ """Return ``start``, ``func(start)``, ``func(func(start))``, ...
+
+ >>> from itertools import islice
+ >>> list(islice(iterate(lambda x: 2*x, 1), 10))
+ [1, 2, 4, 8, 16, 32, 64, 128, 256, 512]
+
+ """
+ while True:
+ yield start
+ start = func(start)
+
+
+def with_iter(context_manager):
+ """Wrap an iterable in a ``with`` statement, so it closes once exhausted.
+
+ For example, this will close the file when the iterator is exhausted::
+
+ upper_lines = (line.upper() for line in with_iter(open('foo')))
+
+ Any context manager which returns an iterable is a candidate for
+ ``with_iter``.
+
+ """
+ with context_manager as iterable:
+ yield from iterable
+
+
+def one(iterable, too_short=None, too_long=None):
+ """Return the first item from *iterable*, which is expected to contain only
+ that item. Raise an exception if *iterable* is empty or has more than one
+ item.
+
+ :func:`one` is useful for ensuring that an iterable contains only one item.
+ For example, it can be used to retrieve the result of a database query
+ that is expected to return a single row.
+
+ If *iterable* is empty, ``ValueError`` will be raised. You may specify a
+ different exception with the *too_short* keyword:
+
+ >>> it = []
+ >>> one(it) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ ValueError: too many items in iterable (expected 1)'
+ >>> too_short = IndexError('too few items')
+ >>> one(it, too_short=too_short) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ IndexError: too few items
+
+ Similarly, if *iterable* contains more than one item, ``ValueError`` will
+ be raised. You may specify a different exception with the *too_long*
+ keyword:
+
+ >>> it = ['too', 'many']
+ >>> one(it) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ ValueError: Expected exactly one item in iterable, but got 'too',
+ 'many', and perhaps more.
+ >>> too_long = RuntimeError
+ >>> one(it, too_long=too_long) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ RuntimeError
+
+ Note that :func:`one` attempts to advance *iterable* twice to ensure there
+ is only one item. See :func:`spy` or :func:`peekable` to check iterable
+ contents less destructively.
+
+ """
+ it = iter(iterable)
+
+ try:
+ first_value = next(it)
+ except StopIteration as e:
+ raise (
+ too_short or ValueError('too few items in iterable (expected 1)')
+ ) from e
+
+ try:
+ second_value = next(it)
+ except StopIteration:
+ pass
+ else:
+ msg = (
+ 'Expected exactly one item in iterable, but got {!r}, {!r}, '
+ 'and perhaps more.'.format(first_value, second_value)
+ )
+ raise too_long or ValueError(msg)
+
+ return first_value
+
+
+def raise_(exception, *args):
+ raise exception(*args)
+
+
+def strictly_n(iterable, n, too_short=None, too_long=None):
+ """Validate that *iterable* has exactly *n* items and return them if
+ it does. If it has fewer than *n* items, call function *too_short*
+ with those items. If it has more than *n* items, call function
+ *too_long* with the first ``n + 1`` items.
+
+ >>> iterable = ['a', 'b', 'c', 'd']
+ >>> n = 4
+ >>> list(strictly_n(iterable, n))
+ ['a', 'b', 'c', 'd']
+
+ By default, *too_short* and *too_long* are functions that raise
+ ``ValueError``.
+
+ >>> list(strictly_n('ab', 3)) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ ValueError: too few items in iterable (got 2)
+
+ >>> list(strictly_n('abc', 2)) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ ValueError: too many items in iterable (got at least 3)
+
+ You can instead supply functions that do something else.
+ *too_short* will be called with the number of items in *iterable*.
+ *too_long* will be called with `n + 1`.
+
+ >>> def too_short(item_count):
+ ... raise RuntimeError
+ >>> it = strictly_n('abcd', 6, too_short=too_short)
+ >>> list(it) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ RuntimeError
+
+ >>> def too_long(item_count):
+ ... print('The boss is going to hear about this')
+ >>> it = strictly_n('abcdef', 4, too_long=too_long)
+ >>> list(it)
+ The boss is going to hear about this
+ ['a', 'b', 'c', 'd']
+
+ """
+ if too_short is None:
+ too_short = lambda item_count: raise_(
+ ValueError,
+ 'Too few items in iterable (got {})'.format(item_count),
+ )
+
+ if too_long is None:
+ too_long = lambda item_count: raise_(
+ ValueError,
+ 'Too many items in iterable (got at least {})'.format(item_count),
+ )
+
+ it = iter(iterable)
+ for i in range(n):
+ try:
+ item = next(it)
+ except StopIteration:
+ too_short(i)
+ return
+ else:
+ yield item
+
+ try:
+ next(it)
+ except StopIteration:
+ pass
+ else:
+ too_long(n + 1)
+
+
+def distinct_permutations(iterable, r=None):
+ """Yield successive distinct permutations of the elements in *iterable*.
+
+ >>> sorted(distinct_permutations([1, 0, 1]))
+ [(0, 1, 1), (1, 0, 1), (1, 1, 0)]
+
+ Equivalent to ``set(permutations(iterable))``, except duplicates are not
+ generated and thrown away. For larger input sequences this is much more
+ efficient.
+
+ Duplicate permutations arise when there are duplicated elements in the
+ input iterable. The number of items returned is
+ `n! / (x_1! * x_2! * ... * x_n!)`, where `n` is the total number of
+ items input, and each `x_i` is the count of a distinct item in the input
+ sequence.
+
+ If *r* is given, only the *r*-length permutations are yielded.
+
+ >>> sorted(distinct_permutations([1, 0, 1], r=2))
+ [(0, 1), (1, 0), (1, 1)]
+ >>> sorted(distinct_permutations(range(3), r=2))
+ [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)]
+
+ """
+ # Algorithm: https://w.wiki/Qai
+ def _full(A):
+ while True:
+ # Yield the permutation we have
+ yield tuple(A)
+
+ # Find the largest index i such that A[i] < A[i + 1]
+ for i in range(size - 2, -1, -1):
+ if A[i] < A[i + 1]:
+ break
+ # If no such index exists, this permutation is the last one
+ else:
+ return
+
+ # Find the largest index j greater than j such that A[i] < A[j]
+ for j in range(size - 1, i, -1):
+ if A[i] < A[j]:
+ break
+
+ # Swap the value of A[i] with that of A[j], then reverse the
+ # sequence from A[i + 1] to form the new permutation
+ A[i], A[j] = A[j], A[i]
+ A[i + 1 :] = A[: i - size : -1] # A[i + 1:][::-1]
+
+ # Algorithm: modified from the above
+ def _partial(A, r):
+ # Split A into the first r items and the last r items
+ head, tail = A[:r], A[r:]
+ right_head_indexes = range(r - 1, -1, -1)
+ left_tail_indexes = range(len(tail))
+
+ while True:
+ # Yield the permutation we have
+ yield tuple(head)
+
+ # Starting from the right, find the first index of the head with
+ # value smaller than the maximum value of the tail - call it i.
+ pivot = tail[-1]
+ for i in right_head_indexes:
+ if head[i] < pivot:
+ break
+ pivot = head[i]
+ else:
+ return
+
+ # Starting from the left, find the first value of the tail
+ # with a value greater than head[i] and swap.
+ for j in left_tail_indexes:
+ if tail[j] > head[i]:
+ head[i], tail[j] = tail[j], head[i]
+ break
+ # If we didn't find one, start from the right and find the first
+ # index of the head with a value greater than head[i] and swap.
+ else:
+ for j in right_head_indexes:
+ if head[j] > head[i]:
+ head[i], head[j] = head[j], head[i]
+ break
+
+ # Reverse head[i + 1:] and swap it with tail[:r - (i + 1)]
+ tail += head[: i - r : -1] # head[i + 1:][::-1]
+ i += 1
+ head[i:], tail[:] = tail[: r - i], tail[r - i :]
+
+ items = sorted(iterable)
+
+ size = len(items)
+ if r is None:
+ r = size
+
+ if 0 < r <= size:
+ return _full(items) if (r == size) else _partial(items, r)
+
+ return iter(() if r else ((),))
+
+
+def intersperse(e, iterable, n=1):
+ """Intersperse filler element *e* among the items in *iterable*, leaving
+ *n* items between each filler element.
+
+ >>> list(intersperse('!', [1, 2, 3, 4, 5]))
+ [1, '!', 2, '!', 3, '!', 4, '!', 5]
+
+ >>> list(intersperse(None, [1, 2, 3, 4, 5], n=2))
+ [1, 2, None, 3, 4, None, 5]
+
+ """
+ if n == 0:
+ raise ValueError('n must be > 0')
+ elif n == 1:
+ # interleave(repeat(e), iterable) -> e, x_0, e, x_1, e, x_2...
+ # islice(..., 1, None) -> x_0, e, x_1, e, x_2...
+ return islice(interleave(repeat(e), iterable), 1, None)
+ else:
+ # interleave(filler, chunks) -> [e], [x_0, x_1], [e], [x_2, x_3]...
+ # islice(..., 1, None) -> [x_0, x_1], [e], [x_2, x_3]...
+ # flatten(...) -> x_0, x_1, e, x_2, x_3...
+ filler = repeat([e])
+ chunks = chunked(iterable, n)
+ return flatten(islice(interleave(filler, chunks), 1, None))
+
+
+def unique_to_each(*iterables):
+ """Return the elements from each of the input iterables that aren't in the
+ other input iterables.
+
+ For example, suppose you have a set of packages, each with a set of
+ dependencies::
+
+ {'pkg_1': {'A', 'B'}, 'pkg_2': {'B', 'C'}, 'pkg_3': {'B', 'D'}}
+
+ If you remove one package, which dependencies can also be removed?
+
+ If ``pkg_1`` is removed, then ``A`` is no longer necessary - it is not
+ associated with ``pkg_2`` or ``pkg_3``. Similarly, ``C`` is only needed for
+ ``pkg_2``, and ``D`` is only needed for ``pkg_3``::
+
+ >>> unique_to_each({'A', 'B'}, {'B', 'C'}, {'B', 'D'})
+ [['A'], ['C'], ['D']]
+
+ If there are duplicates in one input iterable that aren't in the others
+ they will be duplicated in the output. Input order is preserved::
+
+ >>> unique_to_each("mississippi", "missouri")
+ [['p', 'p'], ['o', 'u', 'r']]
+
+ It is assumed that the elements of each iterable are hashable.
+
+ """
+ pool = [list(it) for it in iterables]
+ counts = Counter(chain.from_iterable(map(set, pool)))
+ uniques = {element for element in counts if counts[element] == 1}
+ return [list(filter(uniques.__contains__, it)) for it in pool]
+
+
+def windowed(seq, n, fillvalue=None, step=1):
+ """Return a sliding window of width *n* over the given iterable.
+
+ >>> all_windows = windowed([1, 2, 3, 4, 5], 3)
+ >>> list(all_windows)
+ [(1, 2, 3), (2, 3, 4), (3, 4, 5)]
+
+ When the window is larger than the iterable, *fillvalue* is used in place
+ of missing values:
+
+ >>> list(windowed([1, 2, 3], 4))
+ [(1, 2, 3, None)]
+
+ Each window will advance in increments of *step*:
+
+ >>> list(windowed([1, 2, 3, 4, 5, 6], 3, fillvalue='!', step=2))
+ [(1, 2, 3), (3, 4, 5), (5, 6, '!')]
+
+ To slide into the iterable's items, use :func:`chain` to add filler items
+ to the left:
+
+ >>> iterable = [1, 2, 3, 4]
+ >>> n = 3
+ >>> padding = [None] * (n - 1)
+ >>> list(windowed(chain(padding, iterable), 3))
+ [(None, None, 1), (None, 1, 2), (1, 2, 3), (2, 3, 4)]
+ """
+ if n < 0:
+ raise ValueError('n must be >= 0')
+ if n == 0:
+ yield tuple()
+ return
+ if step < 1:
+ raise ValueError('step must be >= 1')
+
+ window = deque(maxlen=n)
+ i = n
+ for _ in map(window.append, seq):
+ i -= 1
+ if not i:
+ i = step
+ yield tuple(window)
+
+ size = len(window)
+ if size < n:
+ yield tuple(chain(window, repeat(fillvalue, n - size)))
+ elif 0 < i < min(step, n):
+ window += (fillvalue,) * i
+ yield tuple(window)
+
+
+def substrings(iterable):
+ """Yield all of the substrings of *iterable*.
+
+ >>> [''.join(s) for s in substrings('more')]
+ ['m', 'o', 'r', 'e', 'mo', 'or', 're', 'mor', 'ore', 'more']
+
+ Note that non-string iterables can also be subdivided.
+
+ >>> list(substrings([0, 1, 2]))
+ [(0,), (1,), (2,), (0, 1), (1, 2), (0, 1, 2)]
+
+ """
+ # The length-1 substrings
+ seq = []
+ for item in iter(iterable):
+ seq.append(item)
+ yield (item,)
+ seq = tuple(seq)
+ item_count = len(seq)
+
+ # And the rest
+ for n in range(2, item_count + 1):
+ for i in range(item_count - n + 1):
+ yield seq[i : i + n]
+
+
+def substrings_indexes(seq, reverse=False):
+ """Yield all substrings and their positions in *seq*
+
+ The items yielded will be a tuple of the form ``(substr, i, j)``, where
+ ``substr == seq[i:j]``.
+
+ This function only works for iterables that support slicing, such as
+ ``str`` objects.
+
+ >>> for item in substrings_indexes('more'):
+ ... print(item)
+ ('m', 0, 1)
+ ('o', 1, 2)
+ ('r', 2, 3)
+ ('e', 3, 4)
+ ('mo', 0, 2)
+ ('or', 1, 3)
+ ('re', 2, 4)
+ ('mor', 0, 3)
+ ('ore', 1, 4)
+ ('more', 0, 4)
+
+ Set *reverse* to ``True`` to yield the same items in the opposite order.
+
+
+ """
+ r = range(1, len(seq) + 1)
+ if reverse:
+ r = reversed(r)
+ return (
+ (seq[i : i + L], i, i + L) for L in r for i in range(len(seq) - L + 1)
+ )
+
+
+class bucket:
+ """Wrap *iterable* and return an object that buckets it iterable into
+ child iterables based on a *key* function.
+
+ >>> iterable = ['a1', 'b1', 'c1', 'a2', 'b2', 'c2', 'b3']
+ >>> s = bucket(iterable, key=lambda x: x[0]) # Bucket by 1st character
+ >>> sorted(list(s)) # Get the keys
+ ['a', 'b', 'c']
+ >>> a_iterable = s['a']
+ >>> next(a_iterable)
+ 'a1'
+ >>> next(a_iterable)
+ 'a2'
+ >>> list(s['b'])
+ ['b1', 'b2', 'b3']
+
+ The original iterable will be advanced and its items will be cached until
+ they are used by the child iterables. This may require significant storage.
+
+ By default, attempting to select a bucket to which no items belong will
+ exhaust the iterable and cache all values.
+ If you specify a *validator* function, selected buckets will instead be
+ checked against it.
+
+ >>> from itertools import count
+ >>> it = count(1, 2) # Infinite sequence of odd numbers
+ >>> key = lambda x: x % 10 # Bucket by last digit
+ >>> validator = lambda x: x in {1, 3, 5, 7, 9} # Odd digits only
+ >>> s = bucket(it, key=key, validator=validator)
+ >>> 2 in s
+ False
+ >>> list(s[2])
+ []
+
+ """
+
+ def __init__(self, iterable, key, validator=None):
+ self._it = iter(iterable)
+ self._key = key
+ self._cache = defaultdict(deque)
+ self._validator = validator or (lambda x: True)
+
+ def __contains__(self, value):
+ if not self._validator(value):
+ return False
+
+ try:
+ item = next(self[value])
+ except StopIteration:
+ return False
+ else:
+ self._cache[value].appendleft(item)
+
+ return True
+
+ def _get_values(self, value):
+ """
+ Helper to yield items from the parent iterator that match *value*.
+ Items that don't match are stored in the local cache as they
+ are encountered.
+ """
+ while True:
+ # If we've cached some items that match the target value, emit
+ # the first one and evict it from the cache.
+ if self._cache[value]:
+ yield self._cache[value].popleft()
+ # Otherwise we need to advance the parent iterator to search for
+ # a matching item, caching the rest.
+ else:
+ while True:
+ try:
+ item = next(self._it)
+ except StopIteration:
+ return
+ item_value = self._key(item)
+ if item_value == value:
+ yield item
+ break
+ elif self._validator(item_value):
+ self._cache[item_value].append(item)
+
+ def __iter__(self):
+ for item in self._it:
+ item_value = self._key(item)
+ if self._validator(item_value):
+ self._cache[item_value].append(item)
+
+ yield from self._cache.keys()
+
+ def __getitem__(self, value):
+ if not self._validator(value):
+ return iter(())
+
+ return self._get_values(value)
+
+
+def spy(iterable, n=1):
+ """Return a 2-tuple with a list containing the first *n* elements of
+ *iterable*, and an iterator with the same items as *iterable*.
+ This allows you to "look ahead" at the items in the iterable without
+ advancing it.
+
+ There is one item in the list by default:
+
+ >>> iterable = 'abcdefg'
+ >>> head, iterable = spy(iterable)
+ >>> head
+ ['a']
+ >>> list(iterable)
+ ['a', 'b', 'c', 'd', 'e', 'f', 'g']
+
+ You may use unpacking to retrieve items instead of lists:
+
+ >>> (head,), iterable = spy('abcdefg')
+ >>> head
+ 'a'
+ >>> (first, second), iterable = spy('abcdefg', 2)
+ >>> first
+ 'a'
+ >>> second
+ 'b'
+
+ The number of items requested can be larger than the number of items in
+ the iterable:
+
+ >>> iterable = [1, 2, 3, 4, 5]
+ >>> head, iterable = spy(iterable, 10)
+ >>> head
+ [1, 2, 3, 4, 5]
+ >>> list(iterable)
+ [1, 2, 3, 4, 5]
+
+ """
+ it = iter(iterable)
+ head = take(n, it)
+
+ return head.copy(), chain(head, it)
+
+
+def interleave(*iterables):
+ """Return a new iterable yielding from each iterable in turn,
+ until the shortest is exhausted.
+
+ >>> list(interleave([1, 2, 3], [4, 5], [6, 7, 8]))
+ [1, 4, 6, 2, 5, 7]
+
+ For a version that doesn't terminate after the shortest iterable is
+ exhausted, see :func:`interleave_longest`.
+
+ """
+ return chain.from_iterable(zip(*iterables))
+
+
+def interleave_longest(*iterables):
+ """Return a new iterable yielding from each iterable in turn,
+ skipping any that are exhausted.
+
+ >>> list(interleave_longest([1, 2, 3], [4, 5], [6, 7, 8]))
+ [1, 4, 6, 2, 5, 7, 3, 8]
+
+ This function produces the same output as :func:`roundrobin`, but may
+ perform better for some inputs (in particular when the number of iterables
+ is large).
+
+ """
+ i = chain.from_iterable(zip_longest(*iterables, fillvalue=_marker))
+ return (x for x in i if x is not _marker)
+
+
+def interleave_evenly(iterables, lengths=None):
+ """
+ Interleave multiple iterables so that their elements are evenly distributed
+ throughout the output sequence.
+
+ >>> iterables = [1, 2, 3, 4, 5], ['a', 'b']
+ >>> list(interleave_evenly(iterables))
+ [1, 2, 'a', 3, 4, 'b', 5]
+
+ >>> iterables = [[1, 2, 3], [4, 5], [6, 7, 8]]
+ >>> list(interleave_evenly(iterables))
+ [1, 6, 4, 2, 7, 3, 8, 5]
+
+ This function requires iterables of known length. Iterables without
+ ``__len__()`` can be used by manually specifying lengths with *lengths*:
+
+ >>> from itertools import combinations, repeat
+ >>> iterables = [combinations(range(4), 2), ['a', 'b', 'c']]
+ >>> lengths = [4 * (4 - 1) // 2, 3]
+ >>> list(interleave_evenly(iterables, lengths=lengths))
+ [(0, 1), (0, 2), 'a', (0, 3), (1, 2), 'b', (1, 3), (2, 3), 'c']
+
+ Based on Bresenham's algorithm.
+ """
+ if lengths is None:
+ try:
+ lengths = [len(it) for it in iterables]
+ except TypeError:
+ raise ValueError(
+ 'Iterable lengths could not be determined automatically. '
+ 'Specify them with the lengths keyword.'
+ )
+ elif len(iterables) != len(lengths):
+ raise ValueError('Mismatching number of iterables and lengths.')
+
+ dims = len(lengths)
+
+ # sort iterables by length, descending
+ lengths_permute = sorted(
+ range(dims), key=lambda i: lengths[i], reverse=True
+ )
+ lengths_desc = [lengths[i] for i in lengths_permute]
+ iters_desc = [iter(iterables[i]) for i in lengths_permute]
+
+ # the longest iterable is the primary one (Bresenham: the longest
+ # distance along an axis)
+ delta_primary, deltas_secondary = lengths_desc[0], lengths_desc[1:]
+ iter_primary, iters_secondary = iters_desc[0], iters_desc[1:]
+ errors = [delta_primary // dims] * len(deltas_secondary)
+
+ to_yield = sum(lengths)
+ while to_yield:
+ yield next(iter_primary)
+ to_yield -= 1
+ # update errors for each secondary iterable
+ errors = [e - delta for e, delta in zip(errors, deltas_secondary)]
+
+ # those iterables for which the error is negative are yielded
+ # ("diagonal step" in Bresenham)
+ for i, e in enumerate(errors):
+ if e < 0:
+ yield next(iters_secondary[i])
+ to_yield -= 1
+ errors[i] += delta_primary
+
+
def collapse(iterable, base_type=None, levels=None):
    """Flatten an arbitrarily nested iterable (e.g. a list of lists of
    tuples) into a flat stream of non-iterable items.

    >>> iterable = [(1, 2), ([3, 4], [[5], [6]])]
    >>> list(collapse(iterable))
    [1, 2, 3, 4, 5, 6]

    Text and binary strings are treated as atomic and never flattened.

    Pass *base_type* to keep additional types intact:

    >>> iterable = ['ab', ('cd', 'ef'), ['gh', 'ij']]
    >>> list(collapse(iterable, base_type=tuple))
    ['ab', ('cd', 'ef'), 'gh', 'ij']

    Pass *levels* to limit how deep the flattening goes:

    >>> iterable = [('a', ['b']), ('c', ['d'])]
    >>> list(collapse(iterable))  # Fully flattened
    ['a', 'b', 'c', 'd']
    >>> list(collapse(iterable, levels=1))  # Only one level flattened
    ['a', ['b'], 'c', ['d']]

    """

    def _flatten(obj, depth):
        # Stop descending when the depth limit is hit, when the object is
        # string-like, or when the caller asked to keep this type whole.
        if levels is not None and depth > levels:
            yield obj
            return
        if isinstance(obj, (str, bytes)):
            yield obj
            return
        if base_type is not None and isinstance(obj, base_type):
            yield obj
            return

        try:
            children = iter(obj)
        except TypeError:
            # Not iterable: it is a leaf value.
            yield obj
        else:
            for child in children:
                yield from _flatten(child, depth + 1)

    yield from _flatten(iterable, 0)
+
+
def side_effect(func, iterable, chunk_size=None, before=None, after=None):
    """Pass each item of *iterable* (or each group of *chunk_size* items)
    to *func* before yielding it unchanged.

    *func* takes a single argument; its return value is ignored.

    *before* and *after* are optional zero-argument callables run before
    iteration begins and after it finishes (the *after* hook runs even if
    iteration is abandoned early, via ``finally``).

    Useful for logging, progress reporting, and other impure bookkeeping:

    >>> out = []
    >>> list(side_effect(out.append, range(2)))
    [0, 1]
    >>> out
    [0, 1]

    With *chunk_size*, *func* receives lists of items:

    >>> sums = []
    >>> list(side_effect(lambda c: sums.append(sum(c)), [0, 1, 2, 3], 2))
    [0, 1, 2, 3]
    >>> sums
    [1, 5]

    """
    try:
        if before is not None:
            before()

        # Choose the unit of work up front: single items or chunks.
        units = iterable if chunk_size is None else chunked(iterable, chunk_size)
        for unit in units:
            func(unit)
            if chunk_size is None:
                yield unit
            else:
                yield from unit
    finally:
        if after is not None:
            after()
+
+
def sliced(seq, n, strict=False):
    """Yield slices of length *n* from the sequence *seq*.

    >>> list(sliced((1, 2, 3, 4, 5, 6), 3))
    [(1, 2, 3), (4, 5, 6)]

    By default, the last yielded slice will have fewer than *n* elements
    when the length of *seq* is not divisible by *n*:

    >>> list(sliced((1, 2, 3, 4, 5, 6, 7, 8), 3))
    [(1, 2, 3), (4, 5, 6), (7, 8)]

    If *strict* is ``True`` and the length of *seq* is not divisible by
    *n*, ``ValueError`` is raised before the short final slice would be
    yielded.

    Only works for objects that support slicing; for arbitrary iterables
    see :func:`chunked`.

    """
    # Slice at offsets 0, n, 2n, ...; an empty slice marks the end.
    slices = takewhile(len, (seq[i : i + n] for i in count(0, n)))
    if not strict:
        return slices

    def _checked():
        for piece in slices:
            if len(piece) != n:
                raise ValueError("seq is not divisible by n.")
            yield piece

    return iter(_checked())
+
+
def split_at(iterable, pred, maxsplit=-1, keep_separator=False):
    """Yield lists of items from *iterable*, splitting on items for which
    *pred* returns ``True``.

    >>> list(split_at('abcdcba', lambda x: x == 'b'))
    [['a'], ['c', 'd', 'c'], ['a']]

    >>> list(split_at(range(10), lambda n: n % 2 == 1))
    [[0], [2], [4], [6], [8], []]

    At most *maxsplit* splits are done; ``-1`` (the default) means no
    limit:

    >>> list(split_at(range(10), lambda n: n % 2 == 1, maxsplit=2))
    [[0], [2], [4, 5, 6, 7, 8, 9]]

    Separator items are dropped unless *keep_separator* is ``True``, in
    which case each one is yielded as its own single-item list:

    >>> list(split_at('abcdcba', lambda x: x == 'b', keep_separator=True))
    [['a'], ['b'], ['c', 'd', 'c'], ['b'], ['a']]

    """
    if maxsplit == 0:
        yield list(iterable)
        return

    chunk = []
    stream = iter(iterable)
    for element in stream:
        if not pred(element):
            chunk.append(element)
            continue
        # Found a separator: emit what came before it.
        yield chunk
        if keep_separator:
            yield [element]
        if maxsplit == 1:
            yield list(stream)
            return
        chunk = []
        maxsplit -= 1
    yield chunk
+
+
def split_before(iterable, pred, maxsplit=-1):
    """Yield lists of items from *iterable*; each list ends just before an
    item for which *pred* returns ``True``:

    >>> list(split_before('OneTwo', lambda s: s.isupper()))
    [['O', 'n', 'e'], ['T', 'w', 'o']]

    >>> list(split_before(range(10), lambda n: n % 3 == 0))
    [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]]

    At most *maxsplit* splits are done; ``-1`` (the default) means no
    limit:

    >>> list(split_before(range(10), lambda n: n % 3 == 0, maxsplit=2))
    [[0, 1, 2], [3, 4, 5], [6, 7, 8, 9]]
    """
    if maxsplit == 0:
        yield list(iterable)
        return

    chunk = []
    stream = iter(iterable)
    for element in stream:
        # pred is evaluated first so it is called on every item, matching
        # the behavior callers may rely on for stateful predicates.
        if pred(element) and chunk:
            yield chunk
            if maxsplit == 1:
                yield [element, *stream]
                return
            chunk = []
            maxsplit -= 1
        chunk.append(element)
    if chunk:
        yield chunk
+
+
def split_after(iterable, pred, maxsplit=-1):
    """Yield lists of items from *iterable*; each list ends with an item
    for which *pred* returns ``True``:

    >>> list(split_after('one1two2', lambda s: s.isdigit()))
    [['o', 'n', 'e', '1'], ['t', 'w', 'o', '2']]

    >>> list(split_after(range(10), lambda n: n % 3 == 0))
    [[0], [1, 2, 3], [4, 5, 6], [7, 8, 9]]

    At most *maxsplit* splits are done; ``-1`` (the default) means no
    limit:

    >>> list(split_after(range(10), lambda n: n % 3 == 0, maxsplit=2))
    [[0], [1, 2, 3], [4, 5, 6, 7, 8, 9]]

    """
    if maxsplit == 0:
        yield list(iterable)
        return

    chunk = []
    stream = iter(iterable)
    for element in stream:
        chunk.append(element)
        if pred(element):
            # The chunk is never empty here: the item was just appended.
            yield chunk
            if maxsplit == 1:
                yield list(stream)
                return
            chunk = []
            maxsplit -= 1
    if chunk:
        yield chunk
+
+
def split_when(iterable, pred, maxsplit=-1):
    """Split *iterable* between consecutive items ``x, y`` for which
    ``pred(x, y)`` returns ``True``.

    For example, to find runs of increasing numbers, split wherever an
    element exceeds its successor:

    >>> list(split_when([1, 2, 3, 3, 2, 5, 2, 4, 2], lambda x, y: x > y))
    [[1, 2, 3, 3], [2, 5], [2, 4], [2]]

    At most *maxsplit* splits are done; ``-1`` (the default) means no
    limit:

    >>> list(split_when([1, 2, 3, 3, 2, 5, 2, 4, 2],
    ...                 lambda x, y: x > y, maxsplit=2))
    [[1, 2, 3, 3], [2, 5], [2, 4, 2]]

    """
    if maxsplit == 0:
        yield list(iterable)
        return

    stream = iter(iterable)
    try:
        previous = next(stream)
    except StopIteration:
        return

    chunk = [previous]
    for current in stream:
        # Compare each adjacent pair; a True result closes the chunk.
        if pred(previous, current):
            yield chunk
            if maxsplit == 1:
                yield [current, *stream]
                return
            chunk = []
            maxsplit -= 1

        chunk.append(current)
        previous = current

    yield chunk
+
+
def split_into(iterable, sizes):
    """Yield successive lists from *iterable* whose lengths are given by
    the integers in *sizes*.

    >>> list(split_into([1,2,3,4,5,6], [1,2,3]))
    [[1], [2, 3], [4, 5, 6]]

    If the sum of *sizes* is smaller than the length of *iterable*, the
    leftover items are not returned:

    >>> list(split_into([1,2,3,4,5,6], [2,3]))
    [[1, 2], [3, 4, 5]]

    If the sum of *sizes* is larger, the overrunning list is short and any
    further lists are empty:

    >>> list(split_into([1,2,3,4], [1,2,3,4]))
    [[1], [2, 3], [4], []]

    A ``None`` in *sizes* consumes the rest of *iterable*, the same way
    ``itertools.islice`` does:

    >>> list(split_into([1,2,3,4,5,6,7,8,9,0], [2,3,None]))
    [[1, 2], [3, 4, 5], [6, 7, 8, 9, 0]]

    Useful for grouping a series of items where group sizes are not
    uniform, e.g. a table row whose columns encode features of differing
    arity (such as an x,y,z point).
    """
    # An explicit iterator lets islice consume items incrementally, even
    # when the input is a generator.
    it = iter(iterable)

    for size in sizes:
        if size is None:
            # Drain everything that remains and stop.
            yield list(it)
            return
        yield list(islice(it, size))
+
+
def padded(iterable, fillvalue=None, n=None, next_multiple=False):
    """Yield the elements of *iterable* followed by enough copies of
    *fillvalue* that at least *n* items are emitted.

    >>> list(padded([1, 2, 3], '?', 5))
    [1, 2, 3, '?', '?']

    With *next_multiple* set, padding continues until the total count is a
    multiple of *n*:

    >>> list(padded([1, 2, 3, 4], n=3, next_multiple=True))
    [1, 2, 3, 4, None, None]

    With *n* set to ``None``, *fillvalue* is emitted forever.

    """
    it = iter(iterable)
    if n is None:
        yield from chain(it, repeat(fillvalue))
        return
    if n < 1:
        raise ValueError('n must be at least 1')

    emitted = 0
    for value in it:
        yield value
        emitted += 1

    # A negative count simply yields no padding.
    shortfall = (n - emitted) % n if next_multiple else n - emitted
    yield from repeat(fillvalue, shortfall)
+
+
def repeat_each(iterable, n=2):
    """Yield each element of *iterable* *n* times in a row.

    >>> list(repeat_each('ABC', 3))
    ['A', 'A', 'A', 'B', 'B', 'B', 'C', 'C', 'C']
    """
    # Lazily expand each item into an n-long run and flatten the runs.
    return chain.from_iterable(repeat(value, n) for value in iterable)
+
+
def repeat_last(iterable, default=None):
    """After the *iterable* is exhausted, keep yielding its last element.

    >>> list(islice(repeat_last(range(3)), 5))
    [0, 1, 2, 2, 2]

    If the iterable is empty, yield *default* forever::

    >>> list(islice(repeat_last(range(0), 42), 5))
    [42, 42, 42, 42, 42]

    """
    # Track the last item directly rather than comparing against the shared
    # module-level ``_marker`` sentinel. This is equivalent for all normal
    # inputs, removes the dependence on the module global, and behaves
    # correctly even if the iterable legitimately yields the sentinel object.
    item = default
    for item in iterable:
        yield item
    yield from repeat(item)
+
+
def distribute(n, iterable):
    """Deal the items of *iterable* round-robin into *n* smaller
    iterables.

    >>> group_1, group_2 = distribute(2, [1, 2, 3, 4, 5, 6])
    >>> list(group_1)
    [1, 3, 5]
    >>> list(group_2)
    [2, 4, 6]

    When the length of *iterable* is not evenly divisible by *n*, the
    returned iterables differ in length:

    >>> children = distribute(3, [1, 2, 3, 4, 5, 6, 7])
    >>> [list(c) for c in children]
    [[1, 4, 7], [2, 5], [3, 6]]

    When *iterable* has fewer than *n* items, the trailing iterables are
    empty:

    >>> children = distribute(5, [1, 2, 3])
    >>> [list(c) for c in children]
    [[1], [2], [3], [], []]

    Uses :func:`itertools.tee`, so storage may be significant. If the
    order of items within each group must match the original, see
    :func:`divide` instead.

    """
    if n < 1:
        raise ValueError('n must be at least 1')

    # Copy the stream n times; group k takes items k, k+n, k+2n, ...
    copies = tee(iterable, n)
    return [islice(copy, start, None, n) for start, copy in enumerate(copies)]
+
+
def stagger(iterable, offsets=(-1, 0, 1), longest=False, fillvalue=None):
    """Yield tuples whose `i`-th element is offset from *iterable* by the
    `i`-th entry of *offsets*.

    >>> list(stagger([0, 1, 2, 3]))
    [(None, 0, 1), (0, 1, 2), (1, 2, 3)]
    >>> list(stagger(range(8), offsets=(0, 2, 4)))
    [(0, 2, 4), (1, 3, 5), (2, 4, 6), (3, 5, 7)]

    By default the output stops when the final tuple element would run off
    the end of *iterable*; set *longest* to ``True`` to continue until the
    first element does::

    >>> list(stagger([0, 1, 2, 3], longest=True))
    [(None, 0, 1), (0, 1, 2), (1, 2, 3), (2, 3, None), (3, None, None)]

    Positions beyond either end of the sequence are filled with
    *fillvalue* (``None`` by default).

    """
    # One independent copy of the stream per offset; zip_offset does the
    # actual shifting and padding.
    copies = tee(iterable, len(offsets))
    return zip_offset(
        *copies, offsets=offsets, longest=longest, fillvalue=fillvalue
    )
+
+
class UnequalIterablesError(ValueError):
    """Raised when iterables zipped together turn out to differ in length.

    *details*, when given, is a ``(first_length, index, length)`` triple
    identifying the first mismatching iterable.
    """

    def __init__(self, details=None):
        msg = 'Iterables have different lengths'
        if details is not None:
            first_len, index, length = details
            msg += ': index 0 has length {}; index {} has length {}'.format(
                first_len, index, length
            )

        super().__init__(msg)
+
+
+def _zip_equal_generator(iterables):
+ for combo in zip_longest(*iterables, fillvalue=_marker):
+ for val in combo:
+ if val is _marker:
+ raise UnequalIterablesError()
+ yield combo
+
+
+def _zip_equal(*iterables):
+ # Check whether the iterables are all the same size.
+ try:
+ first_size = len(iterables[0])
+ for i, it in enumerate(iterables[1:], 1):
+ size = len(it)
+ if size != first_size:
+ break
+ else:
+ # If we didn't break out, we can use the built-in zip.
+ return zip(*iterables)
+
+ # If we did break out, there was a mismatch.
+ raise UnequalIterablesError(details=(first_size, i, size))
+ # If any one of the iterables didn't have a length, start reading
+ # them until one runs out.
+ except TypeError:
+ return _zip_equal_generator(iterables)
+
+
def zip_equal(*iterables):
    """``zip`` the input *iterables*, raising ``UnequalIterablesError``
    if their lengths differ.

    >>> it_1 = range(3)
    >>> it_2 = iter('abc')
    >>> list(zip_equal(it_1, it_2))
    [(0, 'a'), (1, 'b'), (2, 'c')]

    >>> it_1 = range(3)
    >>> it_2 = iter('abcd')
    >>> list(zip_equal(it_1, it_2)) # doctest: +IGNORE_EXCEPTION_DETAIL
    Traceback (most recent call last):
    ...
    more_itertools.more.UnequalIterablesError: Iterables have different
    lengths

    """
    # Python 3.10a6+ has zip(..., strict=True) built in; nudge users there.
    if hexversion >= 0x30A00A6:
        msg = (
            'zip_equal will be removed in a future version of '
            'more-itertools. Use the builtin zip function with '
            'strict=True instead.'
        )
        warnings.warn(msg, DeprecationWarning)

    return _zip_equal(*iterables)
+
+
def zip_offset(*iterables, offsets, longest=False, fillvalue=None):
    """``zip`` the input *iterables*, shifting the `i`-th one by the
    `i`-th entry of *offsets*.

    >>> list(zip_offset('0123', 'abcdef', offsets=(0, 1)))
    [('0', 'b'), ('1', 'c'), ('2', 'd'), ('3', 'e')]

    A lightweight way to analyze series with a lead/lag relationship,
    without reaching for SciPy or pandas.

    By default the output stops with the shortest shifted iterable; set
    *longest* to ``True`` to continue to the longest one:

    >>> list(zip_offset('0123', 'abcdef', offsets=(0, 1), longest=True))
    [('0', 'b'), ('1', 'c'), ('2', 'd'), ('3', 'e'), (None, 'f')]

    Positions beyond the ends of a sequence are filled with *fillvalue*
    (``None`` by default).

    """
    if len(iterables) != len(offsets):
        raise ValueError("Number of iterables and offsets didn't match")

    shifted = []
    for source, offset in zip(iterables, offsets):
        if offset > 0:
            # Positive offset: skip the first `offset` items.
            shifted.append(islice(source, offset, None))
        elif offset < 0:
            # Negative offset: pad the front with fill values.
            shifted.append(chain(repeat(fillvalue, -offset), source))
        else:
            shifted.append(source)

    if longest:
        return zip_longest(*shifted, fillvalue=fillvalue)
    return zip(*shifted)
+
+
def sort_together(iterables, key_list=(0,), key=None, reverse=False):
    """Sort the input iterables together, using the iterables at the
    offsets in *key_list* as the sort priority. All iterables are trimmed
    to the length of the shortest.

    Works like a spreadsheet sort: each iterable is a column, and
    *key_list* selects which columns drive the ordering.

    By default the ``0``-th iterable is the sort key::

    >>> iterables = [(4, 3, 2, 1), ('a', 'b', 'c', 'd')]
    >>> sort_together(iterables)
    [(1, 2, 3, 4), ('d', 'c', 'b', 'a')]

    Multiple entries in *key_list* dictate tie-breaking::

    >>> iterables = [(3, 1, 2), (0, 1, 0), ('c', 'b', 'a')]
    >>> sort_together(iterables, key_list=(1, 2))
    [(2, 3, 1), (0, 0, 1), ('a', 'c', 'b')]

    A *key* function may be supplied; it receives the elements of the
    iterables named by *key_list*::

    >>> names = ('a', 'b', 'c')
    >>> lengths = (1, 2, 3)
    >>> widths = (5, 2, 1)
    >>> def area(length, width):
    ...     return length * width
    >>> sort_together([names, lengths, widths], key_list=(1, 2), key=area)
    [('c', 'b', 'a'), (3, 2, 1), (1, 2, 5)]

    Set *reverse* to ``True`` for descending order.

    >>> sort_together([(1, 2, 3), ('c', 'b', 'a')], reverse=True)
    [(3, 2, 1), ('a', 'b', 'c')]

    """
    if key is None:
        # No key function: sort rows directly on the selected columns.
        sort_key = itemgetter(*key_list)
    else:
        key_list = list(key_list)
        if len(key_list) == 1:
            # Single column: hand that one value to the key function.
            offset = key_list[0]

            def sort_key(row):
                return key(row[offset])

        else:
            # Several columns: unpack them as positional arguments.
            pick = itemgetter(*key_list)

            def sort_key(row):
                return key(*pick(row))

    return list(
        zip(*sorted(zip(*iterables), key=sort_key, reverse=reverse))
    )
+
+
def unzip(iterable):
    """The inverse of :func:`zip`, this function disaggregates the elements
    of the zipped *iterable*.

    The ``i``-th iterable contains the ``i``-th element from each element
    of the zipped iterable. The first element is used to determine the
    length of the remaining elements.

    >>> iterable = [('a', 1), ('b', 2), ('c', 3), ('d', 4)]
    >>> letters, numbers = unzip(iterable)
    >>> list(letters)
    ['a', 'b', 'c', 'd']
    >>> list(numbers)
    [1, 2, 3, 4]

    This is similar to using ``zip(*iterable)``, but it avoids reading
    *iterable* into memory. Note, however, that this function uses
    :func:`itertools.tee` and thus may require significant storage.

    """
    # Peek at the first element (without consuming it) to learn how many
    # output iterables are needed.
    head, iterable = spy(iter(iterable))
    if not head:
        # empty iterable, e.g. zip([], [], [])
        return ()
    # spy returns a one-length iterable as head
    head = head[0]
    # One tee'd copy of the stream per output position.
    iterables = tee(iterable, len(head))

    def itemgetter(i):
        # NOTE(review): shadows operator.itemgetter deliberately; unlike it,
        # this getter ends iteration instead of raising on short elements.
        # Raising StopIteration from the mapped function terminates the
        # enclosing map() iterator.
        def getter(obj):
            try:
                return obj[i]
            except IndexError:
                # basically if we have an iterable like
                # iter([(1, 2, 3), (4, 5), (6,)])
                # the second unzipped iterable would fail at the third tuple
                # since it would try to access tup[1]
                # same with the third unzipped iterable and the second tuple
                # to support these "improperly zipped" iterables,
                # we create a custom itemgetter
                # which just stops the unzipped iterables
                # at first length mismatch
                raise StopIteration

        return getter

    return tuple(map(itemgetter(i), it) for i, it in enumerate(iterables))
+
+
def divide(n, iterable):
    """Split the elements of *iterable* into *n* contiguous parts,
    preserving order.

    >>> group_1, group_2 = divide(2, [1, 2, 3, 4, 5, 6])
    >>> list(group_1)
    [1, 2, 3]
    >>> list(group_2)
    [4, 5, 6]

    When the length of *iterable* is not evenly divisible by *n*, the
    returned iterables differ in length:

    >>> children = divide(3, [1, 2, 3, 4, 5, 6, 7])
    >>> [list(c) for c in children]
    [[1, 2, 3], [4, 5], [6, 7]]

    When *iterable* has fewer than *n* items, the trailing iterables are
    empty:

    >>> children = divide(5, [1, 2, 3])
    >>> [list(c) for c in children]
    [[1], [2], [3], [], []]

    The iterable is fully consumed before returning, which may require
    significant storage. If order does not matter, see :func:`distribute`,
    which does not pull everything into memory first.

    """
    if n < 1:
        raise ValueError('n must be at least 1')

    # Materialize only when the input is not already sliceable.
    try:
        iterable[:0]
    except TypeError:
        seq = tuple(iterable)
    else:
        seq = iterable

    # The first `extra` parts get one item more than the rest.
    size, extra = divmod(len(seq), n)

    parts = []
    start = 0
    for index in range(n):
        stop = start + size + (1 if index < extra else 0)
        parts.append(iter(seq[start:stop]))
        start = stop

    return parts
+
+
def always_iterable(obj, base_type=(str, bytes)):
    """Return an iterator over *obj* if it is iterable, or over the
    one-item tuple ``(obj,)`` otherwise::

    >>> obj = (1, 2, 3)
    >>> list(always_iterable(obj))
    [1, 2, 3]

    >>> obj = 1
    >>> list(always_iterable(obj))
    [1]

    ``None`` produces an empty iterator:

    >>> obj = None
    >>> list(always_iterable(None))
    []

    Text and binary strings are treated as atomic by default::

    >>> obj = 'foo'
    >>> list(always_iterable(obj))
    ['foo']

    Any type matching *base_type* is likewise treated as a single unit.

    >>> obj = {'a': 1}
    >>> list(always_iterable(obj)) # Iterate over the dict's keys
    ['a']
    >>> list(always_iterable(obj, base_type=dict)) # Treat dicts as a unit
    [{'a': 1}]

    Set *base_type* to ``None`` to disable all special-casing:

    >>> obj = 'foo'
    >>> list(always_iterable(obj, base_type=None))
    ['f', 'o', 'o']
    """
    if obj is None:
        items = ()
    elif base_type is not None and isinstance(obj, base_type):
        items = (obj,)
    else:
        try:
            return iter(obj)
        except TypeError:
            # Not iterable at all: wrap it.
            items = (obj,)
    return iter(items)
+
+
def adjacent(predicate, iterable, distance=1):
    """Yield ``(bool, item)`` pairs where the bool is ``True`` if *item*
    satisfies *predicate* or sits within *distance* positions of an item
    that does.

    For example, to flag items adjacent to a ``3``::

    >>> list(adjacent(lambda x: x == 3, range(6)))
    [(False, 0), (False, 1), (True, 2), (True, 3), (True, 4), (False, 5)]

    Increase *distance* to widen the neighborhood:

    >>> list(adjacent(lambda x: x == 3, range(6), distance=2))
    [(False, 0), (True, 1), (True, 2), (True, 3), (True, 4), (True, 5)]

    Handy for contextualizing search hits, e.g. showing the lines around a
    diff change. *predicate* is called exactly once per item.

    See also :func:`groupby_transform`, which pairs well with this
    function for grouping runs that share the same bool.

    """
    # distance=0 is allowed so results can be checked against plain map().
    if distance < 0:
        raise ValueError('distance must be at least 0')

    probe, originals = tee(iterable)
    pad = [False] * distance
    # Pad both ends so the sliding window is well-defined at the edges.
    hits = chain(pad, map(predicate, probe), pad)
    nearby = map(any, windowed(hits, 2 * distance + 1))
    return zip(nearby, originals)
+
+
def groupby_transform(iterable, keyfunc=None, valuefunc=None, reducefunc=None):
    """Like :func:`itertools.groupby`, but with optional transformations
    applied to the grouped data.

    * *keyfunc* computes the grouping key for each item
    * *valuefunc* transforms each grouped item
    * *reducefunc* transforms each whole group

    >>> iterable = 'aAAbBBcCC'
    >>> keyfunc = lambda k: k.upper()
    >>> valuefunc = lambda v: v.lower()
    >>> reducefunc = lambda g: ''.join(g)
    >>> list(groupby_transform(iterable, keyfunc, valuefunc, reducefunc))
    [('A', 'aaa'), ('B', 'bbb'), ('C', 'ccc')]

    Any argument left unspecified acts as an identity function.

    One common use is grouping one iterable by the values of another:
    :func:`zip` them, then extract keys and values with *keyfunc* and
    *valuefunc*::

    >>> from operator import itemgetter
    >>> keys = [0, 0, 1, 1, 1, 2, 2, 2, 3]
    >>> values = 'abcdefghi'
    >>> iterable = zip(keys, values)
    >>> grouper = groupby_transform(iterable, itemgetter(0), itemgetter(1))
    >>> [(k, ''.join(g)) for k, g in grouper]
    [(0, 'ab'), (1, 'cde'), (2, 'fgh'), (3, 'i')]

    As with ``groupby``, only adjacent items are grouped, so sort by the
    key function first if duplicate groups are unwanted.

    """
    groups = groupby(iterable, keyfunc)
    if valuefunc:
        groups = ((k, map(valuefunc, members)) for k, members in groups)
    if reducefunc:
        groups = ((k, reducefunc(members)) for k, members in groups)

    return groups
+
+
class numeric_range(abc.Sequence, abc.Hashable):
    """An extension of the built-in ``range()`` function whose arguments can
    be any orderable numeric type.

    With only *stop* specified, *start* defaults to ``0`` and *step*
    defaults to ``1``. The output items will match the type of *stop*:

    >>> list(numeric_range(3.5))
    [0.0, 1.0, 2.0, 3.0]

    With only *start* and *stop* specified, *step* defaults to ``1``. The
    output items will match the type of *start*:

    >>> from decimal import Decimal
    >>> start = Decimal('2.1')
    >>> stop = Decimal('5.1')
    >>> list(numeric_range(start, stop))
    [Decimal('2.1'), Decimal('3.1'), Decimal('4.1')]

    With *start*, *stop*, and *step* specified the output items will match
    the type of ``start + step``:

    >>> from fractions import Fraction
    >>> start = Fraction(1, 2)  # Start at 1/2
    >>> stop = Fraction(5, 2)  # End at 5/2
    >>> step = Fraction(1, 2)  # Count by 1/2
    >>> list(numeric_range(start, stop, step))
    [Fraction(1, 2), Fraction(1, 1), Fraction(3, 2), Fraction(2, 1)]

    If *step* is zero, ``ValueError`` is raised. Negative steps are supported:

    >>> list(numeric_range(3, -1, -1.0))
    [3.0, 2.0, 1.0, 0.0]

    Be aware of the limitations of floating point numbers; the representation
    of the yielded numbers may be surprising.

    ``datetime.datetime`` objects can be used for *start* and *stop*, if *step*
    is a ``datetime.timedelta`` object:

    >>> import datetime
    >>> start = datetime.datetime(2019, 1, 1)
    >>> stop = datetime.datetime(2019, 1, 3)
    >>> step = datetime.timedelta(days=1)
    >>> items = iter(numeric_range(start, stop, step))
    >>> next(items)
    datetime.datetime(2019, 1, 1, 0, 0)
    >>> next(items)
    datetime.datetime(2019, 1, 2, 0, 0)

    """

    # Shared hash for all empty ranges: empty ranges compare equal, so they
    # must hash equal as well.
    _EMPTY_HASH = hash(range(0, 0))

    def __init__(self, *args):
        # Mirror range()'s calling conventions: (stop), (start, stop), or
        # (start, stop, step). Defaults are built from the operands' own
        # types so e.g. datetime - datetime yields a timedelta step.
        argc = len(args)
        if argc == 1:
            (self._stop,) = args
            self._start = type(self._stop)(0)
            self._step = type(self._stop - self._start)(1)
        elif argc == 2:
            self._start, self._stop = args
            self._step = type(self._stop - self._start)(1)
        elif argc == 3:
            self._start, self._stop, self._step = args
        elif argc == 0:
            raise TypeError(
                'numeric_range expected at least '
                '1 argument, got {}'.format(argc)
            )
        else:
            raise TypeError(
                'numeric_range expected at most '
                '3 arguments, got {}'.format(argc)
            )

        # A zero of the step's type, used for all sign/remainder checks.
        self._zero = type(self._step)(0)
        if self._step == self._zero:
            raise ValueError('numeric_range() arg 3 must not be zero')
        self._growing = self._step > self._zero
        self._init_len()

    def __bool__(self):
        # Truthy iff the range contains at least one value.
        if self._growing:
            return self._start < self._stop
        else:
            return self._start > self._stop

    def __contains__(self, elem):
        # Membership requires being inside the half-open interval AND
        # landing exactly on a multiple of the step from start.
        if self._growing:
            if self._start <= elem < self._stop:
                return (elem - self._start) % self._step == self._zero
        else:
            if self._start >= elem > self._stop:
                return (self._start - elem) % (-self._step) == self._zero

        return False

    def __eq__(self, other):
        # Ranges are equal when they generate the same sequence: all empty
        # ranges are equal; otherwise compare start, step, and last value
        # (stop itself may differ, e.g. range(0, 5, 2) == range(0, 6, 2)).
        if isinstance(other, numeric_range):
            empty_self = not bool(self)
            empty_other = not bool(other)
            if empty_self or empty_other:
                return empty_self and empty_other  # True if both empty
            else:
                return (
                    self._start == other._start
                    and self._step == other._step
                    and self._get_by_index(-1) == other._get_by_index(-1)
                )
        else:
            return False

    def __getitem__(self, key):
        # Integer indexing delegates to _get_by_index; slices produce a new
        # numeric_range with clamped bounds, like built-in range slicing.
        if isinstance(key, int):
            return self._get_by_index(key)
        elif isinstance(key, slice):
            step = self._step if key.step is None else key.step * self._step

            if key.start is None or key.start <= -self._len:
                start = self._start
            elif key.start >= self._len:
                start = self._stop
            else:  # -self._len < key.start < self._len
                start = self._get_by_index(key.start)

            if key.stop is None or key.stop >= self._len:
                stop = self._stop
            elif key.stop <= -self._len:
                stop = self._start
            else:  # -self._len < key.stop < self._len
                stop = self._get_by_index(key.stop)

            return numeric_range(start, stop, step)
        else:
            raise TypeError(
                'numeric range indices must be '
                'integers or slices, not {}'.format(type(key).__name__)
            )

    def __hash__(self):
        # Hash on (start, last value, step) to stay consistent with __eq__.
        if self:
            return hash((self._start, self._get_by_index(-1), self._step))
        else:
            return self._EMPTY_HASH

    def __iter__(self):
        # Generate start + n*step and cut off once stop is passed; the
        # comparison direction depends on the step's sign.
        values = (self._start + (n * self._step) for n in count())
        if self._growing:
            return takewhile(partial(gt, self._stop), values)
        else:
            return takewhile(partial(lt, self._stop), values)

    def __len__(self):
        return self._len

    def _init_len(self):
        # Compute the length once, up front. Normalize to a growing range
        # so a single euclidean division covers both step signs.
        if self._growing:
            start = self._start
            stop = self._stop
            step = self._step
        else:
            start = self._stop
            stop = self._start
            step = -self._step
        distance = stop - start
        if distance <= self._zero:
            self._len = 0
        else:  # distance > 0 and step > 0: regular euclidean division
            q, r = divmod(distance, step)
            self._len = int(q) + int(r != self._zero)

    def __reduce__(self):
        # Pickle support: rebuild from the three constructor arguments.
        return numeric_range, (self._start, self._stop, self._step)

    def __repr__(self):
        # Omit the step when it is the default of 1, matching range()'s style.
        if self._step == 1:
            return "numeric_range({}, {})".format(
                repr(self._start), repr(self._stop)
            )
        else:
            return "numeric_range({}, {}, {})".format(
                repr(self._start), repr(self._stop), repr(self._step)
            )

    def __reversed__(self):
        # Walk from the last value back past start, negating the step.
        return iter(
            numeric_range(
                self._get_by_index(-1), self._start - self._step, -self._step
            )
        )

    def count(self, value):
        # Values are unique within a range, so the count is 0 or 1.
        return int(value in self)

    def index(self, value):
        # Same interval-plus-divisibility test as __contains__, but the
        # quotient of the division is the index.
        if self._growing:
            if self._start <= value < self._stop:
                q, r = divmod(value - self._start, self._step)
                if r == self._zero:
                    return int(q)
        else:
            if self._start >= value > self._stop:
                q, r = divmod(self._start - value, -self._step)
                if r == self._zero:
                    return int(q)

        raise ValueError("{} is not in numeric range".format(value))

    def _get_by_index(self, i):
        # Support negative indices, then bounds-check against the length.
        if i < 0:
            i += self._len
        if i < 0 or i >= self._len:
            raise IndexError("numeric range object index out of range")
        return self._start + i * self._step
+
+
def count_cycle(iterable, n=None):
    """Cycle through *iterable* up to *n* times (forever when *n* is
    omitted), pairing each item with the number of completed cycles.

    >>> list(count_cycle('AB', 3))
    [(0, 'A'), (0, 'B'), (1, 'A'), (1, 'B'), (2, 'A'), (2, 'B')]

    """
    pool = tuple(iterable)
    if not pool:
        # Nothing to cycle over.
        return iter(())
    rounds = count() if n is None else range(n)
    return ((cycle_no, item) for cycle_no in rounds for item in pool)
+
+
def mark_ends(iterable):
    """Yield 3-tuples of the form ``(is_first, is_last, item)``.

    >>> list(mark_ends('ABC'))
    [(True, False, 'A'), (False, False, 'B'), (False, True, 'C')]

    Useful for taking special action on the first and/or last items of an
    iterable, e.g. skipping a header and footer row while summing the rest.
    A single-element iterable yields ``(True, True, item)``; an empty one
    yields nothing.
    """
    iterator = iter(iterable)

    try:
        current = next(iterator)
    except StopIteration:
        return

    first = True
    # Look one item ahead: while another item exists, `current` is not last.
    for upcoming in iterator:
        yield first, False, current
        current = upcoming
        first = False

    # The look-ahead ran dry, so `current` is the final item.
    yield first, True, current
+
+
def locate(iterable, pred=bool, window_size=None):
    """Yield the index of each item in *iterable* for which *pred* returns
    ``True``.

    *pred* defaults to :func:`bool`, which selects truthy items:

    >>> list(locate([0, 1, 1, 0, 1, 0, 0]))
    [1, 2, 4]

    >>> list(locate(['a', 'b', 'c', 'b'], lambda x: x == 'b'))
    [1, 3]

    If *window_size* is given, *pred* is called with that many items at a
    time, which enables searching for sub-sequences:

    >>> iterable = [0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3]
    >>> pred = lambda *args: args == (1, 2, 3)
    >>> list(locate(iterable, pred=pred, window_size=3))
    [1, 5, 9]

    Raises ValueError if *window_size* is less than 1.
    """
    if window_size is None:
        return (i for i, item in enumerate(iterable) if pred(item))

    if window_size < 1:
        raise ValueError('window size must be at least 1')

    # Pad short windows with a sentinel so every window has the same size.
    windows = windowed(iterable, window_size, fillvalue=_marker)
    return (i for i, w in enumerate(windows) if pred(*w))
+
+
def lstrip(iterable, pred):
    """Yield the items from *iterable*, but strip any from the beginning
    for which *pred* returns ``True``.

    For example, to remove a set of items from the start of an iterable:

    >>> iterable = (None, False, None, 1, 2, None, 3, False, None)
    >>> pred = lambda x: x in {None, False, ''}
    >>> list(lstrip(iterable, pred))
    [1, 2, None, 3, False, None]

    This function is analogous to :func:`str.lstrip`, and is essentially
    a wrapper for :func:`itertools.dropwhile`.

    """
    return dropwhile(pred, iterable)
+
+
def rstrip(iterable, pred):
    """Yield the items from *iterable*, but strip any from the end
    for which *pred* returns ``True``.

    For example, to remove a set of items from the end of an iterable:

    >>> iterable = (None, False, None, 1, 2, None, 3, False, None)
    >>> pred = lambda x: x in {None, False, ''}
    >>> list(rstrip(iterable, pred))
    [None, False, None, 1, 2, None, 3]

    This function is analogous to :func:`str.rstrip`.

    """
    # Buffer candidate-for-stripping items; flush the buffer only when a
    # non-matching item proves they were not at the end.
    buffered = []
    for item in iterable:
        if pred(item):
            buffered.append(item)
        else:
            if buffered:
                yield from buffered
                buffered = []
            yield item
+
+
def strip(iterable, pred):
    """Yield the items from *iterable*, but strip any from the
    beginning and end for which *pred* returns ``True``.

    For example, to remove a set of items from both ends of an iterable:

    >>> iterable = (None, False, None, 1, 2, None, 3, False, None)
    >>> pred = lambda x: x in {None, False, ''}
    >>> list(strip(iterable, pred))
    [1, 2, None, 3]

    This function is analogous to :func:`str.strip`.

    """
    # Trim the front first, then trim the back of what remains.
    trimmed = lstrip(iterable, pred)
    return rstrip(trimmed, pred)
+
+
class islice_extended:
    """An extension of :func:`itertools.islice` that supports negative values
    for *stop*, *start*, and *step*.

    >>> iterable = iter('abcdefgh')
    >>> list(islice_extended(iterable, -4, -1))
    ['e', 'f', 'g']

    Slices with negative values require some caching of *iterable*, but this
    class takes care to minimize the amount of memory required. For example,
    a negative step works with an infinite iterator:

    >>> from itertools import count
    >>> list(islice_extended(count(), 110, 99, -2))
    [110, 108, 106, 104, 102, 100]

    Slice notation is supported directly:

    >>> iterable = map(str, count())
    >>> it = islice_extended(iterable)[10:20:2]
    >>> list(it)
    ['10', '12', '14', '16', '18']

    """

    def __init__(self, iterable, *args):
        source = iter(iterable)
        # With slice arguments, wrap the source in the slicing helper;
        # without them, expose the source unchanged.
        self._iterable = (
            _islice_helper(source, slice(*args)) if args else source
        )

    def __iter__(self):
        return self

    def __next__(self):
        return next(self._iterable)

    def __getitem__(self, key):
        if not isinstance(key, slice):
            raise TypeError(
                'islice_extended.__getitem__ argument must be a slice'
            )
        return islice_extended(_islice_helper(self._iterable, key))
+
+
def _islice_helper(it, s):
    """Yield items from iterator *it* selected by the built-in slice *s*,
    supporting negative start/stop/step.

    Negative values require buffering part of *it* in a deque, but only as
    much as the slice actually needs. Used by :class:`islice_extended`.
    """
    start = s.start
    stop = s.stop
    if s.step == 0:
        raise ValueError('step argument must be a non-zero integer or None.')
    step = s.step or 1

    if step > 0:
        start = 0 if (start is None) else start

        if start < 0:
            # Consume all but the last -start items
            cache = deque(enumerate(it, 1), maxlen=-start)
            len_iter = cache[-1][0] if cache else 0

            # Adjust start to be positive
            i = max(len_iter + start, 0)

            # Adjust stop to be positive
            if stop is None:
                j = len_iter
            elif stop >= 0:
                j = min(stop, len_iter)
            else:
                j = max(len_iter + stop, 0)

            # Slice the cache
            n = j - i
            if n <= 0:
                return

            for index, item in islice(cache, 0, n, step):
                yield item
        elif (stop is not None) and (stop < 0):
            # Advance to the start position
            next(islice(it, start, start), None)

            # When stop is negative, we have to carry -stop items while
            # iterating
            cache = deque(islice(it, -stop), maxlen=-stop)

            for index, item in enumerate(it):
                cached_item = cache.popleft()
                if index % step == 0:
                    yield cached_item
                cache.append(item)
        else:
            # When both start and stop are positive we have the normal case
            yield from islice(it, start, stop, step)
    else:
        start = -1 if (start is None) else start

        if (stop is not None) and (stop < 0):
            # Consume all but the last items
            n = -stop - 1
            cache = deque(enumerate(it, 1), maxlen=n)
            len_iter = cache[-1][0] if cache else 0

            # If start and stop are both negative they are comparable and
            # we can just slice. Otherwise we can adjust start to be negative
            # and then slice.
            if start < 0:
                i, j = start, stop
            else:
                i, j = min(start - len_iter, -1), None

            for index, item in list(cache)[i:j:step]:
                yield item
        else:
            # Advance to the stop position
            if stop is not None:
                m = stop + 1
                next(islice(it, m, m), None)

            # stop is positive, so if start is negative they are not comparable
            # and we need the rest of the items.
            if start < 0:
                i = start
                n = None
            # stop is None and start is positive, so we just need items up to
            # the start index.
            elif stop is None:
                i = None
                n = start + 1
            # Both stop and start are positive, so they are comparable.
            else:
                i = None
                n = start - stop
                if n <= 0:
                    return

            cache = list(islice(it, n))

            yield from cache[i::step]
+
+
def always_reversible(iterable):
    """An extension of :func:`reversed` that supports all iterables, not
    just those which implement the ``Reversible`` or ``Sequence`` protocols.

    >>> print(*always_reversible(x for x in range(3)))
    2 1 0

    If the iterable is already reversible, the result of :func:`reversed`
    is returned directly. Otherwise the remaining items are cached in a
    list (which may require significant storage) and yielded in reverse.
    """
    try:
        return reversed(iterable)
    except TypeError:
        pass
    # Not natively reversible; materialize and reverse the copy.
    return reversed(list(iterable))
+
+
def consecutive_groups(iterable, ordering=lambda x: x):
    """Yield groups of consecutive items using :func:`itertools.groupby`.
    The *ordering* function determines whether two items are adjacent by
    returning their position.

    By default, the ordering function is the identity function, which is
    suitable for finding runs of numbers:

    >>> iterable = [1, 10, 11, 12, 20, 30, 31, 32, 33, 40]
    >>> for group in consecutive_groups(iterable):
    ...     print(list(group))
    [1]
    [10, 11, 12]
    [20]
    [30, 31, 32, 33]
    [40]

    For runs of adjacent letters, try the :meth:`index` method of a string:

    >>> from string import ascii_lowercase
    >>> for group in consecutive_groups('abcdfgilmnop', ascii_lowercase.index):
    ...     print(list(group))
    ['a', 'b', 'c', 'd']
    ['f', 'g']
    ['i']
    ['l', 'm', 'n', 'o', 'p']

    Each group is a lazy iterator that shares its source with *iterable*;
    advancing to the next group invalidates the previous one unless its
    elements were copied (e.g. into a ``list``).
    """
    # Consecutive items have positions that increase in lockstep with their
    # enumeration index, so (index - position) is constant within a run.
    def run_key(pair):
        index, item = pair
        return index - ordering(item)

    for _, run in groupby(enumerate(iterable), key=run_key):
        yield map(itemgetter(1), run)
+
+
def difference(iterable, func=sub, *, initial=None):
    """This function is the inverse of :func:`itertools.accumulate`. By
    default it computes the first difference of *iterable* using
    :func:`operator.sub`:

    >>> from itertools import accumulate
    >>> iterable = accumulate([0, 1, 2, 3, 4])  # produces 0, 1, 3, 6, 10
    >>> list(difference(iterable))
    [0, 1, 2, 3, 4]

    Other functions may be given; they are applied as::

        A, B, C, D, ... --> A, func(B, A), func(C, B), func(D, C), ...

    >>> list(difference([1, 2, 6, 24, 120], lambda x, y: x // y))
    [1, 2, 3, 4, 5]

    If the *initial* keyword is set, the first element is skipped when
    computing successive differences.

    >>> it = [10, 11, 13, 16]  # from accumulate([1, 2, 3], initial=10)
    >>> list(difference(it, initial=10))
    [1, 2, 3]

    """
    prev_items, curr_items = tee(iterable)
    try:
        head = [next(curr_items)]
    except StopIteration:
        return iter([])

    # With an explicit initial value, the first element is not emitted.
    if initial is not None:
        head = []

    return chain(head, starmap(func, zip(curr_items, prev_items)))
+
+
class SequenceView(Sequence):
    """Return a read-only view of the sequence object *target*.

    :class:`SequenceView` objects are analogous to Python's built-in
    "dictionary view" types: they provide a dynamic view of a sequence's
    items, so when the sequence updates, so does the view.

    >>> seq = ['0', '1', '2']
    >>> view = SequenceView(seq)
    >>> view
    SequenceView(['0', '1', '2'])
    >>> seq.append('3')
    >>> view
    SequenceView(['0', '1', '2', '3'])

    Views support indexing, slicing, and length queries like the underlying
    sequence, but do not allow assignment. They are a storage-cheap
    alternative to copying.
    """

    def __init__(self, target):
        # Only true sequences can back a view.
        if not isinstance(target, Sequence):
            raise TypeError
        self._target = target

    def __getitem__(self, index):
        return self._target[index]

    def __len__(self):
        return len(self._target)

    def __repr__(self):
        return '{}({})'.format(type(self).__name__, repr(self._target))
+
+
class seekable:
    """Wrap an iterator to allow seeking backward and forward through its
    items.

    Items from the source iterable are cached as they are consumed, so
    previously-seen items can be revisited. Call :meth:`seek` with an index
    to move to that position; seeking to ``0`` resets the iterator:

    >>> from itertools import count
    >>> it = seekable((str(n) for n in count()))
    >>> next(it), next(it), next(it)
    ('0', '1', '2')
    >>> it.seek(0)
    >>> next(it), next(it), next(it)
    ('0', '1', '2')
    >>> next(it)
    '3'

    Seeking past the end of the source is allowed (the source is consumed
    and cached along the way). :meth:`peek` looks at the next item without
    advancing; :meth:`elements` exposes the cache as a
    :class:`SequenceView`; calling :func:`bool` on the object reports
    whether more items remain:

    >>> it = seekable('1234')
    >>> it.peek()
    '1'
    >>> bool(it)
    True
    >>> list(it)
    ['1', '2', '3', '4']
    >>> bool(it)
    False
    >>> it.peek(default='empty')
    'empty'

    By default the cache grows without bound, so beware of wrapping very
    large or infinite iterables. Supply *maxlen* to bound the cache, which
    also bounds how far back you can seek:

    >>> from itertools import count
    >>> it = seekable((str(n) for n in count()), maxlen=2)
    >>> next(it), next(it), next(it), next(it)
    ('0', '1', '2', '3')
    >>> list(it.elements())
    ['2', '3']
    >>> it.seek(0)
    >>> next(it), next(it)
    ('2', '3')

    """

    def __init__(self, iterable, maxlen=None):
        self._source = iter(iterable)
        # An unbounded list by default; a bounded deque when maxlen is given.
        self._cache = [] if maxlen is None else deque([], maxlen)
        # None means "at the live end of the source"; an int means we are
        # replaying items from that position in the cache.
        self._index = None

    def __iter__(self):
        return self

    def __next__(self):
        # Replay from the cache while an index is set.
        if self._index is not None:
            try:
                item = self._cache[self._index]
            except IndexError:
                # Ran off the end of the cache; fall through to the source.
                self._index = None
            else:
                self._index += 1
                return item

        item = next(self._source)
        self._cache.append(item)
        return item

    def __bool__(self):
        try:
            self.peek()
            return True
        except StopIteration:
            return False

    def peek(self, default=_marker):
        """Return the next item without advancing, or *default* if
        exhausted (raising StopIteration when no default is given)."""
        try:
            peeked = next(self)
        except StopIteration:
            if default is _marker:
                raise
            return default
        # Step the replay index back so the peeked item is produced again.
        if self._index is None:
            self._index = len(self._cache)
        self._index -= 1
        return peeked

    def elements(self):
        """Return a live, read-only view of the cache."""
        return SequenceView(self._cache)

    def seek(self, index):
        """Move to *index*, consuming (and caching) source items if the
        cache does not yet reach that far."""
        self._index = index
        shortfall = index - len(self._cache)
        if shortfall > 0:
            consume(self, shortfall)
+
+
class run_length:
    """
    :func:`run_length.encode` compresses an iterable with run-length
    encoding: it yields each distinct item paired with the number of times
    it was repeated:

    >>> list(run_length.encode('abbcccdddd'))
    [('a', 1), ('b', 2), ('c', 3), ('d', 4)]

    :func:`run_length.decode` is the inverse; it expands (item, count)
    pairs back into the original items:

    >>> compressed = [('a', 1), ('b', 2), ('c', 3), ('d', 4)]
    >>> list(run_length.decode(compressed))
    ['a', 'b', 'b', 'c', 'c', 'c', 'd', 'd', 'd', 'd']

    """

    @staticmethod
    def encode(iterable):
        return ((item, ilen(group)) for item, group in groupby(iterable))

    @staticmethod
    def decode(iterable):
        return chain.from_iterable(
            repeat(item, times) for item, times in iterable
        )
+
+
def exactly_n(iterable, n, predicate=bool):
    """Return ``True`` if exactly ``n`` items in the iterable are ``True``
    according to the *predicate* function.

    >>> exactly_n([True, True, False], 2)
    True
    >>> exactly_n([True, True, False], 1)
    False
    >>> exactly_n([0, 1, 2, 3, 4, 5], 3, lambda x: x < 3)
    True

    The iterable is advanced only until ``n + 1`` truthy items have been
    seen, so avoid calling this on infinite iterables.

    """
    # Count at most n + 1 matches; equality with n then implies "exactly n".
    truthy = filter(predicate, iterable)
    return sum(1 for _ in islice(truthy, n + 1)) == n
+
+
def circular_shifts(iterable):
    """Return a list of circular shifts of *iterable*.

    >>> circular_shifts(range(4))
    [(0, 1, 2, 3), (1, 2, 3, 0), (2, 3, 0, 1), (3, 0, 1, 2)]
    """
    items = list(iterable)
    # Rotate by slicing: items[i:] + items[:i] is the i-th circular shift.
    return [tuple(items[i:] + items[:i]) for i in range(len(items))]
+
+
def make_decorator(wrapping_func, result_index=0):
    """Return a decorator version of *wrapping_func*, which is a function
    that modifies an iterable. *result_index* is the position in that
    function's signature where the iterable goes.

    This lets you apply itertools-style transforms at function definition
    time, without changing the function's code. For example, a decorator
    version of :func:`chunked`:

    >>> from more_itertools import chunked
    >>> chunker = make_decorator(chunked, result_index=0)
    >>> @chunker(3)
    ... def iter_range(n):
    ...     return iter(range(n))
    ...
    >>> list(iter_range(9))
    [[0, 1, 2], [3, 4, 5], [6, 7, 8]]

    Or to only allow truthy items to be returned:

    >>> truth_serum = make_decorator(filter, result_index=1)
    >>> @truth_serum(bool)
    ... def boolean_test():
    ...     return [0, 1, '', ' ', False, True]
    ...
    >>> list(boolean_test())
    [1, ' ', True]

    The :func:`peekable` and :func:`seekable` wrappers also make for
    practical decorators.
    """
    # See https://sites.google.com/site/bbayles/index/decorator_factory for
    # notes on how this works.
    def decorator(*wrapping_args, **wrapping_kwargs):
        def outer_wrapper(f):
            def inner_wrapper(*args, **kwargs):
                # Call the decorated function, then splice its result into
                # the wrapping function's argument list at result_index.
                result = f(*args, **kwargs)
                call_args = list(wrapping_args)
                call_args.insert(result_index, result)
                return wrapping_func(*call_args, **wrapping_kwargs)

            return inner_wrapper

        return outer_wrapper

    return decorator
+
+
def map_reduce(iterable, keyfunc, valuefunc=None, reducefunc=None):
    """Return a dictionary that maps the items in *iterable* to categories
    defined by *keyfunc*, transforms them with *valuefunc*, and
    then summarizes them by category with *reducefunc*.

    *valuefunc* defaults to the identity function if it is unspecified.
    If *reducefunc* is unspecified, no summarization takes place:

    >>> keyfunc = lambda x: x.upper()
    >>> result = map_reduce('abbccc', keyfunc)
    >>> sorted(result.items())
    [('A', ['a']), ('B', ['b', 'b']), ('C', ['c', 'c', 'c'])]

    Specifying *valuefunc* transforms the categorized items:

    >>> result = map_reduce('abbccc', keyfunc, lambda x: 1)
    >>> sorted(result.items())
    [('A', [1]), ('B', [1, 1]), ('C', [1, 1, 1])]

    Specifying *reducefunc* summarizes the categorized items:

    >>> result = map_reduce('abbccc', keyfunc, lambda x: 1, sum)
    >>> sorted(result.items())
    [('A', 1), ('B', 2), ('C', 3)]

    Note that all items in the iterable are gathered into a list before
    the summarization step, which may require significant storage.

    The returned object is a :obj:`collections.defaultdict` with its
    ``default_factory`` set to ``None``, so it behaves like a normal
    dictionary.
    """
    if valuefunc is None:
        valuefunc = lambda x: x

    ret = defaultdict(list)
    for item in iterable:
        ret[keyfunc(item)].append(valuefunc(item))

    if reducefunc is not None:
        # Summarize in place; keys are unchanged so iteration is safe.
        for key in ret:
            ret[key] = reducefunc(ret[key])

    ret.default_factory = None
    return ret
+
+
def rlocate(iterable, pred=bool, window_size=None):
    """Yield the index of each item in *iterable* for which *pred* returns
    ``True``, starting from the right and moving left.

    *pred* defaults to :func:`bool`, which selects truthy items:

    >>> list(rlocate([0, 1, 1, 0, 1, 0, 0]))  # Truthy at 1, 2, and 4
    [4, 2, 1]

    >>> list(rlocate(iter('abcb'), lambda x: x == 'b'))
    [3, 1]

    If *window_size* is given, *pred* is called with that many items at a
    time, enabling sub-sequence searches:

    >>> iterable = [0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3]
    >>> pred = lambda *args: args == (1, 2, 3)
    >>> list(rlocate(iterable, pred=pred, window_size=3))
    [9, 5, 1]

    Beware: this function returns nothing for infinite iterables. If
    *iterable* is reversible, it is reversed and searched from the right;
    otherwise it is searched left-to-right and the results are reversed
    (which requires storing all of them).
    """
    if window_size is None:
        try:
            length = len(iterable)
        except TypeError:
            # Not sized/reversible; fall through to the generic path.
            pass
        else:
            return (
                length - 1 - i for i in locate(reversed(iterable), pred)
            )

    return reversed(list(locate(iterable, pred, window_size)))
+
+
def replace(iterable, pred, substitutes, count=None, window_size=1):
    """Yield the items from *iterable*, replacing the items for which *pred*
    returns ``True`` with the items from the iterable *substitutes*.

    >>> iterable = [1, 1, 0, 1, 1, 0, 1, 1]
    >>> list(replace(iterable, lambda x: x == 0, (2, 3)))
    [1, 1, 2, 3, 1, 1, 2, 3, 1, 1]

    If *count* is given, the number of replacements is limited. Use
    *window_size* to pass that many consecutive items to *pred*, which
    allows locating and replacing subsequences:

    >>> iterable = [0, 1, 2, 5, 0, 1, 2, 5]
    >>> pred = lambda *args: args == (0, 1, 2)
    >>> list(replace(iterable, pred, [3, 4], window_size=3))
    [3, 4, 5, 3, 4, 5]

    Raises ValueError if *window_size* is less than 1.
    """
    if window_size < 1:
        raise ValueError('window_size must be at least 1')

    # Materialize substitutes since they may be emitted more than once.
    substitutes = tuple(substitutes)

    # Pad the tail so the number of windows equals the number of items.
    padded = chain(iterable, [_marker] * (window_size - 1))
    windows = windowed(padded, window_size)

    n_replaced = 0
    for window in windows:
        # On a match (within the replacement limit), emit the substitutes
        # and skip the windows that overlap the matched span. E.g. with
        # window size 2 over (0, 1, 2, ...), a match on (0, 1) must also
        # discard (1, 2).
        if pred(*window):
            if (count is None) or (n_replaced < count):
                n_replaced += 1
                yield from substitutes
                consume(windows, window_size - 1)
                continue

        # No match (or limit reached): pass through the window's first item,
        # skipping the padding sentinel.
        if window and (window[0] is not _marker):
            yield window[0]
+
+
def partitions(iterable):
    """Yield all possible order-preserving partitions of *iterable*.

    >>> for part in partitions('abc'):
    ...     print([''.join(p) for p in part])
    ['abc']
    ['a', 'bc']
    ['ab', 'c']
    ['a', 'b', 'c']

    This is unrelated to :func:`partition`.

    """
    seq = list(iterable)
    size = len(seq)
    # Each subset of the interior indexes 1..size-1 is a set of cut points.
    for cut_points in powerset(range(1, size)):
        boundaries = (0,) + cut_points + (size,)
        yield [
            seq[start:end]
            for start, end in zip(boundaries, boundaries[1:])
        ]
+
+
def set_partitions(iterable, k=None):
    """
    Yield the set partitions of *iterable* into *k* parts. Set partitions
    are not order-preserving.

    >>> for part in set_partitions('abc', 2):
    ...     print([''.join(p) for p in part])
    ['a', 'bc']
    ['ab', 'c']
    ['b', 'ac']

    If *k* is not given, every set partition is generated:

    >>> for part in set_partitions('abc'):
    ...     print([''.join(p) for p in part])
    ['abc']
    ['a', 'bc']
    ['ab', 'c']
    ['b', 'ac']
    ['a', 'b', 'c']

    Raises ValueError if *k* is less than 1; yields nothing if *k* exceeds
    the number of items.
    """
    items = list(iterable)
    n = len(items)
    if k is not None:
        if k < 1:
            raise ValueError(
                "Can't partition in a negative or zero number of groups"
            )
        elif k > n:
            return

    def helper(seq, parts):
        # Recursively partition seq into exactly `parts` groups.
        if parts == 1:
            yield [seq]
        elif len(seq) == parts:
            yield [[x] for x in seq]
        else:
            head, *rest = seq
            # Either `head` forms its own group...
            for partition in helper(rest, parts - 1):
                yield [[head], *partition]
            # ...or it joins each group of a partition of the rest.
            for partition in helper(rest, parts):
                for i in range(len(partition)):
                    yield (
                        partition[:i]
                        + [[head] + partition[i]]
                        + partition[i + 1 :]
                    )

    if k is None:
        for parts in range(1, n + 1):
            yield from helper(items, parts)
    else:
        yield from helper(items, k)
+
+
class time_limited:
    """
    Yield items from *iterable* until *limit_seconds* have passed.
    If the time limit expires before all items have been yielded, the
    ``timed_out`` attribute will be set to ``True``.

    >>> from time import sleep
    >>> def generator():
    ...     yield 1
    ...     yield 2
    ...     sleep(0.2)
    ...     yield 3
    >>> iterable = time_limited(0.1, generator())
    >>> list(iterable)
    [1, 2]
    >>> iterable.timed_out
    True

    The clock is checked after each item is produced by the source and
    before it is yielded, so if generating the first item takes longer
    than the limit, the iterator runs that long and yields nothing.
    """

    def __init__(self, limit_seconds, iterable):
        if limit_seconds < 0:
            raise ValueError('limit_seconds must be positive')
        self.limit_seconds = limit_seconds
        self._iterable = iter(iterable)
        self._start_time = monotonic()
        self.timed_out = False

    def __iter__(self):
        return self

    def __next__(self):
        item = next(self._iterable)
        elapsed = monotonic() - self._start_time
        if elapsed > self.limit_seconds:
            self.timed_out = True
            raise StopIteration
        return item
+
+
def only(iterable, default=None, too_long=None):
    """If *iterable* has only one item, return it.
    If it has zero items, return *default*.
    If it has more than one item, raise the exception given by *too_long*,
    which is ``ValueError`` by default.

    >>> only([], default='missing')
    'missing'
    >>> only([1])
    1
    >>> only([1, 2])  # doctest: +IGNORE_EXCEPTION_DETAIL
    Traceback (most recent call last):
    ...
    ValueError: Expected exactly one item in iterable, but got 1, 2,
    and perhaps more.
    >>> only([1, 2], too_long=TypeError)  # doctest: +IGNORE_EXCEPTION_DETAIL
    Traceback (most recent call last):
    ...
    TypeError

    Note that :func:`only` attempts to advance *iterable* twice to ensure
    there is only one item. See :func:`spy` or :func:`peekable` to check
    iterable contents less destructively.
    """
    it = iter(iterable)
    first_value = next(it, default)

    try:
        second_value = next(it)
    except StopIteration:
        pass
    else:
        msg = (
            'Expected exactly one item in iterable, but got {!r}, {!r}, '
            'and perhaps more.'.format(first_value, second_value)
        )
        # If too_long is falsy (None), raise ValueError with the message.
        raise too_long or ValueError(msg)

    return first_value
+
+
def ichunked(iterable, n):
    """Break *iterable* into sub-iterables with *n* elements each.
    :func:`ichunked` is like :func:`chunked`, but it yields iterables
    instead of lists.

    If the sub-iterables are read in order, the elements of *iterable*
    won't be stored in memory.
    If they are read out of order, :func:`itertools.tee` is used to cache
    elements as necessary.

    >>> from itertools import count
    >>> all_chunks = ichunked(count(), 4)
    >>> c_1, c_2, c_3 = next(all_chunks), next(all_chunks), next(all_chunks)
    >>> list(c_2)  # c_1's elements have been cached; c_3's haven't been
    [4, 5, 6, 7]
    >>> list(c_1)
    [0, 1, 2, 3]
    >>> list(c_3)
    [8, 9, 10, 11]

    """
    source = iter(iterable)

    while True:
        # Peek one item to detect exhaustion of the source.
        head = next(source, _marker)
        if head is _marker:
            return

        # Fork the stream: one branch is the chunk we hand out, the other
        # remains the source for subsequent chunks.
        source, chunk = tee(chain([head], source))
        yield islice(chunk, n)

        # Skip past this chunk's items before producing the next one.
        consume(source, n)
+
+
def distinct_combinations(iterable, r):
    """Yield the distinct combinations of *r* items taken from *iterable*.

    >>> list(distinct_combinations([0, 0, 1], 2))
    [(0, 0), (0, 1)]

    Equivalent to ``set(combinations(iterable))``, except duplicates are
    never generated in the first place, which is much more efficient for
    larger input sequences. Raises ValueError if *r* is negative.
    """
    if r < 0:
        raise ValueError('r must be non-negative')
    if r == 0:
        yield ()
        return
    pool = tuple(iterable)
    # Depth-first search: stack[d] enumerates candidates (deduplicated by
    # value) for position d of the combination.
    stack = [unique_everseen(enumerate(pool), key=itemgetter(1))]
    combo = [None] * r
    depth = 0
    while stack:
        try:
            idx, value = next(stack[-1])
        except StopIteration:
            # Current level exhausted; backtrack.
            stack.pop()
            depth -= 1
            continue
        combo[depth] = value
        if depth + 1 == r:
            yield tuple(combo)
        else:
            # Descend: candidates for the next position come strictly after
            # idx in the pool, again deduplicated by value.
            stack.append(
                unique_everseen(
                    enumerate(pool[idx + 1 :], idx + 1),
                    key=itemgetter(1),
                )
            )
            depth += 1
+
+
def filter_except(validator, iterable, *exceptions):
    """Yield the items from *iterable* for which the *validator* function
    does not raise one of the specified *exceptions*.

    *validator* is called once per item; it should accept one argument and
    raise an exception if that item is not valid.

    >>> iterable = ['1', '2', 'three', '4', None]
    >>> list(filter_except(int, iterable, ValueError, TypeError))
    ['1', '2', '4']

    Exceptions other than those listed in *exceptions* propagate normally.
    """
    for item in iterable:
        try:
            validator(item)
        except exceptions:
            # Validation failed in an expected way; drop the item.
            continue
        yield item
+
+
def map_except(function, iterable, *exceptions):
    """Transform each item from *iterable* with *function* and yield the
    result, unless *function* raises one of the specified *exceptions*.

    *function* should accept one argument; items for which it raises a
    listed exception are silently skipped.

    >>> iterable = ['1', '2', 'three', '4', None]
    >>> list(map_except(int, iterable, ValueError, TypeError))
    [1, 2, 4]

    Exceptions other than those listed in *exceptions* propagate normally.
    """
    for item in iterable:
        # The yield stays inside the try so a listed exception thrown into
        # the generator at the yield point is also suppressed (matching the
        # original construction).
        try:
            yield function(item)
        except exceptions:
            continue
+
+
def map_if(iterable, pred, func, func_else=lambda x: x):
    """Evaluate each item from *iterable* with *pred*. If the result is
    truthy, transform the item with *func* and yield it; otherwise
    transform it with *func_else* (the identity function by default) and
    yield that.

    *pred*, *func*, and *func_else* should each accept one argument.

    >>> list(map_if(range(-5, 5), lambda x: x > 3, lambda x: 'toobig'))
    [-5, -4, -3, -2, -1, 0, 1, 2, 3, 'toobig']
    """
    for item in iterable:
        if pred(item):
            yield func(item)
        else:
            yield func_else(item)
+
+
def _sample_unweighted(iterable, k):
    # Implementation of "Algorithm L" from the 1994 paper by Kim-Hung Li:
    # "Reservoir-Sampling Algorithms of Time Complexity O(n(1+log(N/n)))".
    # NOTE(review): callers pass an iterator, so `take` consumes the first
    # k items and the loop below resumes from item k — confirm if reused.

    # Fill up the reservoir (collection of samples) with the first `k` samples
    reservoir = take(k, iterable)

    # Generate random number that's the largest in a sample of k U(0,1) numbers
    # Largest order statistic: https://en.wikipedia.org/wiki/Order_statistic
    W = exp(log(random()) / k)

    # The number of elements to skip before changing the reservoir is a random
    # number with a geometric distribution. Sample it using random() and logs.
    next_index = k + floor(log(random()) / log(1 - W))

    for index, element in enumerate(iterable, k):

        if index == next_index:
            # Replace a uniformly-chosen reservoir slot with this element.
            reservoir[randrange(k)] = element
            # The new W is the largest in a sample of k U(0, `old_W`) numbers
            W *= exp(log(random()) / k)
            next_index += floor(log(random()) / log(1 - W)) + 1

    # If the input had fewer than k items, the reservoir holds all of them.
    return reservoir
+
+
def _sample_weighted(iterable, k, weights):
    # Implementation of "A-ExpJ" from the 2006 paper by Efraimidis et al. :
    # "Weighted random sampling with a reservoir".
    # `iterable` and `weights` are iterators consumed in lockstep; the first
    # k pairs seed the reservoir, the rest are scanned with random jumps.

    # Log-transform for numerical stability for weights that are small/large
    weight_keys = (log(random()) / weight for weight in weights)

    # Fill up the reservoir (collection of samples) with the first `k`
    # weight-keys and elements, then heapify the list.
    reservoir = take(k, zip(weight_keys, iterable))
    heapify(reservoir)

    # The number of jumps before changing the reservoir is a random variable
    # with an exponential distribution. Sample it using random() and logs.
    smallest_weight_key, _ = reservoir[0]
    weights_to_skip = log(random()) / smallest_weight_key

    for weight, element in zip(weights, iterable):
        if weight >= weights_to_skip:
            # The notation here is consistent with the paper, but we store
            # the weight-keys in log-space for better numerical stability.
            smallest_weight_key, _ = reservoir[0]
            t_w = exp(weight * smallest_weight_key)
            r_2 = uniform(t_w, 1)  # generate U(t_w, 1)
            weight_key = log(r_2) / weight
            # Evict the smallest key and insert the new pair in one step.
            heapreplace(reservoir, (weight_key, element))
            smallest_weight_key, _ = reservoir[0]
            weights_to_skip = log(random()) / smallest_weight_key
        else:
            weights_to_skip -= weight

    # Equivalent to [element for weight_key, element in sorted(reservoir)]
    return [heappop(reservoir)[1] for _ in range(k)]
+
+
def sample(iterable, k, weights=None):
    """Return a *k*-length list of elements chosen (without replacement)
    from the *iterable*. Like :func:`random.sample`, but works on iterables
    of unknown length.

    >>> iterable = range(100)
    >>> sample(iterable, 5)  # doctest: +SKIP
    [81, 60, 96, 16, 4]

    An iterable with *weights* may also be given:

    >>> iterable = range(100)
    >>> weights = (i * i + 1 for i in range(100))
    >>> sample(iterable, 5, weights=weights)  # doctest: +SKIP
    [79, 67, 74, 66, 78]

    The algorithm can also be used to generate weighted random permutations.
    The relative weight of each item determines the probability that it
    appears late in the permutation.

    >>> data = "abcdefgh"
    >>> weights = range(1, len(data) + 1)
    >>> sample(data, k=len(data), weights=weights)  # doctest: +SKIP
    ['c', 'a', 'b', 'e', 'g', 'd', 'h', 'f']
    """
    # A zero-size sample needs no work (and must not touch the iterable).
    if k == 0:
        return []

    # Both helpers expect iterators so they can consume them incrementally.
    iterator = iter(iterable)
    if weights is None:
        return _sample_unweighted(iterator, k)
    return _sample_weighted(iterator, k, iter(weights))
+
+
+def is_sorted(iterable, key=None, reverse=False, strict=False):
+ """Returns ``True`` if the items of iterable are in sorted order, and
+ ``False`` otherwise. *key* and *reverse* have the same meaning that they do
+ in the built-in :func:`sorted` function.
+
+ >>> is_sorted(['1', '2', '3', '4', '5'], key=int)
+ True
+ >>> is_sorted([5, 4, 3, 1, 2], reverse=True)
+ False
+
+ If *strict*, tests for strict sorting, that is, returns ``False`` if equal
+ elements are found:
+
+ >>> is_sorted([1, 2, 2])
+ True
+ >>> is_sorted([1, 2, 2], strict=True)
+ False
+
+ The function returns ``False`` after encountering the first out-of-order
+ item. If there are no out-of-order items, the iterable is exhausted.
+ """
+
+ compare = (le if reverse else ge) if strict else (lt if reverse else gt)
+ it = iterable if key is None else map(key, iterable)
+ return not any(starmap(compare, pairwise(it)))
+
+
class AbortThread(BaseException):
    """Raised inside a background callback to halt its thread.

    Subclasses ``BaseException`` (not ``Exception``) so that user code with
    a broad ``except Exception`` cannot accidentally swallow the abort.
    """

    pass
+
+
class callback_iter:
    """Convert a function that uses callbacks to an iterator.

    Let *func* be a function that takes a `callback` keyword argument.
    For example:

    >>> def func(callback=None):
    ...     for i, c in [(1, 'a'), (2, 'b'), (3, 'c')]:
    ...         if callback:
    ...             callback(i, c)
    ...     return 4


    Use ``with callback_iter(func)`` to get an iterator over the parameters
    that are delivered to the callback.

    >>> with callback_iter(func) as it:
    ...     for args, kwargs in it:
    ...         print(args)
    (1, 'a')
    (2, 'b')
    (3, 'c')

    The function will be called in a background thread. The ``done`` property
    indicates whether it has completed execution.

    >>> it.done
    True

    If it completes successfully, its return value will be available
    in the ``result`` property.

    >>> it.result
    4

    Notes:

    * If the function uses some keyword argument besides ``callback``, supply
      *callback_kwd*.
    * If it finished executing, but raised an exception, accessing the
      ``result`` property will raise the same exception.
    * If it hasn't finished executing, accessing the ``result``
      property from within the ``with`` block will raise ``RuntimeError``.
    * If it hasn't finished executing, accessing the ``result`` property from
      outside the ``with`` block will raise a
      ``more_itertools.AbortThread`` exception.
    * Provide *wait_seconds* to adjust how frequently it is polled for
      output.

    """

    def __init__(self, func, callback_kwd='callback', wait_seconds=0.1):
        # The wrapped function is not started here; it is submitted to the
        # executor the first time the iterator is advanced (see _reader).
        self._func = func
        self._callback_kwd = callback_kwd
        self._aborted = False
        self._future = None
        self._wait_seconds = wait_seconds
        self._executor = ThreadPoolExecutor(max_workers=1)
        self._iterator = self._reader()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Flag the abort first so the next callback raises AbortThread,
        # then wait for the worker thread to finish.
        self._aborted = True
        self._executor.shutdown()

    def __iter__(self):
        return self

    def __next__(self):
        return next(self._iterator)

    @property
    def done(self):
        # False until the function has been submitted AND has finished.
        if self._future is None:
            return False
        return self._future.done()

    @property
    def result(self):
        # Re-raises the function's exception, if it raised one.
        if not self.done:
            raise RuntimeError('Function has not yet completed')

        return self._future.result()

    def _reader(self):
        # Generator that relays callback invocations from the worker thread
        # to the consumer via a queue.
        q = Queue()

        def callback(*args, **kwargs):
            # Runs on the worker thread; AbortThread unwinds it on exit.
            if self._aborted:
                raise AbortThread('canceled by user')

            q.put((args, kwargs))

        self._future = self._executor.submit(
            self._func, **{self._callback_kwd: callback}
        )

        # Poll the queue so we can notice completion even when the function
        # stops invoking the callback.
        while True:
            try:
                item = q.get(timeout=self._wait_seconds)
            except Empty:
                pass
            else:
                q.task_done()
                yield item

            if self._future.done():
                break

        # The function has finished; drain any callbacks that raced in
        # between the last get() and the done() check.
        remaining = []
        while True:
            try:
                item = q.get_nowait()
            except Empty:
                break
            else:
                q.task_done()
                remaining.append(item)
        q.join()
        yield from remaining
+
+
def windowed_complete(iterable, n):
    """
    Yield ``(beginning, middle, end)`` tuples, where:

    * Each ``middle`` has *n* items from *iterable*
    * Each ``beginning`` has the items before the ones in ``middle``
    * Each ``end`` has the items after the ones in ``middle``

    >>> iterable = range(7)
    >>> n = 3
    >>> for beginning, middle, end in windowed_complete(iterable, n):
    ...     print(beginning, middle, end)
    () (0, 1, 2) (3, 4, 5, 6)
    (0,) (1, 2, 3) (4, 5, 6)
    (0, 1) (2, 3, 4) (5, 6)
    (0, 1, 2) (3, 4, 5) (6,)
    (0, 1, 2, 3) (4, 5, 6) ()

    Note that *n* must be at least 0 and at most equal to the length of
    *iterable*.

    This function will exhaust the iterable and may require significant
    storage.
    """
    if n < 0:
        raise ValueError('n must be >= 0')

    # Materialize once; every yielded tuple is a slice of this.
    items = tuple(iterable)
    total = len(items)

    if n > total:
        raise ValueError('n must be <= len(seq)')

    for start in range(total - n + 1):
        stop = start + n
        yield items[:start], items[start:stop], items[stop:]
+
+
def all_unique(iterable, key=None):
    """
    Returns ``True`` if all the elements of *iterable* are unique (no two
    elements are equal).

    >>> all_unique('ABCB')
    False

    If a *key* function is specified, it will be used to make comparisons.

    >>> all_unique('ABCb')
    True
    >>> all_unique('ABCb', str.lower)
    False

    The function returns as soon as the first non-unique element is
    encountered. Iterables with a mix of hashable and unhashable items can
    be used, but the function will be slower for unhashable items.
    """
    # Hashable values go in a set (fast); unhashable fall back to a list.
    hashable_seen = set()
    unhashable_seen = []

    values = map(key, iterable) if key else iterable
    for value in values:
        try:
            if value in hashable_seen:
                return False
            hashable_seen.add(value)
        except TypeError:
            if value in unhashable_seen:
                return False
            unhashable_seen.append(value)
    return True
+
+
def nth_product(index, *args):
    """Equivalent to ``list(product(*args))[index]``.

    The products of *args* can be ordered lexicographically.
    :func:`nth_product` computes the product at sort position *index* without
    computing the previous products.

    >>> nth_product(8, range(2), range(2), range(2), range(2))
    (1, 0, 0, 0)

    ``IndexError`` will be raised if the given *index* is invalid.
    """
    # Work from the least-significant (last) pool outward, like digits of a
    # mixed-radix number.
    pools = [tuple(pool) for pool in reversed(args)]
    sizes = [len(pool) for pool in pools]

    total = reduce(mul, sizes)

    # Support negative indexing from the end.
    if index < 0:
        index += total

    if not 0 <= index < total:
        raise IndexError

    selection = []
    for pool, size in zip(pools, sizes):
        index, digit = divmod(index, size)
        selection.append(pool[digit])

    return tuple(reversed(selection))
+
+
def nth_permutation(iterable, r, index):
    """Equivalent to ``list(permutations(iterable, r))[index]``

    The subsequences of *iterable* that are of length *r* where order is
    important can be ordered lexicographically. :func:`nth_permutation`
    computes the subsequence at sort position *index* directly, without
    computing the previous subsequences.

    >>> nth_permutation('ghijk', 2, 5)
    ('h', 'i')

    ``ValueError`` will be raised If *r* is negative or greater than the length
    of *iterable*.
    ``IndexError`` will be raised if the given *index* is invalid.
    """
    pool = list(iterable)
    n = len(pool)

    # r=None (or r == n) means a full-length permutation of all n items.
    if r is None or r == n:
        r, c = n, factorial(n)
    elif not 0 <= r < n:
        raise ValueError
    else:
        # c is the total number of r-length permutations: n! / (n - r)!
        c = factorial(n) // factorial(n - r)

    # Support negative indexing from the end.
    if index < 0:
        index += c

    if not 0 <= index < c:
        raise IndexError

    if c == 0:
        return tuple()

    # Scale the index into the full n! space, then decompose it in the
    # factorial number system (digit at radix d is q mod d).
    result = [0] * r
    q = index * factorial(n) // c if r < n else index
    for d in range(1, n + 1):
        q, i = divmod(q, d)
        if 0 <= n - d < r:
            result[n - d] = i
        if q == 0:
            break

    # Each digit selects (and removes) an element from the shrinking pool.
    return tuple(map(pool.pop, result))
+
+
def value_chain(*args):
    """Yield all arguments passed to the function in the same order in which
    they were passed. If an argument itself is iterable then iterate over its
    values.

    >>> list(value_chain(1, 2, 3, [4, 5, 6]))
    [1, 2, 3, 4, 5, 6]

    Binary and text strings are not considered iterable and are emitted
    as-is:

    >>> list(value_chain('12', '34', ['56', '78']))
    ['12', '34', '56', '78']


    Multiple levels of nesting are not flattened.

    """
    for obj in args:
        # Strings are iterable but are treated as atomic values here.
        if isinstance(obj, (str, bytes)):
            yield obj
        else:
            try:
                yield from obj
            except TypeError:
                # Not iterable: emit the object itself.
                yield obj
+
+
def product_index(element, *args):
    """Equivalent to ``list(product(*args)).index(element)``

    The products of *args* can be ordered lexicographically.
    :func:`product_index` computes the first index of *element* without
    computing the previous products.

    >>> product_index([8, 2], range(10), range(5))
    42

    ``ValueError`` will be raised if the given *element* isn't in the product
    of *args*.
    """
    # Treat the element as digits of a mixed-radix number whose radices are
    # the pool sizes; _marker flags a length mismatch between the two.
    index = 0
    for value, pool in zip_longest(element, args, fillvalue=_marker):
        if value is _marker or pool is _marker:
            raise ValueError('element is not a product of args')

        pool = tuple(pool)
        index = index * len(pool) + pool.index(value)

    return index
+
+
def combination_index(element, iterable):
    """Equivalent to ``list(combinations(iterable, r)).index(element)``

    The subsequences of *iterable* that are of length *r* can be ordered
    lexicographically. :func:`combination_index` computes the index of the
    first *element*, without computing the previous combinations.

    >>> combination_index('adf', 'abcdefg')
    10

    ``ValueError`` will be raised if the given *element* isn't one of the
    combinations of *iterable*.
    """
    # Scan `iterable` once, recording the position of each item of `element`.
    element = enumerate(element)
    k, y = next(element, (None, None))
    if k is None:
        # An empty element is the single length-0 combination, at index 0.
        return 0

    indexes = []
    pool = enumerate(iterable)
    for n, x in pool:
        if x == y:
            indexes.append(n)
            tmp, y = next(element, (None, None))
            if tmp is None:
                break
            else:
                k = tmp
    else:
        # `element` had items that were never matched in `iterable`.
        raise ValueError('element is not a combination of iterable')

    # Exhaust the pool so `n` becomes the index of the last item of iterable.
    n, _ = last(pool, default=(n, None))

    # Python versions below 3.8 don't have math.comb
    index = 1
    for i, j in enumerate(reversed(indexes), start=1):
        j = n - j
        if i <= j:
            index += factorial(j) // (factorial(i) * factorial(j - i))

    return factorial(n + 1) // (factorial(k + 1) * factorial(n - k)) - index
+
+
def permutation_index(element, iterable):
    """Equivalent to ``list(permutations(iterable, r)).index(element)``

    The subsequences of *iterable* that are of length *r* where order is
    important can be ordered lexicographically. :func:`permutation_index`
    computes the index of the first *element* directly, without computing
    the previous permutations.

    >>> permutation_index([1, 3, 2], range(5))
    19

    ``ValueError`` will be raised if the given *element* isn't one of the
    permutations of *iterable*.
    """
    # Each chosen item contributes a digit in a factorial-number-system
    # representation; removing it shrinks the radix for the next digit.
    remaining = list(iterable)
    index = 0
    for radix, value in zip(range(len(remaining), -1, -1), element):
        position = remaining.index(value)
        index = index * radix + position
        del remaining[position]

    return index
+
+
class countable:
    """Wrap *iterable* and keep a count of how many items have been consumed.

    The ``items_seen`` attribute starts at ``0`` and increments as the iterable
    is consumed:

    >>> iterable = map(str, range(10))
    >>> it = countable(iterable)
    >>> it.items_seen
    0
    >>> next(it), next(it)
    ('0', '1')
    >>> list(it)
    ['2', '3', '4', '5', '6', '7', '8', '9']
    >>> it.items_seen
    10
    """

    def __init__(self, iterable):
        self._it = iter(iterable)
        self.items_seen = 0

    def __iter__(self):
        return self

    def __next__(self):
        # Let StopIteration propagate before bumping the counter, so the
        # count only reflects items actually delivered.
        value = next(self._it)
        self.items_seen += 1
        return value
+
+
def chunked_even(iterable, n):
    """Break *iterable* into lists of approximately length *n*.
    Items are distributed such that the lengths of the lists differ by at
    most 1 item.

    >>> iterable = [1, 2, 3, 4, 5, 6, 7]
    >>> n = 3
    >>> list(chunked_even(iterable, n))  # List lengths: 3, 2, 2
    [[1, 2, 3], [4, 5], [6, 7]]
    >>> list(chunked(iterable, n))  # List lengths: 3, 3, 1
    [[1, 2, 3], [4, 5, 6], [7]]

    """
    # When the total length is known up front we can size the chunks exactly;
    # otherwise fall back to the buffering (online) strategy.
    if hasattr(iterable, '__len__'):
        return _chunked_even_finite(iterable, len(iterable), n)
    return _chunked_even_online(iterable, n)
+
+
def _chunked_even_online(iterable, n):
    # Buffer enough items that emitting one n-sized chunk can never force a
    # later chunk to fall below size n - 1.
    maxbuf = n + (n - 2) * (n - 1)
    buf = []
    for item in iterable:
        buf.append(item)
        if len(buf) == maxbuf:
            yield buf[:n]
            buf = buf[n:]
    # Distribute whatever remains as evenly-sized chunks.
    yield from _chunked_even_finite(buf, len(buf), n)
+
+
+def _chunked_even_finite(iterable, N, n):
+ if N < 1:
+ return
+
+ # Lists are either size `full_size <= n` or `partial_size = full_size - 1`
+ q, r = divmod(N, n)
+ num_lists = q + (1 if r > 0 else 0)
+ q, r = divmod(N, num_lists)
+ full_size = q + (1 if r > 0 else 0)
+ partial_size = full_size - 1
+ num_full = N - partial_size * num_lists
+ num_partial = num_lists - num_full
+
+ buffer = []
+ iterator = iter(iterable)
+
+ # Yield num_full lists of full_size
+ for x in iterator:
+ buffer.append(x)
+ if len(buffer) == full_size:
+ yield buffer
+ buffer = []
+ num_full -= 1
+ if num_full <= 0:
+ break
+
+ # Yield num_partial lists of partial_size
+ for x in iterator:
+ buffer.append(x)
+ if len(buffer) == partial_size:
+ yield buffer
+ buffer = []
+ num_partial -= 1
+
+
def zip_broadcast(*objects, scalar_types=(str, bytes), strict=False):
    """A version of :func:`zip` that "broadcasts" any scalar
    (i.e., non-iterable) items into output tuples.

    >>> iterable_1 = [1, 2, 3]
    >>> iterable_2 = ['a', 'b', 'c']
    >>> scalar = '_'
    >>> list(zip_broadcast(iterable_1, iterable_2, scalar))
    [(1, 'a', '_'), (2, 'b', '_'), (3, 'c', '_')]

    The *scalar_types* keyword argument determines what types are considered
    scalar. It is set to ``(str, bytes)`` by default. Set it to ``None`` to
    treat strings and byte strings as iterable:

    >>> list(zip_broadcast('abc', 0, 'xyz', scalar_types=None))
    [('a', 0, 'x'), ('b', 0, 'y'), ('c', 0, 'z')]

    If the *strict* keyword argument is ``True``, then
    ``UnequalIterablesError`` will be raised if any of the iterables have
    different lengths.
    """

    def is_scalar(obj):
        # Members of scalar_types are scalar even though they are iterable.
        if scalar_types and isinstance(obj, scalar_types):
            return True
        try:
            iter(obj)
        except TypeError:
            return True
        return False

    size = len(objects)
    if not size:
        return

    # Partition the arguments, remembering the output slot of each one.
    iterators, iterator_slots = [], []
    fixed_values, fixed_slots = [], []
    for slot, obj in enumerate(objects):
        if is_scalar(obj):
            fixed_values.append(obj)
            fixed_slots.append(slot)
        else:
            iterators.append(iter(obj))
            iterator_slots.append(slot)

    # All scalars: emit the arguments once, unchanged.
    if len(fixed_values) == size:
        yield tuple(objects)
        return

    zipper = _zip_equal if strict else zip
    for values in zipper(*iterators):
        row = [None] * size

        for slot, value in zip(iterator_slots, values):
            row[slot] = value

        for slot, value in zip(fixed_slots, fixed_values):
            row[slot] = value

        yield tuple(row)
+
+
def unique_in_window(iterable, n, key=None):
    """Yield the items from *iterable* that haven't been seen recently.
    *n* is the size of the lookback window.

    >>> iterable = [0, 1, 0, 2, 3, 0]
    >>> n = 3
    >>> list(unique_in_window(iterable, n))
    [0, 1, 2, 3, 0]

    The *key* function, if provided, will be used to determine uniqueness:

    >>> list(unique_in_window('abAcda', 3, key=lambda x: x.lower()))
    ['a', 'b', 'c', 'd', 'a']

    The items in *iterable* must be hashable.

    """
    if n <= 0:
        raise ValueError('n must be greater than 0')

    # `recent` holds the keys of the last n emitted items; `seen` mirrors it
    # as a set for O(1) membership tests.
    recent = deque(maxlen=n)
    seen = set()

    for item in iterable:
        k = item if key is None else key(item)
        if k in seen:
            continue

        # Evict the oldest key before admitting the new one.
        if len(seen) == n:
            seen.discard(recent[0])

        seen.add(k)
        recent.append(k)

        yield item
+
+
def duplicates_everseen(iterable, key=None):
    """Yield duplicate elements after their first appearance.

    >>> list(duplicates_everseen('mississippi'))
    ['s', 'i', 's', 's', 'i', 'p', 'i']
    >>> list(duplicates_everseen('AaaBbbCccAaa', str.lower))
    ['a', 'a', 'b', 'b', 'c', 'c', 'A', 'a', 'a']

    This function is analogous to :func:`unique_everseen` and is subject to
    the same performance considerations.

    """
    # Hashable keys go in a set (fast); unhashable fall back to a list.
    hashable_seen = set()
    unhashable_seen = []

    for item in iterable:
        k = item if key is None else key(item)
        try:
            if k in hashable_seen:
                yield item
            else:
                hashable_seen.add(k)
        except TypeError:
            if k in unhashable_seen:
                yield item
            else:
                unhashable_seen.append(k)
+
+
def duplicates_justseen(iterable, key=None):
    """Yields serially-duplicate elements after their first appearance.

    >>> list(duplicates_justseen('mississippi'))
    ['s', 's', 'p']
    >>> list(duplicates_justseen('AaaBbbCccAaa', str.lower))
    ['a', 'a', 'b', 'b', 'c', 'c', 'a', 'a']

    This function is analogous to :func:`unique_justseen`.

    """
    # groupby collapses consecutive runs with equal keys; discarding the
    # first member of each run leaves exactly the serial duplicates. This
    # avoids the extra flatten/islice_extended machinery and a per-group
    # lambda while producing the same elements lazily.
    for _, group in groupby(iterable, key):
        # Every group has at least one member, so next() cannot fail.
        next(group)
        yield from group
+
+
def minmax(iterable_or_value, *others, key=None, default=_marker):
    """Returns both the smallest and largest items from an iterable
    or from two or more arguments.

    >>> minmax([3, 1, 5])
    (1, 5)

    >>> minmax(4, 2, 6)
    (2, 6)

    If a *key* function is provided, it will be used to transform the input
    items for comparison.

    >>> minmax([5, 30], key=str)  # '30' sorts before '5'
    (30, 5)

    If a *default* value is provided, it will be returned if there are no
    input items.

    >>> minmax([], default=(0, 0))
    (0, 0)

    Otherwise ``ValueError`` is raised.

    This function is based on the
    `recipe <http://code.activestate.com/recipes/577916/>`__ by
    Raymond Hettinger and takes care to minimize the number of comparisons
    performed.
    """
    iterable = (iterable_or_value, *others) if others else iterable_or_value

    it = iter(iterable)

    try:
        lo = hi = next(it)
    except StopIteration as e:
        if default is _marker:
            raise ValueError(
                '`minmax()` argument is an empty iterable. '
                'Provide a `default` value to suppress this error.'
            ) from e
        return default

    # Different branches depending on the presence of key. This saves a lot
    # of unimportant copies which would slow the "key=None" branch
    # significantly down.
    if key is None:
        # Pull items two at a time; an odd trailing item is paired with the
        # first item (the fillvalue), which cannot change the result.
        for x, y in zip_longest(it, it, fillvalue=lo):
            if y < x:
                x, y = y, x
            if x < lo:
                lo = x
            if hi < y:
                hi = y

    else:
        # Cache the keys of the current extremes so each item's key is
        # computed exactly once.
        lo_key = hi_key = key(lo)

        for x, y in zip_longest(it, it, fillvalue=lo):

            x_key, y_key = key(x), key(y)

            if y_key < x_key:
                x, y, x_key, y_key = y, x, y_key, x_key
            if x_key < lo_key:
                lo, lo_key = x, x_key
            if hi_key < y_key:
                hi, hi_key = y, y_key

    return lo, hi
diff --git a/monEnvTP/lib/python3.8/site-packages/more_itertools/more.pyi b/monEnvTP/lib/python3.8/site-packages/more_itertools/more.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..fe7d4bdd7a822c713142dca8a3f4289cca89ee07
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/more_itertools/more.pyi
@@ -0,0 +1,664 @@
+"""Stubs for more_itertools.more"""
+
+from typing import (
+ Any,
+ Callable,
+ Container,
+ Dict,
+ Generic,
+ Hashable,
+ Iterable,
+ Iterator,
+ List,
+ Optional,
+ Reversible,
+ Sequence,
+ Sized,
+ Tuple,
+ Union,
+ TypeVar,
+ type_check_only,
+)
+from types import TracebackType
+from typing_extensions import ContextManager, Protocol, Type, overload
+
+# Type and type variable definitions
+_T = TypeVar('_T')
+_T1 = TypeVar('_T1')
+_T2 = TypeVar('_T2')
+_U = TypeVar('_U')
+_V = TypeVar('_V')
+_W = TypeVar('_W')
+_T_co = TypeVar('_T_co', covariant=True)
+_GenFn = TypeVar('_GenFn', bound=Callable[..., Iterator[object]])
+_Raisable = Union[BaseException, 'Type[BaseException]']
+
+@type_check_only
+class _SizedIterable(Protocol[_T_co], Sized, Iterable[_T_co]): ...
+
+@type_check_only
+class _SizedReversible(Protocol[_T_co], Sized, Reversible[_T_co]): ...
+
+def chunked(
+ iterable: Iterable[_T], n: Optional[int], strict: bool = ...
+) -> Iterator[List[_T]]: ...
+@overload
+def first(iterable: Iterable[_T]) -> _T: ...
+@overload
+def first(iterable: Iterable[_T], default: _U) -> Union[_T, _U]: ...
+@overload
+def last(iterable: Iterable[_T]) -> _T: ...
+@overload
+def last(iterable: Iterable[_T], default: _U) -> Union[_T, _U]: ...
+@overload
+def nth_or_last(iterable: Iterable[_T], n: int) -> _T: ...
+@overload
+def nth_or_last(
+ iterable: Iterable[_T], n: int, default: _U
+) -> Union[_T, _U]: ...
+
+class peekable(Generic[_T], Iterator[_T]):
+ def __init__(self, iterable: Iterable[_T]) -> None: ...
+ def __iter__(self) -> peekable[_T]: ...
+ def __bool__(self) -> bool: ...
+ @overload
+ def peek(self) -> _T: ...
+ @overload
+ def peek(self, default: _U) -> Union[_T, _U]: ...
+ def prepend(self, *items: _T) -> None: ...
+ def __next__(self) -> _T: ...
+ @overload
+ def __getitem__(self, index: int) -> _T: ...
+ @overload
+ def __getitem__(self, index: slice) -> List[_T]: ...
+
+def collate(*iterables: Iterable[_T], **kwargs: Any) -> Iterable[_T]: ...
+def consumer(func: _GenFn) -> _GenFn: ...
+def ilen(iterable: Iterable[object]) -> int: ...
+def iterate(func: Callable[[_T], _T], start: _T) -> Iterator[_T]: ...
+def with_iter(
+ context_manager: ContextManager[Iterable[_T]],
+) -> Iterator[_T]: ...
+def one(
+ iterable: Iterable[_T],
+ too_short: Optional[_Raisable] = ...,
+ too_long: Optional[_Raisable] = ...,
+) -> _T: ...
+def raise_(exception: _Raisable, *args: Any) -> None: ...
+def strictly_n(
+ iterable: Iterable[_T],
+ n: int,
+ too_short: Optional[_GenFn] = ...,
+ too_long: Optional[_GenFn] = ...,
+) -> List[_T]: ...
+def distinct_permutations(
+ iterable: Iterable[_T], r: Optional[int] = ...
+) -> Iterator[Tuple[_T, ...]]: ...
+def intersperse(
+ e: _U, iterable: Iterable[_T], n: int = ...
+) -> Iterator[Union[_T, _U]]: ...
+def unique_to_each(*iterables: Iterable[_T]) -> List[List[_T]]: ...
+@overload
+def windowed(
+ seq: Iterable[_T], n: int, *, step: int = ...
+) -> Iterator[Tuple[Optional[_T], ...]]: ...
+@overload
+def windowed(
+ seq: Iterable[_T], n: int, fillvalue: _U, step: int = ...
+) -> Iterator[Tuple[Union[_T, _U], ...]]: ...
+def substrings(iterable: Iterable[_T]) -> Iterator[Tuple[_T, ...]]: ...
+def substrings_indexes(
+ seq: Sequence[_T], reverse: bool = ...
+) -> Iterator[Tuple[Sequence[_T], int, int]]: ...
+
+class bucket(Generic[_T, _U], Container[_U]):
+ def __init__(
+ self,
+ iterable: Iterable[_T],
+ key: Callable[[_T], _U],
+ validator: Optional[Callable[[object], object]] = ...,
+ ) -> None: ...
+ def __contains__(self, value: object) -> bool: ...
+ def __iter__(self) -> Iterator[_U]: ...
+ def __getitem__(self, value: object) -> Iterator[_T]: ...
+
+def spy(
+ iterable: Iterable[_T], n: int = ...
+) -> Tuple[List[_T], Iterator[_T]]: ...
+def interleave(*iterables: Iterable[_T]) -> Iterator[_T]: ...
+def interleave_longest(*iterables: Iterable[_T]) -> Iterator[_T]: ...
+def interleave_evenly(
+ iterables: List[Iterable[_T]], lengths: Optional[List[int]] = ...
+) -> Iterator[_T]: ...
+def collapse(
+ iterable: Iterable[Any],
+ base_type: Optional[type] = ...,
+ levels: Optional[int] = ...,
+) -> Iterator[Any]: ...
+@overload
+def side_effect(
+ func: Callable[[_T], object],
+ iterable: Iterable[_T],
+ chunk_size: None = ...,
+ before: Optional[Callable[[], object]] = ...,
+ after: Optional[Callable[[], object]] = ...,
+) -> Iterator[_T]: ...
+@overload
+def side_effect(
+ func: Callable[[List[_T]], object],
+ iterable: Iterable[_T],
+ chunk_size: int,
+ before: Optional[Callable[[], object]] = ...,
+ after: Optional[Callable[[], object]] = ...,
+) -> Iterator[_T]: ...
+def sliced(
+ seq: Sequence[_T], n: int, strict: bool = ...
+) -> Iterator[Sequence[_T]]: ...
+def split_at(
+ iterable: Iterable[_T],
+ pred: Callable[[_T], object],
+ maxsplit: int = ...,
+ keep_separator: bool = ...,
+) -> Iterator[List[_T]]: ...
+def split_before(
+ iterable: Iterable[_T], pred: Callable[[_T], object], maxsplit: int = ...
+) -> Iterator[List[_T]]: ...
+def split_after(
+ iterable: Iterable[_T], pred: Callable[[_T], object], maxsplit: int = ...
+) -> Iterator[List[_T]]: ...
+def split_when(
+ iterable: Iterable[_T],
+ pred: Callable[[_T, _T], object],
+ maxsplit: int = ...,
+) -> Iterator[List[_T]]: ...
+def split_into(
+ iterable: Iterable[_T], sizes: Iterable[Optional[int]]
+) -> Iterator[List[_T]]: ...
+@overload
+def padded(
+ iterable: Iterable[_T],
+ *,
+ n: Optional[int] = ...,
+ next_multiple: bool = ...
+) -> Iterator[Optional[_T]]: ...
+@overload
+def padded(
+ iterable: Iterable[_T],
+ fillvalue: _U,
+ n: Optional[int] = ...,
+ next_multiple: bool = ...,
+) -> Iterator[Union[_T, _U]]: ...
+@overload
+def repeat_last(iterable: Iterable[_T]) -> Iterator[_T]: ...
+@overload
+def repeat_last(
+ iterable: Iterable[_T], default: _U
+) -> Iterator[Union[_T, _U]]: ...
+def distribute(n: int, iterable: Iterable[_T]) -> List[Iterator[_T]]: ...
+@overload
+def stagger(
+ iterable: Iterable[_T],
+ offsets: _SizedIterable[int] = ...,
+ longest: bool = ...,
+) -> Iterator[Tuple[Optional[_T], ...]]: ...
+@overload
+def stagger(
+ iterable: Iterable[_T],
+ offsets: _SizedIterable[int] = ...,
+ longest: bool = ...,
+ fillvalue: _U = ...,
+) -> Iterator[Tuple[Union[_T, _U], ...]]: ...
+
+class UnequalIterablesError(ValueError):
+ def __init__(
+ self, details: Optional[Tuple[int, int, int]] = ...
+ ) -> None: ...
+
+@overload
+def zip_equal(__iter1: Iterable[_T1]) -> Iterator[Tuple[_T1]]: ...
+@overload
+def zip_equal(
+ __iter1: Iterable[_T1], __iter2: Iterable[_T2]
+) -> Iterator[Tuple[_T1, _T2]]: ...
+@overload
+def zip_equal(
+ __iter1: Iterable[_T],
+ __iter2: Iterable[_T],
+ __iter3: Iterable[_T],
+ *iterables: Iterable[_T]
+) -> Iterator[Tuple[_T, ...]]: ...
+@overload
+def zip_offset(
+ __iter1: Iterable[_T1],
+ *,
+ offsets: _SizedIterable[int],
+ longest: bool = ...,
+ fillvalue: None = None
+) -> Iterator[Tuple[Optional[_T1]]]: ...
+@overload
+def zip_offset(
+ __iter1: Iterable[_T1],
+ __iter2: Iterable[_T2],
+ *,
+ offsets: _SizedIterable[int],
+ longest: bool = ...,
+ fillvalue: None = None
+) -> Iterator[Tuple[Optional[_T1], Optional[_T2]]]: ...
+@overload
+def zip_offset(
+ __iter1: Iterable[_T],
+ __iter2: Iterable[_T],
+ __iter3: Iterable[_T],
+ *iterables: Iterable[_T],
+ offsets: _SizedIterable[int],
+ longest: bool = ...,
+ fillvalue: None = None
+) -> Iterator[Tuple[Optional[_T], ...]]: ...
+@overload
+def zip_offset(
+ __iter1: Iterable[_T1],
+ *,
+ offsets: _SizedIterable[int],
+ longest: bool = ...,
+ fillvalue: _U,
+) -> Iterator[Tuple[Union[_T1, _U]]]: ...
+@overload
+def zip_offset(
+ __iter1: Iterable[_T1],
+ __iter2: Iterable[_T2],
+ *,
+ offsets: _SizedIterable[int],
+ longest: bool = ...,
+ fillvalue: _U,
+) -> Iterator[Tuple[Union[_T1, _U], Union[_T2, _U]]]: ...
+@overload
+def zip_offset(
+ __iter1: Iterable[_T],
+ __iter2: Iterable[_T],
+ __iter3: Iterable[_T],
+ *iterables: Iterable[_T],
+ offsets: _SizedIterable[int],
+ longest: bool = ...,
+ fillvalue: _U,
+) -> Iterator[Tuple[Union[_T, _U], ...]]: ...
# ---- Stubs: ordering / splitting helpers from more.py ----
def sort_together(
    iterables: Iterable[Iterable[_T]],
    key_list: Iterable[int] = ...,
    key: Optional[Callable[..., Any]] = ...,
    reverse: bool = ...,
) -> List[Tuple[_T, ...]]: ...
def unzip(iterable: Iterable[Sequence[_T]]) -> Tuple[Iterator[_T], ...]: ...
def divide(n: int, iterable: Iterable[_T]) -> List[Iterator[_T]]: ...
def always_iterable(
    obj: object,
    base_type: Union[
        type, Tuple[Union[type, Tuple[Any, ...]], ...], None
    ] = ...,
) -> Iterator[Any]: ...
def adjacent(
    predicate: Callable[[_T], bool],
    iterable: Iterable[_T],
    distance: int = ...,
) -> Iterator[Tuple[bool, _T]]: ...
# groupby_transform: one overload per None/callable combination of
# keyfunc, valuefunc and reducefunc, so the result type tracks which
# transformations are actually applied.
@overload
def groupby_transform(
    iterable: Iterable[_T],
    keyfunc: None = None,
    valuefunc: None = None,
    reducefunc: None = None,
) -> Iterator[Tuple[_T, Iterator[_T]]]: ...
@overload
def groupby_transform(
    iterable: Iterable[_T],
    keyfunc: Callable[[_T], _U],
    valuefunc: None,
    reducefunc: None,
) -> Iterator[Tuple[_U, Iterator[_T]]]: ...
@overload
def groupby_transform(
    iterable: Iterable[_T],
    keyfunc: None,
    valuefunc: Callable[[_T], _V],
    reducefunc: None,
) -> Iterable[Tuple[_T, Iterable[_V]]]: ...
@overload
def groupby_transform(
    iterable: Iterable[_T],
    keyfunc: Callable[[_T], _U],
    valuefunc: Callable[[_T], _V],
    reducefunc: None,
) -> Iterable[Tuple[_U, Iterator[_V]]]: ...
@overload
def groupby_transform(
    iterable: Iterable[_T],
    keyfunc: None,
    valuefunc: None,
    reducefunc: Callable[[Iterator[_T]], _W],
) -> Iterable[Tuple[_T, _W]]: ...
@overload
def groupby_transform(
    iterable: Iterable[_T],
    keyfunc: Callable[[_T], _U],
    valuefunc: None,
    reducefunc: Callable[[Iterator[_T]], _W],
) -> Iterable[Tuple[_U, _W]]: ...
@overload
def groupby_transform(
    iterable: Iterable[_T],
    keyfunc: None,
    valuefunc: Callable[[_T], _V],
    reducefunc: Callable[[Iterable[_V]], _W],
) -> Iterable[Tuple[_T, _W]]: ...
@overload
def groupby_transform(
    iterable: Iterable[_T],
    keyfunc: Callable[[_T], _U],
    valuefunc: Callable[[_T], _V],
    reducefunc: Callable[[Iterable[_V]], _W],
) -> Iterable[Tuple[_U, _W]]: ...
+
# Stub for numeric_range: a range-like immutable sequence over arbitrary
# numeric types (_T = element type, _U = step type).
class numeric_range(Generic[_T, _U], Sequence[_T], Hashable, Reversible[_T]):
    @overload
    def __init__(self, __stop: _T) -> None: ...
    @overload
    def __init__(self, __start: _T, __stop: _T) -> None: ...
    @overload
    def __init__(self, __start: _T, __stop: _T, __step: _U) -> None: ...
    def __bool__(self) -> bool: ...
    def __contains__(self, elem: object) -> bool: ...
    def __eq__(self, other: object) -> bool: ...
    @overload
    def __getitem__(self, key: int) -> _T: ...
    @overload
    def __getitem__(self, key: slice) -> numeric_range[_T, _U]: ...
    def __hash__(self) -> int: ...
    def __iter__(self) -> Iterator[_T]: ...
    def __len__(self) -> int: ...
    def __reduce__(
        self,
    ) -> Tuple[Type[numeric_range[_T, _U]], Tuple[_T, _T, _U]]: ...
    def __repr__(self) -> str: ...
    def __reversed__(self) -> Iterator[_T]: ...
    def count(self, value: _T) -> int: ...
    # 'type: ignore' because this intentionally narrows Sequence.index
    # (no start/stop parameters).
    def index(self, value: _T) -> int: ...  # type: ignore
+
# ---- Stubs: enumeration and trimming helpers ----
def count_cycle(
    iterable: Iterable[_T], n: Optional[int] = ...
) -> Iterable[Tuple[int, _T]]: ...
def mark_ends(
    iterable: Iterable[_T],
) -> Iterable[Tuple[bool, bool, _T]]: ...
def locate(
    iterable: Iterable[object],
    pred: Callable[..., Any] = ...,
    window_size: Optional[int] = ...,
) -> Iterator[int]: ...
def lstrip(
    iterable: Iterable[_T], pred: Callable[[_T], object]
) -> Iterator[_T]: ...
def rstrip(
    iterable: Iterable[_T], pred: Callable[[_T], object]
) -> Iterator[_T]: ...
def strip(
    iterable: Iterable[_T], pred: Callable[[_T], object]
) -> Iterator[_T]: ...
+
# Stub for islice_extended: islice variant that also supports negative
# start/stop/step values.
class islice_extended(Generic[_T], Iterator[_T]):
    def __init__(
        self, iterable: Iterable[_T], *args: Optional[int]
    ) -> None: ...
    def __iter__(self) -> islice_extended[_T]: ...
    def __next__(self) -> _T: ...
    def __getitem__(self, index: slice) -> islice_extended[_T]: ...
+
def always_reversible(iterable: Iterable[_T]) -> Iterator[_T]: ...
def consecutive_groups(
    iterable: Iterable[_T], ordering: Callable[[_T], int] = ...
) -> Iterator[Iterator[_T]]: ...
# difference: supplying 'initial' changes the element type of the
# result, hence the two overloads.
@overload
def difference(
    iterable: Iterable[_T],
    func: Callable[[_T, _T], _U] = ...,
    *,
    initial: None = ...
) -> Iterator[Union[_T, _U]]: ...
@overload
def difference(
    iterable: Iterable[_T], func: Callable[[_T, _T], _U] = ..., *, initial: _U
) -> Iterator[_U]: ...
+
# Stub for SequenceView: read-only live view over a target sequence.
class SequenceView(Generic[_T], Sequence[_T]):
    def __init__(self, target: Sequence[_T]) -> None: ...
    @overload
    def __getitem__(self, index: int) -> _T: ...
    @overload
    def __getitem__(self, index: slice) -> Sequence[_T]: ...
    def __len__(self) -> int: ...

# Stub for seekable: iterator wrapper that caches elements so iteration
# can be rewound with seek().
class seekable(Generic[_T], Iterator[_T]):
    def __init__(
        self, iterable: Iterable[_T], maxlen: Optional[int] = ...
    ) -> None: ...
    def __iter__(self) -> seekable[_T]: ...
    def __next__(self) -> _T: ...
    def __bool__(self) -> bool: ...
    @overload
    def peek(self) -> _T: ...
    @overload
    def peek(self, default: _U) -> Union[_T, _U]: ...
    def elements(self) -> SequenceView[_T]: ...
    def seek(self, index: int) -> None: ...
+
# Stub for run_length: run-length encode/decode as (value, count) pairs.
class run_length:
    @staticmethod
    def encode(iterable: Iterable[_T]) -> Iterator[Tuple[_T, int]]: ...
    @staticmethod
    def decode(iterable: Iterable[Tuple[_T, int]]) -> Iterator[_T]: ...

def exactly_n(
    iterable: Iterable[_T], n: int, predicate: Callable[[_T], object] = ...
) -> bool: ...
def circular_shifts(iterable: Iterable[_T]) -> List[Tuple[_T, ...]]: ...
def make_decorator(
    wrapping_func: Callable[..., _U], result_index: int = ...
) -> Callable[..., Callable[[Callable[..., Any]], Callable[..., _U]]]: ...
# map_reduce: overloads track whether valuefunc/reducefunc are supplied,
# refining the value type of the returned dict.
@overload
def map_reduce(
    iterable: Iterable[_T],
    keyfunc: Callable[[_T], _U],
    valuefunc: None = ...,
    reducefunc: None = ...,
) -> Dict[_U, List[_T]]: ...
@overload
def map_reduce(
    iterable: Iterable[_T],
    keyfunc: Callable[[_T], _U],
    valuefunc: Callable[[_T], _V],
    reducefunc: None = ...,
) -> Dict[_U, List[_V]]: ...
@overload
def map_reduce(
    iterable: Iterable[_T],
    keyfunc: Callable[[_T], _U],
    valuefunc: None = ...,
    reducefunc: Callable[[List[_T]], _W] = ...,
) -> Dict[_U, _W]: ...
@overload
def map_reduce(
    iterable: Iterable[_T],
    keyfunc: Callable[[_T], _U],
    valuefunc: Callable[[_T], _V],
    reducefunc: Callable[[List[_V]], _W],
) -> Dict[_U, _W]: ...
def rlocate(
    iterable: Iterable[_T],
    pred: Callable[..., object] = ...,
    window_size: Optional[int] = ...,
) -> Iterator[int]: ...
def replace(
    iterable: Iterable[_T],
    pred: Callable[..., object],
    substitutes: Iterable[_U],
    count: Optional[int] = ...,
    window_size: int = ...,
) -> Iterator[Union[_T, _U]]: ...
def partitions(iterable: Iterable[_T]) -> Iterator[List[List[_T]]]: ...
def set_partitions(
    iterable: Iterable[_T], k: Optional[int] = ...
) -> Iterator[List[List[_T]]]: ...
+
# Stub for time_limited: yields items from *iterable* until
# *limit_seconds* have elapsed.
class time_limited(Generic[_T], Iterator[_T]):
    def __init__(
        self, limit_seconds: float, iterable: Iterable[_T]
    ) -> None: ...
    # Fixed: this previously claimed to return islice_extended[_T]
    # (copy-paste from the islice_extended stub).  Iterating a
    # time_limited instance returns the instance itself.
    def __iter__(self) -> time_limited[_T]: ...
    def __next__(self) -> _T: ...
+
# only: returns the lone item of *iterable*; overloads cover the
# optional positional default.
@overload
def only(
    iterable: Iterable[_T], *, too_long: Optional[_Raisable] = ...
) -> Optional[_T]: ...
@overload
def only(
    iterable: Iterable[_T], default: _U, too_long: Optional[_Raisable] = ...
) -> Union[_T, _U]: ...
def ichunked(iterable: Iterable[_T], n: int) -> Iterator[Iterator[_T]]: ...
def distinct_combinations(
    iterable: Iterable[_T], r: int
) -> Iterator[Tuple[_T, ...]]: ...
def filter_except(
    validator: Callable[[Any], object],
    iterable: Iterable[_T],
    *exceptions: Type[BaseException]
) -> Iterator[_T]: ...
def map_except(
    function: Callable[[Any], _U],
    iterable: Iterable[_T],
    *exceptions: Type[BaseException]
) -> Iterator[_U]: ...
def map_if(
    iterable: Iterable[Any],
    pred: Callable[[Any], bool],
    func: Callable[[Any], Any],
    func_else: Optional[Callable[[Any], Any]] = ...,
) -> Iterator[Any]: ...
def sample(
    iterable: Iterable[_T],
    k: int,
    weights: Optional[Iterable[float]] = ...,
) -> List[_T]: ...
def is_sorted(
    iterable: Iterable[_T],
    key: Optional[Callable[[_T], _U]] = ...,
    reverse: bool = False,
    strict: bool = False,
) -> bool: ...
+
# Exception used to abort callback_iter's worker (derives from
# BaseException so it is not swallowed by 'except Exception').
class AbortThread(BaseException):
    pass

# Stub for callback_iter: iterates over the values that *func* passes to
# its callback keyword argument (presumably executed in a background
# thread, given AbortThread/wait_seconds — confirm against more.py).
class callback_iter(Generic[_T], Iterator[_T]):
    def __init__(
        self,
        func: Callable[..., Any],
        callback_kwd: str = ...,
        wait_seconds: float = ...,
    ) -> None: ...
    def __enter__(self) -> callback_iter[_T]: ...
    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> Optional[bool]: ...
    def __iter__(self) -> callback_iter[_T]: ...
    def __next__(self) -> _T: ...
    def _reader(self) -> Iterator[_T]: ...
    @property
    def done(self) -> bool: ...
    @property
    def result(self) -> Any: ...
+
# ---- Stubs: combinatoric indexing and miscellaneous helpers ----
def windowed_complete(
    iterable: Iterable[_T], n: int
) -> Iterator[Tuple[_T, ...]]: ...
def all_unique(
    iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = ...
) -> bool: ...
def nth_product(index: int, *args: Iterable[_T]) -> Tuple[_T, ...]: ...
def nth_permutation(
    iterable: Iterable[_T], r: int, index: int
) -> Tuple[_T, ...]: ...
def value_chain(*args: Union[_T, Iterable[_T]]) -> Iterable[_T]: ...
def product_index(element: Iterable[_T], *args: Iterable[_T]) -> int: ...
def combination_index(
    element: Iterable[_T], iterable: Iterable[_T]
) -> int: ...
def permutation_index(
    element: Iterable[_T], iterable: Iterable[_T]
) -> int: ...
def repeat_each(iterable: Iterable[_T], n: int = ...) -> Iterator[_T]: ...

# Stub for countable: iterator wrapper (appears to count consumed
# items — confirm against more.py).
class countable(Generic[_T], Iterator[_T]):
    def __init__(self, iterable: Iterable[_T]) -> None: ...
    def __iter__(self) -> countable[_T]: ...
    def __next__(self) -> _T: ...

def chunked_even(iterable: Iterable[_T], n: int) -> Iterator[List[_T]]: ...
def zip_broadcast(
    *objects: Union[_T, Iterable[_T]],
    scalar_types: Union[
        type, Tuple[Union[type, Tuple[Any, ...]], ...], None
    ] = ...,
    strict: bool = ...
) -> Iterable[Tuple[_T, ...]]: ...
def unique_in_window(
    iterable: Iterable[_T], n: int, key: Optional[Callable[[_T], _U]] = ...
) -> Iterator[_T]: ...
def duplicates_everseen(
    iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = ...
) -> Iterator[_T]: ...
def duplicates_justseen(
    iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = ...
) -> Iterator[_T]: ...
+
# Protocol capturing the comparison bound required by minmax.
class _SupportsLessThan(Protocol):
    def __lt__(self, __other: Any) -> bool: ...

_SupportsLessThanT = TypeVar("_SupportsLessThanT", bound=_SupportsLessThan)

# minmax: overloads cover the iterable form and the positional-arguments
# form, each with and without 'key'/'default'.
@overload
def minmax(
    iterable_or_value: Iterable[_SupportsLessThanT], *, key: None = None
) -> Tuple[_SupportsLessThanT, _SupportsLessThanT]: ...
@overload
def minmax(
    iterable_or_value: Iterable[_T], *, key: Callable[[_T], _SupportsLessThan]
) -> Tuple[_T, _T]: ...
@overload
def minmax(
    iterable_or_value: Iterable[_SupportsLessThanT],
    *,
    key: None = None,
    default: _U
) -> Union[_U, Tuple[_SupportsLessThanT, _SupportsLessThanT]]: ...
@overload
def minmax(
    iterable_or_value: Iterable[_T],
    *,
    key: Callable[[_T], _SupportsLessThan],
    default: _U,
) -> Union[_U, Tuple[_T, _T]]: ...
@overload
def minmax(
    iterable_or_value: _SupportsLessThanT,
    __other: _SupportsLessThanT,
    *others: _SupportsLessThanT
) -> Tuple[_SupportsLessThanT, _SupportsLessThanT]: ...
@overload
def minmax(
    iterable_or_value: _T,
    __other: _T,
    *others: _T,
    key: Callable[[_T], _SupportsLessThan]
) -> Tuple[_T, _T]: ...
diff --git a/monEnvTP/lib/python3.8/site-packages/more_itertools/py.typed b/monEnvTP/lib/python3.8/site-packages/more_itertools/py.typed
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/monEnvTP/lib/python3.8/site-packages/more_itertools/recipes.py b/monEnvTP/lib/python3.8/site-packages/more_itertools/recipes.py
new file mode 100644
index 0000000000000000000000000000000000000000..a2596423a4c3dbd15a357241477a0af0a531f9ec
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/more_itertools/recipes.py
@@ -0,0 +1,698 @@
+"""Imported from the recipes section of the itertools documentation.
+
+All functions taken from the recipes section of the itertools library docs
+[1]_.
+Some backward-compatible usability improvements have been made.
+
+.. [1] http://docs.python.org/library/itertools.html#recipes
+
+"""
+import warnings
+from collections import deque
+from itertools import (
+ chain,
+ combinations,
+ count,
+ cycle,
+ groupby,
+ islice,
+ repeat,
+ starmap,
+ tee,
+ zip_longest,
+)
+import operator
+from random import randrange, sample, choice
+
+__all__ = [
+ 'all_equal',
+ 'before_and_after',
+ 'consume',
+ 'convolve',
+ 'dotproduct',
+ 'first_true',
+ 'flatten',
+ 'grouper',
+ 'iter_except',
+ 'ncycles',
+ 'nth',
+ 'nth_combination',
+ 'padnone',
+ 'pad_none',
+ 'pairwise',
+ 'partition',
+ 'powerset',
+ 'prepend',
+ 'quantify',
+ 'random_combination_with_replacement',
+ 'random_combination',
+ 'random_permutation',
+ 'random_product',
+ 'repeatfunc',
+ 'roundrobin',
+ 'sliding_window',
+ 'tabulate',
+ 'tail',
+ 'take',
+ 'triplewise',
+ 'unique_everseen',
+ 'unique_justseen',
+]
+
+
def take(n, iterable):
    """Return a list holding the first *n* items of *iterable*.

    Fewer than *n* items are returned when the iterable is shorter.

    >>> take(3, range(10))
    [0, 1, 2]
    >>> take(10, range(3))
    [0, 1, 2]
    """
    return [item for item in islice(iterable, n)]
+
+
def tabulate(function, start=0):
    """Yield ``function(start)``, ``function(start + 1)``, ... forever.

    *function* must accept a single integer argument; *start* defaults
    to 0 and is incremented on every step.

    >>> take(4, tabulate(lambda x: x ** 2, -3))
    [9, 4, 1, 0]
    """
    for value in count(start):
        yield function(value)
+
+
def tail(n, iterable):
    """Return an iterator over only the last *n* items of *iterable*.

    >>> list(tail(3, 'ABCDEFG'))
    ['E', 'F', 'G']
    """
    # deque with maxlen keeps just the trailing n items in O(len) time.
    buffered = deque(iterable, maxlen=n)
    return iter(buffered)
+
+
def consume(iterator, n=None):
    """Advance *iterator* by *n* steps; with ``n=None`` exhaust it.

    Values are discarded at C speed.  If fewer than *n* items remain,
    the iterator is simply exhausted.

    >>> i = iter(range(10))
    >>> consume(i, 3)
    >>> next(i)
    3
    """
    if n is not None:
        # Advance to the empty slice that starts at position n.
        next(islice(iterator, n, n), None)
    else:
        # Feed everything into a zero-length deque.
        deque(iterator, maxlen=0)
+
+
def nth(iterable, n, default=None):
    """Return the item at index *n*, or *default* if the iterable is
    too short.

    >>> nth(range(10), 3)
    3
    >>> nth(range(10), 20, "zebra")
    'zebra'
    """
    remainder = islice(iterable, n, None)
    return next(remainder, default)
+
+
def all_equal(iterable):
    """Return ``True`` when every element equals every other element.

    Vacuously true for an empty iterable.

    >>> all_equal('aaaa')
    True
    >>> all_equal('aaab')
    False
    """
    # groupby collapses equal runs: at most one group means all equal.
    groups = groupby(iterable)
    return next(groups, True) and not next(groups, False)
+
+
def quantify(iterable, pred=bool):
    """Return the sum of ``pred(item)`` over *iterable* — i.e. how many
    items the predicate is true for.

    >>> quantify([True, False, True])
    2
    """
    return sum(pred(item) for item in iterable)
+
+
def pad_none(iterable):
    """Yield the items of *iterable*, then yield ``None`` forever.

    Useful for emulating the behavior of the built-in :func:`map`.

    >>> take(5, pad_none(range(3)))
    [0, 1, 2, None, None]
    """
    trailing_nones = repeat(None)
    return chain(iterable, trailing_nones)
+
+
# Deprecated spelling retained for backward compatibility.
padnone = pad_none
+
+
def ncycles(iterable, n):
    """Yield the full sequence of *iterable* repeated *n* times.

    >>> list(ncycles(["a", "b"], 3))
    ['a', 'b', 'a', 'b', 'a', 'b']
    """
    # Materialize once so single-pass iterators can be replayed.
    saved = tuple(iterable)
    return chain.from_iterable(repeat(saved, n))
+
+
def dotproduct(vec1, vec2):
    """Return the dot product of the two iterables (stops at the
    shorter one).

    >>> dotproduct([10, 10], [20, 20])
    400
    """
    return sum(a * b for a, b in zip(vec1, vec2))
+
+
def flatten(listOfLists):
    """Flatten exactly one level of nesting, lazily.

    >>> list(flatten([[0, 1], [2, 3]]))
    [0, 1, 2, 3]

    See also :func:`collapse` for multi-level flattening.
    """
    return (item for sublist in listOfLists for item in sublist)
+
+
def repeatfunc(func, times=None, *args):
    """Yield the results of calling ``func(*args)`` over and over.

    Stops after *times* calls when *times* is given; otherwise the
    iterator never terminates.

    >>> list(repeatfunc(lambda a, b: a + b, 4, 3, 5))
    [8, 8, 8, 8]
    """
    argument_stream = repeat(args) if times is None else repeat(args, times)
    return starmap(func, argument_stream)
+
+
+def _pairwise(iterable):
+ """Returns an iterator of paired items, overlapping, from the original
+
+ >>> take(4, pairwise(count()))
+ [(0, 1), (1, 2), (2, 3), (3, 4)]
+
+ On Python 3.10 and above, this is an alias for :func:`itertools.pairwise`.
+
+ """
+ a, b = tee(iterable)
+ next(b, None)
+ yield from zip(a, b)
+
+
# Python 3.10+ ships itertools.pairwise; use it when available,
# otherwise fall back to the pure-Python _pairwise above.
try:
    from itertools import pairwise as itertools_pairwise
except ImportError:
    pairwise = _pairwise
else:

    # Thin generator wrapper so the public name keeps the documented
    # signature and generator behavior over the C implementation.
    def pairwise(iterable):
        yield from itertools_pairwise(iterable)

    pairwise.__doc__ = _pairwise.__doc__
+
+
def grouper(iterable, n, fillvalue=None):
    """Break *iterable* into length-*n* tuples, padding the final tuple
    with *fillvalue* when needed.

    >>> list(grouper('ABCDEFG', 3, 'x'))
    [('A', 'B', 'C'), ('D', 'E', 'F'), ('G', 'x', 'x')]
    """
    # Legacy call order grouper(n, iterable) is still honored.
    if isinstance(iterable, int):
        warnings.warn(
            "grouper expects iterable as first parameter", DeprecationWarning
        )
        n, iterable = iterable, n
    # n references to one iterator: zip_longest pulls n items per tuple.
    slices = [iter(iterable)] * n
    return zip_longest(*slices, fillvalue=fillvalue)
+
+
def roundrobin(*iterables):
    """Alternate between the given iterables until all are exhausted.

    >>> list(roundrobin('ABC', 'D', 'EF'))
    ['A', 'D', 'E', 'B', 'F', 'C']

    Produces the same output as :func:`interleave_longest`, but can be
    faster when the number of iterables is small.
    """
    # Recipe credited to George Sakkis.  Whenever one iterable runs
    # dry, the cycle is rebuilt one slot shorter.
    remaining = len(iterables)
    advancers = cycle(iter(it).__next__ for it in iterables)
    while remaining:
        try:
            for advance in advancers:
                yield advance()
        except StopIteration:
            remaining -= 1
            advancers = cycle(islice(advancers, remaining))
+
+
def partition(pred, iterable):
    """Split *iterable* into two iterators by *pred*.

    Returns ``(false_items, true_items)``: the first yields items with
    ``pred(item) == False``, the second those with ``pred(item) ==
    True``.  With ``pred=None``, :func:`bool` is used.

    >>> evens, odds = partition(lambda x: x % 2, range(10))
    >>> list(evens), list(odds)
    ([0, 2, 4, 6, 8], [1, 3, 5, 7, 9])
    """
    if pred is None:
        pred = bool

    # Evaluate pred once per element, then fan out over two tees.
    flagged = ((pred(item), item) for item in iterable)
    false_side, true_side = tee(flagged)
    false_stream = (item for flag, item in false_side if not flag)
    true_stream = (item for flag, item in true_side if flag)
    return false_stream, true_stream
+
+
def powerset(iterable):
    """Yield every subset of *iterable* as a tuple, shortest first.

    >>> list(powerset([1, 2, 3]))
    [(), (1,), (2,), (3,), (1, 2), (1, 3), (2, 3), (1, 2, 3)]

    Repeated input elements produce repeated subsets; pre-filter with
    :func:`unique_everseen` to avoid duplicates.
    """
    pool = list(iterable)
    subset_sizes = range(len(pool) + 1)
    return chain.from_iterable(combinations(pool, r) for r in subset_sizes)
+
+
def unique_everseen(iterable, key=None):
    """Yield each distinct element once, in first-seen order.

    *key*, if given, maps elements to the value used for the
    uniqueness check.

    >>> list(unique_everseen('AAAABBBCCDAABBB'))
    ['A', 'B', 'C', 'D']
    >>> list(unique_everseen('ABBCcAD', str.lower))
    ['A', 'B', 'C', 'D']

    Unhashable elements fall back to a slower O(n^2) list scan, so
    mixing hashable and unhashable items works.  For unhashable
    containers, prefer a hashable *key* (e.g. ``key=tuple`` for lists,
    ``key=frozenset`` for sets) to keep the fast path.
    """
    hashed_markers = set()
    unhashable_markers = []

    for element in iterable:
        marker = element if key is None else key(element)
        try:
            if marker not in hashed_markers:
                hashed_markers.add(marker)
                yield element
        except TypeError:
            # marker is unhashable: linear scan fallback.
            if marker not in unhashable_markers:
                unhashable_markers.append(marker)
                yield element
+
+
def unique_justseen(iterable, key=None):
    """Yield elements in order, skipping consecutive duplicates
    (as determined by *key*).

    >>> list(unique_justseen('AAAABBBCCDAABBB'))
    ['A', 'B', 'C', 'D', 'A', 'B']
    >>> list(unique_justseen('ABBCcAD', str.lower))
    ['A', 'B', 'C', 'A', 'D']
    """
    # One representative (the first original element) per run.
    return (next(group) for _, group in groupby(iterable, key))
+
+
+def iter_except(func, exception, first=None):
+ """Yields results from a function repeatedly until an exception is raised.
+
+ Converts a call-until-exception interface to an iterator interface.
+ Like ``iter(func, sentinel)``, but uses an exception instead of a sentinel
+ to end the loop.
+
+ >>> l = [0, 1, 2]
+ >>> list(iter_except(l.pop, IndexError))
+ [2, 1, 0]
+
+ Multiple exceptions can be specified as a stopping condition:
+
+ >>> l = [1, 2, 3, '...', 4, 5, 6]
+ >>> list(iter_except(lambda: 1 + l.pop(), (IndexError, TypeError)))
+ [7, 6, 5]
+ >>> list(iter_except(lambda: 1 + l.pop(), (IndexError, TypeError)))
+ [4, 3, 2]
+ >>> list(iter_except(lambda: 1 + l.pop(), (IndexError, TypeError)))
+ []
+
+ """
+ try:
+ if first is not None:
+ yield first()
+ while 1:
+ yield func()
+ except exception:
+ pass
+
+
def first_true(iterable, default=None, pred=None):
    """Return the first item for which *pred* is true, else *default*.

    With ``pred=None``, the first truthy item is returned.

    >>> first_true(range(10))
    1
    >>> first_true(range(10), pred=lambda x: x > 5)
    6
    >>> first_true(range(10), default='missing', pred=lambda x: x > 9)
    'missing'
    """
    for candidate in filter(pred, iterable):
        return candidate
    return default
+
+
def random_product(*args, repeat=1):
    """Pick one random element from each input iterable, *repeat*
    rounds in a row.

    Equivalent to drawing a random element of
    ``itertools.product(*args, repeat=repeat)``.

    >>> random_product('abc', range(4), 'XYZ')  # doctest:+SKIP
    ('c', 3, 'Z')
    """
    pools = [tuple(arg) for arg in args] * repeat
    picks = [choice(pool) for pool in pools]
    return tuple(picks)
+
+
def random_permutation(iterable, r=None):
    """Return a random length-*r* permutation of *iterable* as a tuple.

    *r* defaults to the full length.  Equivalent to drawing a random
    element of ``itertools.permutations(iterable, r)``.

    >>> random_permutation(range(5))  # doctest:+SKIP
    (3, 4, 0, 1, 2)
    """
    pool = tuple(iterable)
    if r is None:
        r = len(pool)
    return tuple(sample(pool, r))
+
+
def random_combination(iterable, r):
    """Return a random sorted *r*-subset of *iterable* as a tuple.

    Equivalent to drawing a random element of
    ``itertools.combinations(iterable, r)``.

    >>> random_combination(range(5), 3)  # doctest:+SKIP
    (2, 3, 4)
    """
    pool = tuple(iterable)
    chosen = sorted(sample(range(len(pool)), r))
    return tuple(pool[i] for i in chosen)
+
+
def random_combination_with_replacement(iterable, r):
    """Return a random sorted *r*-selection from *iterable*, allowing
    repeats, as a tuple.

    Equivalent to drawing a random element of
    ``itertools.combinations_with_replacement(iterable, r)``.

    >>> random_combination_with_replacement(range(3), 5)  # doctest:+SKIP
    (0, 0, 1, 2, 2)
    """
    pool = tuple(iterable)
    size = len(pool)
    picks = sorted(randrange(size) for _ in range(r))
    return tuple(pool[i] for i in picks)
+
+
def nth_combination(iterable, r, index):
    """Equivalent to ``list(combinations(iterable, r))[index]``.

    The subsequences of *iterable* that are of length *r* can be ordered
    lexicographically. :func:`nth_combination` computes the subsequence at
    sort position *index* directly, without computing the previous
    subsequences.

    >>> nth_combination(range(5), 3, 5)
    (0, 3, 4)

    ``ValueError`` will be raised if *r* is negative or greater than the length
    of *iterable*.
    ``IndexError`` will be raised if the given *index* is invalid.
    """
    pool = tuple(iterable)
    n = len(pool)
    if (r < 0) or (r > n):
        raise ValueError

    # Compute c = C(n, r) incrementally; k = min(r, n - r) keeps the
    # loop short and every intermediate value an exact integer.
    c = 1
    k = min(r, n - r)
    for i in range(1, k + 1):
        c = c * (n - k + i) // i

    # Support negative indices, mirroring list indexing.
    if index < 0:
        index += c

    if (index < 0) or (index >= c):
        raise IndexError

    # Walk the combinatorial number system: repeatedly decide which
    # pool element comes next by comparing *index* against the size of
    # the block of combinations starting with each candidate.
    result = []
    while r:
        c, n, r = c * r // n, n - 1, r - 1
        while index >= c:
            index -= c
            c, n = c * (n - r) // n, n - 1
        result.append(pool[-1 - n])

    return tuple(result)
+
+
def prepend(value, iterator):
    """Yield *value* first, then every element of *iterator*.

    >>> list(prepend('0', ['1', '2', '3']))
    ['0', '1', '2', '3']

    To prepend several values, see :func:`itertools.chain` or
    :func:`value_chain`.
    """
    head = (value,)
    return chain(head, iterator)
+
+
def convolve(signal, kernel):
    """Yield the discrete linear convolution of *signal* with *kernel*.

    The arguments are not interchangeable: *kernel* is fully consumed
    and stored up front, while *signal* is streamed.

    >>> list(convolve((1, 2, 3, 4, 5), [3, 2, 1]))
    [3, 8, 14, 20, 26, 14, 5]
    """
    kernel = tuple(kernel)[::-1]  # reversed for the sliding dot product
    n = len(kernel)
    # Window starts as n zeros; trailing zeros flush the final taps.
    window = deque([0] * n, maxlen=n)
    for x in chain(signal, repeat(0, n - 1)):
        window.append(x)
        yield sum(k * w for k, w in zip(kernel, window))
+
+
def before_and_after(predicate, it):
    """Split *it* at the first element failing *predicate*.

    Returns ``(head, rest)``: *head* yields the leading run of elements
    satisfying *predicate*; *rest* yields everything from the first
    failing element onward (unlike :func:`itertools.takewhile`, that
    element is not lost).  *head* must be fully consumed before *rest*
    produces valid results.

    >>> upper, remainder = before_and_after(str.isupper, iter('ABCdEf'))
    >>> ''.join(upper), ''.join(remainder)
    ('ABC', 'dEf')
    """
    source = iter(it)
    crossover = []  # holds the single element that ended the head run

    def head():
        for element in source:
            if not predicate(element):
                crossover.append(element)
                return
            yield element

    def rest():
        yield from crossover
        yield from source

    return head(), rest()
+
+
def triplewise(iterable):
    """Yield overlapping triples from *iterable*.

    >>> list(triplewise('ABCDE'))
    [('A', 'B', 'C'), ('B', 'C', 'D'), ('C', 'D', 'E')]
    """
    # Three tees offset by 0, 1 and 2 positions give the triples.
    t1, t2, t3 = tee(iterable, 3)
    next(t2, None)
    next(t3, None)
    next(t3, None)
    yield from zip(t1, t2, t3)
+
+
def sliding_window(iterable, n):
    """Yield successive overlapping *n*-tuples from *iterable*.

    >>> list(sliding_window(range(6), 4))
    [(0, 1, 2, 3), (1, 2, 3, 4), (2, 3, 4, 5)]

    Nothing is yielded when *iterable* has fewer than *n* items.  For a
    variant with more features, see :func:`windowed`.
    """
    source = iter(iterable)
    window = deque(islice(source, n), maxlen=n)
    if len(window) == n:
        yield tuple(window)
        for item in source:
            window.append(item)
            yield tuple(window)
diff --git a/monEnvTP/lib/python3.8/site-packages/more_itertools/recipes.pyi b/monEnvTP/lib/python3.8/site-packages/more_itertools/recipes.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..4648a41b5e51155d4db10cd5e221e8a7a9449201
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/more_itertools/recipes.pyi
@@ -0,0 +1,112 @@
+"""Stubs for more_itertools.recipes"""
+from typing import (
+ Any,
+ Callable,
+ Iterable,
+ Iterator,
+ List,
+ Optional,
+ Tuple,
+ TypeVar,
+ Union,
+)
+from typing_extensions import overload, Type
+
+# Type and type variable definitions
+_T = TypeVar('_T')
+_U = TypeVar('_U')
+
# ---- Stubs mirroring the public API of recipes.py ----
def take(n: int, iterable: Iterable[_T]) -> List[_T]: ...
def tabulate(
    function: Callable[[int], _T], start: int = ...
) -> Iterator[_T]: ...
def tail(n: int, iterable: Iterable[_T]) -> Iterator[_T]: ...
def consume(iterator: Iterable[object], n: Optional[int] = ...) -> None: ...
@overload
def nth(iterable: Iterable[_T], n: int) -> Optional[_T]: ...
@overload
def nth(iterable: Iterable[_T], n: int, default: _U) -> Union[_T, _U]: ...
def all_equal(iterable: Iterable[object]) -> bool: ...
def quantify(
    iterable: Iterable[_T], pred: Callable[[_T], bool] = ...
) -> int: ...
def pad_none(iterable: Iterable[_T]) -> Iterator[Optional[_T]]: ...
def padnone(iterable: Iterable[_T]) -> Iterator[Optional[_T]]: ...
def ncycles(iterable: Iterable[_T], n: int) -> Iterator[_T]: ...
def dotproduct(vec1: Iterable[object], vec2: Iterable[object]) -> object: ...
def flatten(listOfLists: Iterable[Iterable[_T]]) -> Iterator[_T]: ...
def repeatfunc(
    func: Callable[..., _U], times: Optional[int] = ..., *args: Any
) -> Iterator[_U]: ...
def pairwise(iterable: Iterable[_T]) -> Iterator[Tuple[_T, _T]]: ...
# grouper: the last two overloads model the deprecated (n, iterable)
# argument order that the implementation still accepts with a warning.
@overload
def grouper(
    iterable: Iterable[_T], n: int
) -> Iterator[Tuple[Optional[_T], ...]]: ...
@overload
def grouper(
    iterable: Iterable[_T], n: int, fillvalue: _U
) -> Iterator[Tuple[Union[_T, _U], ...]]: ...
@overload
def grouper(  # Deprecated interface
    iterable: int, n: Iterable[_T]
) -> Iterator[Tuple[Optional[_T], ...]]: ...
@overload
def grouper(  # Deprecated interface
    iterable: int, n: Iterable[_T], fillvalue: _U
) -> Iterator[Tuple[Union[_T, _U], ...]]: ...
def roundrobin(*iterables: Iterable[_T]) -> Iterator[_T]: ...
def partition(
    pred: Optional[Callable[[_T], object]], iterable: Iterable[_T]
) -> Tuple[Iterator[_T], Iterator[_T]]: ...
def powerset(iterable: Iterable[_T]) -> Iterator[Tuple[_T, ...]]: ...
def unique_everseen(
    iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = ...
) -> Iterator[_T]: ...
def unique_justseen(
    iterable: Iterable[_T], key: Optional[Callable[[_T], object]] = ...
) -> Iterator[_T]: ...
# iter_except: providing 'first' adds its return type to the union.
@overload
def iter_except(
    func: Callable[[], _T],
    exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]],
    first: None = ...,
) -> Iterator[_T]: ...
@overload
def iter_except(
    func: Callable[[], _T],
    exception: Union[Type[BaseException], Tuple[Type[BaseException], ...]],
    first: Callable[[], _U],
) -> Iterator[Union[_T, _U]]: ...
@overload
def first_true(
    iterable: Iterable[_T], *, pred: Optional[Callable[[_T], object]] = ...
) -> Optional[_T]: ...
@overload
def first_true(
    iterable: Iterable[_T],
    default: _U,
    pred: Optional[Callable[[_T], object]] = ...,
) -> Union[_T, _U]: ...
def random_product(
    *args: Iterable[_T], repeat: int = ...
) -> Tuple[_T, ...]: ...
def random_permutation(
    iterable: Iterable[_T], r: Optional[int] = ...
) -> Tuple[_T, ...]: ...
def random_combination(iterable: Iterable[_T], r: int) -> Tuple[_T, ...]: ...
def random_combination_with_replacement(
    iterable: Iterable[_T], r: int
) -> Tuple[_T, ...]: ...
def nth_combination(
    iterable: Iterable[_T], r: int, index: int
) -> Tuple[_T, ...]: ...
def prepend(value: _T, iterator: Iterable[_U]) -> Iterator[Union[_T, _U]]: ...
def convolve(signal: Iterable[_T], kernel: Iterable[_T]) -> Iterator[_T]: ...
def before_and_after(
    predicate: Callable[[_T], bool], it: Iterable[_T]
) -> Tuple[Iterator[_T], Iterator[_T]]: ...
def triplewise(iterable: Iterable[_T]) -> Iterator[Tuple[_T, _T, _T]]: ...
def sliding_window(
    iterable: Iterable[_T], n: int
) -> Iterator[Tuple[_T, ...]]: ...
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/__init__.py b/monEnvTP/lib/python3.8/site-packages/mysql/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..256cc56eeb947d1f967ea417e1126e3912d73c83
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/__init__.py b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..192940be4e5c6d216a6c4b3dea6fe3e82e0a6896
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__init__.py
@@ -0,0 +1,301 @@
+# Copyright (c) 2009, 2020, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""
+MySQL Connector/Python - MySQL driver written in Python
+"""
+
+try:
+ import _mysql_connector # pylint: disable=F0401
+ from .connection_cext import CMySQLConnection
+except ImportError:
+ HAVE_CEXT = False
+else:
+ HAVE_CEXT = True
+
+try:
+ import dns.resolver
+ import dns.exception
+except ImportError:
+ HAVE_DNSPYTHON = False
+else:
+ HAVE_DNSPYTHON = True
+
+import random
+import warnings
+
+from . import version
+from .connection import MySQLConnection
+from .constants import DEFAULT_CONFIGURATION
+from .errors import ( # pylint: disable=W0622
+ Error, Warning, InterfaceError, DatabaseError,
+ NotSupportedError, DataError, IntegrityError, ProgrammingError,
+ OperationalError, InternalError, custom_error_exception, PoolError)
+from .constants import FieldFlag, FieldType, CharacterSet, \
+ RefreshOption, ClientFlag
+from .dbapi import (
+ Date, Time, Timestamp, Binary, DateFromTicks,
+ TimestampFromTicks, TimeFromTicks,
+ STRING, BINARY, NUMBER, DATETIME, ROWID,
+ apilevel, threadsafety, paramstyle)
+from .optionfiles import read_option_files
+
+
+_CONNECTION_POOLS = {}
+
+ERROR_NO_CEXT = "MySQL Connector/Python C Extension not available"
+
+
+def _get_pooled_connection(**kwargs):
+    """Return a pooled MySQL connection.
+
+    Looks up (or lazily creates) the MySQLConnectionPool registered in the
+    module-level _CONNECTION_POOLS dict under kwargs['pool_name'] — or a
+    generated name — and returns a connection from it. Raises ImportError
+    when use_pure=False is requested without the C extension, PoolError when
+    a different pool_size is requested for an existing pool, and
+    InterfaceError when the registry entry is not a usable pool.
+    """
+    # If no pool name specified, generate one
+    from .pooling import (
+        MySQLConnectionPool, generate_pool_name,
+        CONNECTION_POOL_LOCK)
+
+    try:
+        pool_name = kwargs['pool_name']
+    except KeyError:
+        pool_name = generate_pool_name(**kwargs)
+
+    if 'use_pure' in kwargs:
+        if not kwargs['use_pure'] and not HAVE_CEXT:
+            raise ImportError(ERROR_NO_CEXT)
+
+    # Setup the pool, ensuring only 1 thread can update at a time
+    with CONNECTION_POOL_LOCK:
+        if pool_name not in _CONNECTION_POOLS:
+            _CONNECTION_POOLS[pool_name] = MySQLConnectionPool(**kwargs)
+        elif isinstance(_CONNECTION_POOLS[pool_name], MySQLConnectionPool):
+            # pool_size must be the same as the already-registered pool's
+            check_size = _CONNECTION_POOLS[pool_name].pool_size
+            if ('pool_size' in kwargs
+                    and kwargs['pool_size'] != check_size):
+                raise PoolError("Size can not be changed "
+                                "for active pools.")
+
+        # Return pooled connection
+        try:
+            return _CONNECTION_POOLS[pool_name].get_connection()
+        except AttributeError:
+            # Registry entry lacks get_connection() — not a usable pool.
+            raise InterfaceError(
+                "Failed getting connection from pool '{0}'".format(pool_name))
+
+
+def _get_failover_connection(**kwargs):
+ """Return a MySQL connection and try to failover if needed
+
+ An InterfaceError is raise when no MySQL is available. ValueError is
+ raised when the failover server configuration contains an illegal
+ connection argument. Supported arguments are user, password, host, port,
+ unix_socket and database. ValueError is also raised when the failover
+ argument was not provided.
+
+ Returns MySQLConnection instance.
+ """
+ config = kwargs.copy()
+ try:
+ failover = config['failover']
+ except KeyError:
+ raise ValueError('failover argument not provided')
+ del config['failover']
+
+ support_cnx_args = set(
+ ['user', 'password', 'host', 'port', 'unix_socket',
+ 'database', 'pool_name', 'pool_size', 'priority'])
+
+ # First check if we can add all use the configuration
+ priority_count = 0
+ for server in failover:
+ diff = set(server.keys()) - support_cnx_args
+ if diff:
+ raise ValueError(
+ "Unsupported connection argument {0} in failover: {1}".format(
+ 's' if len(diff) > 1 else '',
+ ', '.join(diff)))
+ if hasattr(server, "priority"):
+ priority_count += 1
+
+ server["priority"] = server.get("priority", 100)
+ if server["priority"] < 0 or server["priority"] > 100:
+ raise InterfaceError(
+ "Priority value should be in the range of 0 to 100, "
+ "got : {}".format(server["priority"]))
+ if not isinstance(server["priority"], int):
+ raise InterfaceError(
+ "Priority value should be an integer in the range of 0 to "
+ "100, got : {}".format(server["priority"]))
+
+ if 0 < priority_count < len(failover):
+ raise ProgrammingError("You must either assign no priority to any "
+ "of the routers or give a priority for "
+ "every router")
+
+ failover.sort(key=lambda x: x['priority'], reverse=True)
+
+ server_directory = {}
+ server_priority_list = []
+ for server in failover:
+ if server["priority"] not in server_directory:
+ server_directory[server["priority"]] = [server]
+ server_priority_list.append(server["priority"])
+ else:
+ server_directory[server["priority"]].append(server)
+
+ for priority in server_priority_list:
+ failover_list = server_directory[priority]
+ for _ in range(len(failover_list)):
+ last = len(failover_list) - 1
+ index = random.randint(0, last)
+ server = failover_list.pop(index)
+ new_config = config.copy()
+ new_config.update(server)
+ new_config.pop('priority', None)
+ try:
+ return connect(**new_config)
+ except Error:
+ # If we failed to connect, we try the next server
+ pass
+
+ raise InterfaceError("Unable to connect to any of the target hosts")
+
+
+def connect(*args, **kwargs):
+    """Create or get a MySQL connection object
+
+    In its simplest form, connect() will open a connection to a
+    MySQL server and return a MySQLConnection object.
+
+    When any connection pooling arguments are given, for example pool_name
+    or pool_size, a pool is created or a previous one is used to return
+    a PooledMySQLConnection.
+
+    Dispatch order: DNS SRV expansion, then option files, then failover,
+    then pooling, then the plain (C extension or pure Python) connection.
+
+    Returns MySQLConnection or PooledMySQLConnection.
+    """
+    # DNS SRV
+    dns_srv = kwargs.pop('dns_srv') if 'dns_srv' in kwargs else False
+
+    if not isinstance(dns_srv, bool):
+        raise InterfaceError("The value of 'dns-srv' must be a boolean")
+
+    if dns_srv:
+        if not HAVE_DNSPYTHON:
+            raise InterfaceError('MySQL host configuration requested DNS '
+                                 'SRV. This requires the Python dnspython '
+                                 'module. Please refer to documentation')
+        # SRV records already provide hosts and ports, so an explicit
+        # socket, port or failover list is contradictory.
+        if 'unix_socket' in kwargs:
+            raise InterfaceError('Using Unix domain sockets with DNS SRV '
+                                 'lookup is not allowed')
+        if 'port' in kwargs:
+            raise InterfaceError('Specifying a port number with DNS SRV '
+                                 'lookup is not allowed')
+        if 'failover' in kwargs:
+            raise InterfaceError('Specifying multiple hostnames with DNS '
+                                 'SRV look up is not allowed')
+        if 'host' not in kwargs:
+            kwargs['host'] = DEFAULT_CONFIGURATION['host']
+
+        try:
+            # NOTE(review): dns.resolver.query() is deprecated in newer
+            # dnspython releases in favour of resolve() — confirm the
+            # minimum supported dnspython version before changing.
+            srv_records = dns.resolver.query(kwargs['host'], 'SRV')
+        except dns.exception.DNSException:
+            raise InterfaceError("Unable to locate any hosts for '{0}'"
+                                 "".format(kwargs['host']))
+
+        failover = []
+        for srv in srv_records:
+            failover.append({
+                'host': srv.target.to_text(omit_final_dot=True),
+                'port': srv.port,
+                'priority': srv.priority,
+                'weight': srv.weight
+            })
+
+        # Order: lower priority value first, then higher weight first.
+        failover.sort(key=lambda x: (x['priority'], -x['weight']))
+        kwargs['failover'] = [{'host': srv['host'],
+                               'port': srv['port']} for srv in failover]
+
+    # Option files ('read_default_file' is an alias for 'option_files')
+    if 'read_default_file' in kwargs:
+        kwargs['option_files'] = kwargs['read_default_file']
+        kwargs.pop('read_default_file')
+
+    if 'option_files' in kwargs:
+        # Re-enter connect() with the configuration merged from the files.
+        new_config = read_option_files(**kwargs)
+        return connect(**new_config)
+
+    # Failover
+    if 'failover' in kwargs:
+        return _get_failover_connection(**kwargs)
+
+    # Pooled connections
+    try:
+        from .constants import CNX_POOL_ARGS
+        if any([key in kwargs for key in CNX_POOL_ARGS]):
+            return _get_pooled_connection(**kwargs)
+    except NameError:
+        # No pooling
+        pass
+
+    # Use C Extension by default
+    use_pure = kwargs.get('use_pure', False)
+    if 'use_pure' in kwargs:
+        del kwargs['use_pure'] # Remove 'use_pure' from kwargs
+        if not use_pure and not HAVE_CEXT:
+            raise ImportError(ERROR_NO_CEXT)
+
+    if HAVE_CEXT and not use_pure:
+        return CMySQLConnection(*args, **kwargs)
+    return MySQLConnection(*args, **kwargs)
+Connect = connect # pylint: disable=C0103
+
+__version_info__ = version.VERSION
+__version__ = version.VERSION_TEXT
+
+__all__ = [
+ 'MySQLConnection', 'Connect', 'custom_error_exception',
+
+ # Some useful constants
+ 'FieldType', 'FieldFlag', 'ClientFlag', 'CharacterSet', 'RefreshOption',
+ 'HAVE_CEXT',
+
+ # Error handling
+ 'Error', 'Warning',
+ 'InterfaceError', 'DatabaseError',
+ 'NotSupportedError', 'DataError', 'IntegrityError', 'ProgrammingError',
+ 'OperationalError', 'InternalError',
+
+ # DBAPI PEP 249 required exports
+ 'connect', 'apilevel', 'threadsafety', 'paramstyle',
+ 'Date', 'Time', 'Timestamp', 'Binary',
+ 'DateFromTicks', 'DateFromTicks', 'TimestampFromTicks', 'TimeFromTicks',
+ 'STRING', 'BINARY', 'NUMBER',
+ 'DATETIME', 'ROWID',
+
+ # C Extension
+ 'CMySQLConnection',
+ ]
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..06fbc77d288162f7766e26fc695e0ce164dfbb80
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/abstracts.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/abstracts.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ee2a29828190452252ff8cc3d6eec9de045fd8a9
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/abstracts.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/authentication.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/authentication.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..61479d49d2fc8766f45366d5d5777a3ac93d9cf4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/authentication.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/charsets.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/charsets.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9ce42ca71e6ee66711532771203aba28f732ce63
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/charsets.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/connection.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/connection.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..dfc31cf2215071e1d07686dc72d8accd93080c08
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/connection.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/connection_cext.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/connection_cext.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..cbcf6bd93e4fbfe27118c1fd62d704cc74de87f1
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/connection_cext.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/constants.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/constants.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e9713344d24b00aff5ce9289ba785894c97ab27a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/constants.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/conversion.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/conversion.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2df878802f75e6af44f95f56a564ed0acf0af215
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/conversion.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/cursor.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/cursor.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9ce9d5b3672e885292049c44618b1aea9765758e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/cursor.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/cursor_cext.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/cursor_cext.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8d7f6828371d86fadf11dc3cb5b811ea1e29d0d5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/cursor_cext.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/custom_types.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/custom_types.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4f497846073d627e729c0276403c33b51514ce66
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/custom_types.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/dbapi.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/dbapi.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6a02b2baea9374ff7941fe311e4cdf6f644e9bc8
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/dbapi.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/errorcode.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/errorcode.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d3fdbb453b6543b8d6dd80f0cb4ffd821680849e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/errorcode.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/errors.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/errors.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0e4083f655708c946bfc4743907fcc3f94298c36
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/errors.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/network.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/network.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..639214c5633f409bab7e308a9cef71877c331d55
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/network.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/optionfiles.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/optionfiles.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e6b51b10d5738c89d3546220053a07510e44392e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/optionfiles.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/pooling.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/pooling.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..fb702591c43e11bd36a7d2907034e798f23b0f62
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/pooling.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/protocol.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/protocol.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..155fa4ce39b755587181ec3057c0fadee66c28c2
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/protocol.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/utils.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/utils.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3ea49f081ea9a5bcb030d8ec78385390e229d75c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/utils.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/version.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/version.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..dddb24ab8f1a397cfb6558283ff2dbec4c3548c3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/connector/__pycache__/version.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/abstracts.py b/monEnvTP/lib/python3.8/site-packages/mysql/connector/abstracts.py
new file mode 100644
index 0000000000000000000000000000000000000000..7c1b5a39c27ce79a4f56da761e3d3713de90a955
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql/connector/abstracts.py
@@ -0,0 +1,1508 @@
+# Copyright (c) 2014, 2021, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""Module gathering all abstract base classes"""
+
+from abc import ABCMeta, abstractmethod, abstractproperty
+from decimal import Decimal
+from time import sleep
+from datetime import date, datetime, time, timedelta
+import os
+import re
+import weakref
+TLS_V1_3_SUPPORTED = False
+try:
+ import ssl
+ if hasattr(ssl, "HAS_TLSv1_3") and ssl.HAS_TLSv1_3:
+ TLS_V1_3_SUPPORTED = True
+except:
+ # If import fails, we don't have SSL support.
+ pass
+
+from .conversion import MySQLConverterBase
+from .constants import (ClientFlag, CharacterSet, CONN_ATTRS_DN,
+ DEFAULT_CONFIGURATION, DEPRECATED_TLS_VERSIONS,
+ OPENSSL_CS_NAMES, TLS_CIPHER_SUITES, TLS_VERSIONS)
+from .optionfiles import MySQLOptionsParser
+from .utils import make_abc
+from . import errors
+
+NAMED_TUPLE_CACHE = weakref.WeakValueDictionary()
+
+DUPLICATED_IN_LIST_ERROR = (
+ "The '{list}' list must not contain repeated values, the value "
+ "'{value}' is duplicated.")
+
+TLS_VERSION_ERROR = ("The given tls_version: '{}' is not recognized as a valid "
+ "TLS protocol version (should be one of {}).")
+
+TLS_VERSION_DEPRECATED_ERROR = ("The given tls_version: '{}' are no longer "
+ "allowed (should be one of {}).")
+
+TLS_VER_NO_SUPPORTED = ("No supported TLS protocol version found in the "
+ "'tls-versions' list '{}'. ")
+
+KRB_SERVICE_PINCIPAL_ERROR = (
+ 'Option "krb_service_principal" {error}, must be a string in the form '
+ '"primary/instance@realm" e.g "ldap/ldapauth@MYSQL.COM" where "@realm" '
+ 'is optional and if it is not given will be assumed to belong to the '
+ 'default realm, as configured in the krb5.conf file.')
+
+MYSQL_PY_TYPES = (
+ (int, str, bytes, Decimal, float, datetime, date, timedelta, time,))
+
+
+@make_abc(ABCMeta)
+class MySQLConnectionAbstract(object):
+
+ """Abstract class for classes connecting to a MySQL server"""
+
+    def __init__(self, **kwargs):
+        """Initialize default connection state.
+
+        ``kwargs`` is accepted for subclass signature compatibility but is
+        not consumed by this base initializer.
+        """
+        # --- Handshake / session state negotiated with the server ---
+        self._client_flags = ClientFlag.get_default()
+        self._charset_id = 45  # presumably utf8mb4 — confirm in CharacterSet
+        self._sql_mode = None
+        self._time_zone = None
+        self._autocommit = False
+        self._server_version = None
+        self._handshake = None
+        self._conn_attrs = {}
+
+        # --- Credentials and endpoint ---
+        self._user = ''
+        self._password = ''
+        self._password1 = ''
+        self._password2 = ''
+        self._password3 = ''
+        self._database = ''
+        self._host = '127.0.0.1'
+        self._port = 3306
+        self._unix_socket = None
+        self._client_host = ''
+        self._client_port = 0
+        self._ssl = {}
+        self._ssl_disabled = DEFAULT_CONFIGURATION["ssl_disabled"]
+        self._force_ipv6 = False
+        self._oci_config_file = None
+
+        # --- Behaviour flags and result handling ---
+        self._use_unicode = True
+        self._get_warnings = False
+        self._raise_on_warnings = False
+        self._connection_timeout = DEFAULT_CONFIGURATION["connect_timeout"]
+        self._buffered = False
+        self._unread_result = False
+        self._have_next_result = False
+        self._raw = False
+        self._in_transaction = False
+        self._allow_local_infile = DEFAULT_CONFIGURATION["allow_local_infile"]
+        self._allow_local_infile_in_path = (
+            DEFAULT_CONFIGURATION["allow_local_infile_in_path"])
+
+        self._prepared_statements = None
+        self._query_attrs = []
+
+        # --- Authentication, pooling and value conversion helpers ---
+        self._ssl_active = False
+        self._auth_plugin = None
+        self._pool_config_version = None
+        self.converter = None
+        self._converter_class = None
+        self._converter_str_fallback = False
+        self._compress = False
+
+        self._consume_results = False
+
+    def __enter__(self):
+        """Enter the runtime context, returning the connection itself."""
+        return self
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        """Close the connection on context exit; exceptions propagate."""
+        self.close()
+
+    def _get_self(self):
+        """Return the real object behind a ``weakref.proxy``.
+
+        Used when the original (strongly-referenced) object is needed while
+        the connection is handled through ``weakref.proxy``.
+        """
+        return self
+
+    def _read_option_files(self, config):
+        """
+        Read option files for connection parameters.
+
+        Checks if connection arguments contain option file arguments, and then
+        reads option files accordingly, merging file values into ``config``.
+        Explicitly passed connection arguments win over file values.
+        """
+        if 'option_files' in config:
+            try:
+                if isinstance(config['option_groups'], str):
+                    config['option_groups'] = [config['option_groups']]
+                groups = config['option_groups']
+                del config['option_groups']
+            except KeyError:
+                # Default groups when 'option_groups' was not given.
+                groups = ['client', 'connector_python']
+
+            if isinstance(config['option_files'], str):
+                config['option_files'] = [config['option_files']]
+            option_parser = MySQLOptionsParser(list(config['option_files']),
+                                               keep_dashes=False)
+            del config['option_files']
+
+            config_from_file = option_parser.get_groups_as_dict_with_priority(
+                *groups)
+            config_options = {}
+            for group in groups:
+                try:
+                    for option, value in config_from_file[group].items():
+                        try:
+                            if option == 'socket':
+                                option = 'unix_socket'
+                            # Lookup raises KeyError for unknown option names.
+                            # pylint: disable=W0104
+                            DEFAULT_CONFIGURATION[option]
+                            # pylint: enable=W0104
+
+                            # Keep the value with the highest priority.
+                            if (option not in config_options or
+                                    config_options[option][1] <= value[1]):
+                                config_options[option] = value
+                        except KeyError:
+                            if group == 'connector_python':
+                                raise AttributeError("Unsupported argument "
+                                                     "'{0}'".format(option))
+                except KeyError:
+                    continue
+
+            for option, value in config_options.items():
+                if option not in config:
+                    try:
+                        # NOTE(review): eval() interprets option-file values
+                        # (numbers, booleans, ...). Option files are local,
+                        # trusted input, but this is still eval on file
+                        # content — consider ast.literal_eval.
+                        config[option] = eval(value[0]) # pylint: disable=W0123
+                    except (NameError, SyntaxError):
+                        config[option] = value[0]
+        return config
+
+ def _validate_tls_ciphersuites(self):
+ """Validates the tls_ciphersuites option.
+ """
+ tls_ciphersuites = []
+ tls_cs = self._ssl["tls_ciphersuites"]
+
+ if isinstance(tls_cs, str):
+ if not (tls_cs.startswith("[") and
+ tls_cs.endswith("]")):
+ raise AttributeError("tls_ciphersuites must be a list, "
+ "found: '{}'".format(tls_cs))
+ else:
+ tls_css = tls_cs[1:-1].split(",")
+ if not tls_css:
+ raise AttributeError("No valid cipher suite found "
+ "in 'tls_ciphersuites' list.")
+ for _tls_cs in tls_css:
+ _tls_cs = tls_cs.strip().upper()
+ if _tls_cs:
+ tls_ciphersuites.append(_tls_cs)
+
+ elif isinstance(tls_cs, list):
+ tls_ciphersuites = [tls_cs for tls_cs in tls_cs if tls_cs]
+
+ elif isinstance(tls_cs, set):
+ for tls_cs in tls_ciphersuites:
+ if tls_cs:
+ tls_ciphersuites.append(tls_cs)
+ else:
+ raise AttributeError(
+ "tls_ciphersuites should be a list with one or more "
+ "ciphersuites. Found: '{}'".format(tls_cs))
+
+ tls_versions = TLS_VERSIONS[:] if self._ssl.get("tls_versions", None) \
+ is None else self._ssl["tls_versions"][:]
+
+ # A newer TLS version can use a cipher introduced on
+ # an older version.
+ tls_versions.sort(reverse=True)
+ newer_tls_ver = tls_versions[0]
+ # translated_names[0] belongs to TLSv1, TLSv1.1 and TLSv1.2
+ # translated_names[1] are TLSv1.3 only
+ translated_names = [[],[]]
+ iani_cipher_suites_names = {}
+ ossl_cipher_suites_names = []
+
+ # Old ciphers can work with new TLS versions.
+ # Find all the ciphers introduced on previous TLS versions.
+ for tls_ver in TLS_VERSIONS[:TLS_VERSIONS.index(newer_tls_ver) + 1]:
+ iani_cipher_suites_names.update(TLS_CIPHER_SUITES[tls_ver])
+ ossl_cipher_suites_names.extend(OPENSSL_CS_NAMES[tls_ver])
+
+ for name in tls_ciphersuites:
+ if "-" in name and name in ossl_cipher_suites_names:
+ if name in OPENSSL_CS_NAMES["TLSv1.3"]:
+ translated_names[1].append(name)
+ else:
+ translated_names[0].append(name)
+ elif name in iani_cipher_suites_names:
+ translated_name = iani_cipher_suites_names[name]
+ if translated_name in translated_names:
+ raise AttributeError(
+ DUPLICATED_IN_LIST_ERROR.format(
+ list="tls_ciphersuites", value=translated_name))
+ else:
+ if name in TLS_CIPHER_SUITES["TLSv1.3"]:
+ translated_names[1].append(
+ iani_cipher_suites_names[name])
+ else:
+ translated_names[0].append(
+ iani_cipher_suites_names[name])
+ else:
+ raise AttributeError(
+ "The value '{}' in tls_ciphersuites is not a valid "
+ "cipher suite".format(name))
+ if not translated_names[0] and not translated_names[1]:
+ raise AttributeError("No valid cipher suite found in the "
+ "'tls_ciphersuites' list.")
+ translated_names = [":".join(translated_names[0]),
+ ":".join(translated_names[1])]
+ self._ssl["tls_ciphersuites"] = translated_names
+
+ def _validate_tls_versions(self):
+ """Validates the tls_versions option.
+ """
+ tls_versions = []
+ tls_version = self._ssl["tls_versions"]
+
+ if isinstance(tls_version, str):
+ if not (tls_version.startswith("[") and tls_version.endswith("]")):
+ raise AttributeError("tls_versions must be a list, found: '{}'"
+ "".format(tls_version))
+ else:
+ tls_vers = tls_version[1:-1].split(",")
+ for tls_ver in tls_vers:
+ tls_version = tls_ver.strip()
+ if tls_version == "":
+ continue
+ elif tls_version in tls_versions:
+ raise AttributeError(
+ DUPLICATED_IN_LIST_ERROR.format(
+ list="tls_versions", value=tls_version))
+ tls_versions.append(tls_version)
+ if tls_vers == ["TLSv1.3"] and not TLS_V1_3_SUPPORTED:
+ raise AttributeError(
+ TLS_VER_NO_SUPPORTED.format(tls_version, TLS_VERSIONS))
+ elif isinstance(tls_version, list):
+ if not tls_version:
+ raise AttributeError(
+ "At least one TLS protocol version must be specified in "
+ "'tls_versions' list.")
+ for tls_ver in tls_version:
+ if tls_ver in tls_versions:
+ raise AttributeError(
+ DUPLICATED_IN_LIST_ERROR.format(
+ list="tls_versions", value=tls_ver))
+ else:
+ tls_versions.append(tls_ver)
+ elif isinstance(tls_version, set):
+ for tls_ver in tls_version:
+ tls_versions.append(tls_ver)
+ else:
+ raise AttributeError(
+ "tls_versions should be a list with one or more of versions in "
+ "{}. found: '{}'".format(", ".join(TLS_VERSIONS), tls_versions))
+
+ if not tls_versions:
+ raise AttributeError(
+ "At least one TLS protocol version must be specified "
+ "in 'tls_versions' list when this option is given.")
+
+ use_tls_versions = []
+ deprecated_tls_versions = []
+ invalid_tls_versions = []
+ for tls_ver in tls_versions:
+ if tls_ver in TLS_VERSIONS:
+ use_tls_versions.append(tls_ver)
+ if tls_ver in DEPRECATED_TLS_VERSIONS:
+ deprecated_tls_versions.append(tls_ver)
+ else:
+ invalid_tls_versions.append(tls_ver)
+
+ if use_tls_versions:
+ if use_tls_versions == ["TLSv1.3"] and not TLS_V1_3_SUPPORTED:
+ raise errors.NotSupportedError(
+ TLS_VER_NO_SUPPORTED.format(tls_version, TLS_VERSIONS))
+ use_tls_versions.sort()
+ self._ssl["tls_versions"] = use_tls_versions
+ elif deprecated_tls_versions:
+ raise errors.NotSupportedError(
+ TLS_VERSION_DEPRECATED_ERROR.format(deprecated_tls_versions,
+ TLS_VERSIONS))
+ elif invalid_tls_versions:
+ raise AttributeError(
+ TLS_VERSION_ERROR.format(tls_ver, TLS_VERSIONS))
+
+    @property
+    def user(self):
+        """User name used while connecting to MySQL"""
+        return self._user
+
+    @property
+    def server_host(self):
+        """MySQL server IP address or host name"""
+        return self._host
+
+    @property
+    def server_port(self):
+        """MySQL server TCP/IP port"""
+        return self._port
+
+    @property
+    def unix_socket(self):
+        """MySQL Unix socket file location"""
+        return self._unix_socket
+
+    @abstractproperty
+    def database(self):
+        """Get the current database"""
+        pass
+
+    @database.setter
+    def database(self, value):
+        """Set the current database by issuing a USE statement.
+
+        NOTE(review): the database name is interpolated directly into the
+        SQL text; no quoting or escaping is applied here, so callers must
+        supply a trusted identifier.
+        """
+        self.cmd_query("USE %s" % value)
+
+    @property
+    def can_consume_results(self):
+        """Return True when this connection is configured to consume results"""
+        return self._consume_results
+
+ def config(self, **kwargs):
+ """Configure the MySQL Connection
+
+ This method allows you to configure the MySQLConnection instance.
+
+ Raises on errors.
+ """
+ config = kwargs.copy()
+ if 'dsn' in config:
+ raise errors.NotSupportedError("Data source name is not supported")
+
+ # Read option files
+ self._read_option_files(config)
+
+ # Configure how we handle MySQL warnings
+ try:
+ self.get_warnings = config['get_warnings']
+ del config['get_warnings']
+ except KeyError:
+ pass # Leave what was set or default
+ try:
+ self.raise_on_warnings = config['raise_on_warnings']
+ del config['raise_on_warnings']
+ except KeyError:
+ pass # Leave what was set or default
+
+ # Configure client flags
+ try:
+ default = ClientFlag.get_default()
+ self.set_client_flags(config['client_flags'] or default)
+ del config['client_flags']
+ except KeyError:
+ pass # Missing client_flags-argument is OK
+
+ try:
+ if config['compress']:
+ self._compress = True
+ self.set_client_flags([ClientFlag.COMPRESS])
+ except KeyError:
+ pass # Missing compress argument is OK
+
+ self._allow_local_infile = config.get(
+ 'allow_local_infile', DEFAULT_CONFIGURATION['allow_local_infile'])
+ self._allow_local_infile_in_path = config.get(
+ 'allow_local_infile_in_path',
+ DEFAULT_CONFIGURATION['allow_local_infile_in_path'])
+ infile_in_path = None
+ if self._allow_local_infile_in_path:
+ infile_in_path = os.path.abspath(self._allow_local_infile_in_path)
+ if infile_in_path and os.path.exists(infile_in_path) and \
+ not os.path.isdir(infile_in_path) or \
+ os.path.islink(infile_in_path):
+ raise AttributeError("allow_local_infile_in_path must be a "
+ "directory")
+ if self._allow_local_infile or self._allow_local_infile_in_path:
+ self.set_client_flags([ClientFlag.LOCAL_FILES])
+ else:
+ self.set_client_flags([-ClientFlag.LOCAL_FILES])
+
+ try:
+ if not config['consume_results']:
+ self._consume_results = False
+ else:
+ self._consume_results = True
+ except KeyError:
+ self._consume_results = False
+
+ # Configure auth_plugin
+ try:
+ self._auth_plugin = config['auth_plugin']
+ del config['auth_plugin']
+ except KeyError:
+ self._auth_plugin = ''
+
+ # Configure character set and collation
+ if 'charset' in config or 'collation' in config:
+ try:
+ charset = config['charset']
+ del config['charset']
+ except KeyError:
+ charset = None
+ try:
+ collation = config['collation']
+ del config['collation']
+ except KeyError:
+ collation = None
+ self._charset_id = CharacterSet.get_charset_info(charset,
+ collation)[0]
+
+ # Set converter class
+ try:
+ self.set_converter_class(config['converter_class'])
+ except KeyError:
+ pass # Using default converter class
+ except TypeError:
+ raise AttributeError("Converter class should be a subclass "
+ "of conversion.MySQLConverterBase.")
+
+ # Compatible configuration with other drivers
+ compat_map = [
+ # (<other driver argument>,<translates to>)
+ ('db', 'database'),
+ ('username', 'user'),
+ ('passwd', 'password'),
+ ('connect_timeout', 'connection_timeout'),
+ ('read_default_file', 'option_files'),
+ ]
+ for compat, translate in compat_map:
+ try:
+ if translate not in config:
+ config[translate] = config[compat]
+ del config[compat]
+ except KeyError:
+ pass # Missing compat argument is OK
+
+ # Configure login information
+ if 'user' in config or 'password' in config:
+ try:
+ user = config['user']
+ del config['user']
+ except KeyError:
+ user = self._user
+ try:
+ password = config['password']
+ del config['password']
+ except KeyError:
+ password = self._password
+ self.set_login(user, password)
+
+ # Configure host information
+ if 'host' in config and config['host']:
+ self._host = config['host']
+
+ # Check network locations
+ try:
+ self._port = int(config['port'])
+ del config['port']
+ except KeyError:
+ pass # Missing port argument is OK
+ except ValueError:
+ raise errors.InterfaceError(
+ "TCP/IP port number should be an integer")
+
+ if "ssl_disabled" in config:
+ self._ssl_disabled = config.pop("ssl_disabled")
+
+ if self._ssl_disabled and self._auth_plugin == "mysql_clear_password":
+ raise errors.InterfaceError("Clear password authentication is not "
+ "supported over insecure channels")
+
+ # Other configuration
+ set_ssl_flag = False
+ for key, value in config.items():
+ try:
+ DEFAULT_CONFIGURATION[key]
+ except KeyError:
+ raise AttributeError("Unsupported argument '{0}'".format(key))
+ # SSL Configuration
+ if key.startswith('ssl_'):
+ set_ssl_flag = True
+ self._ssl.update({key.replace('ssl_', ''): value})
+ elif key.startswith('tls_'):
+ set_ssl_flag = True
+ self._ssl.update({key: value})
+ else:
+ attribute = '_' + key
+ try:
+ setattr(self, attribute, value.strip())
+ except AttributeError:
+ setattr(self, attribute, value)
+
+ if set_ssl_flag:
+ if 'verify_cert' not in self._ssl:
+ self._ssl['verify_cert'] = \
+ DEFAULT_CONFIGURATION['ssl_verify_cert']
+ if 'verify_identity' not in self._ssl:
+ self._ssl['verify_identity'] = \
+ DEFAULT_CONFIGURATION['ssl_verify_identity']
+ # Make sure both ssl_key/ssl_cert are set, or neither (XOR)
+ if 'ca' not in self._ssl or self._ssl['ca'] is None:
+ self._ssl['ca'] = ""
+ if bool('key' in self._ssl) != bool('cert' in self._ssl):
+ raise AttributeError(
+ "ssl_key and ssl_cert need to be both "
+ "specified, or neither."
+ )
+ # Make sure key/cert are set to None
+ elif not set(('key', 'cert')) <= set(self._ssl):
+ self._ssl['key'] = None
+ self._ssl['cert'] = None
+ elif (self._ssl['key'] is None) != (self._ssl['cert'] is None):
+ raise AttributeError(
+ "ssl_key and ssl_cert need to be both "
+ "set, or neither."
+ )
+ if "tls_versions" in self._ssl and \
+ self._ssl["tls_versions"] is not None:
+ if self._ssl_disabled:
+ raise AttributeError("The tls_versions option can not be "
+ "used along with ssl_disabled.")
+ self._validate_tls_versions()
+
+ if "tls_ciphersuites" in self._ssl and self._ssl["tls_ciphersuites"] is not None:
+ if self._ssl_disabled:
+ raise AttributeError("The tls_ciphersuites option can not "
+ "be used along with ssl_disabled.")
+ self._validate_tls_ciphersuites()
+
+ if self._conn_attrs is None:
+ self._conn_attrs = {}
+ elif not isinstance(self._conn_attrs, dict):
+ raise errors.InterfaceError('conn_attrs must be of type dict.')
+ else:
+ for attr_name in self._conn_attrs:
+ if attr_name in CONN_ATTRS_DN:
+ continue
+ # Validate name type
+ if not isinstance(attr_name, str):
+ raise errors.InterfaceError(
+ "Attribute name should be a string, found: '{}' in '{}'"
+ "".format(attr_name, self._conn_attrs))
+ # Validate attribute name limit 32 characters
+ if len(attr_name) > 32:
+ raise errors.InterfaceError(
+ "Attribute name '{}' exceeds 32 characters limit size."
+ "".format(attr_name))
+ # Validate names in connection attributes cannot start with "_"
+ if attr_name.startswith("_"):
+ raise errors.InterfaceError(
+ "Key names in connection attributes cannot start with "
+ "'_', found: '{}'".format(attr_name))
+ # Validate value type
+ attr_value = self._conn_attrs[attr_name]
+ if not isinstance(attr_value, str):
+ raise errors.InterfaceError(
+ "Attribute '{}' value: '{}' must be a string type."
+ "".format(attr_name, attr_value))
+ # Validate attribute value limit 1024 characters
+ if len(attr_value) > 1024:
+ raise errors.InterfaceError(
+ "Attribute '{}' value: '{}' exceeds 1024 characters "
+ "limit size".format(attr_name, attr_value))
+
+ if self._client_flags & ClientFlag.CONNECT_ARGS:
+ self._add_default_conn_attrs()
+ if "krb_service_principal" in config and \
+ config["krb_service_principal"] is not None:
+ self._krb_service_principal = config["krb_service_principal"]
+ if not isinstance(self._krb_service_principal, str):
+ raise errors.InterfaceError(KRB_SERVICE_PINCIPAL_ERROR.format(
+ error="is not a string"))
+ if self._krb_service_principal == "":
+ raise errors.InterfaceError(KRB_SERVICE_PINCIPAL_ERROR.format(
+ error="can not be an empty string"))
+ if "/" not in self._krb_service_principal:
+ raise errors.InterfaceError(KRB_SERVICE_PINCIPAL_ERROR.format(
+ error="is incorrectly formatted"))
+
+ def _add_default_conn_attrs(self):
+ """Add the default connection attributes."""
+ pass
+
+ def _check_server_version(self, server_version):
+ """Check the MySQL version
+
+ This method will check the MySQL version and raise an InterfaceError
+ when it is not supported or invalid. It will return the version
+ as a tuple with major, minor and patch.
+
+ Raises InterfaceError if invalid server version.
+
+ Returns tuple
+ """
+ if isinstance(server_version, (bytearray, bytes)):
+ server_version = server_version.decode()
+
+ # pylint: disable=W1401
+ regex_ver = re.compile(r"^(\d{1,2})\.(\d{1,2})\.(\d{1,3})(.*)")
+ # pylint: enable=W1401
+ match = regex_ver.match(server_version)
+ if not match:
+ raise errors.InterfaceError("Failed parsing MySQL version")
+
+ version = tuple([int(v) for v in match.groups()[0:3]])
+ if version < (4, 1):
+ raise errors.InterfaceError(
+ "MySQL Version '{0}' is not supported.".format(server_version))
+
+ return version
+
+ def get_server_version(self):
+ """Get the MySQL version
+
+ This method returns the MySQL server version as a tuple. If not
+ previously connected, it will return None.
+
+ Returns a tuple or None.
+ """
+ return self._server_version
+
+ def get_server_info(self):
+ """Get the original MySQL version information
+
+ This method returns the original MySQL server as text. If not
+ previously connected, it will return None.
+
+ Returns a string or None.
+ """
+ try:
+ return self._handshake['server_version_original']
+ except (TypeError, KeyError):
+ return None
+
+ @abstractproperty
+ def in_transaction(self):
+ """MySQL session has started a transaction"""
+ pass
+
+ def set_client_flags(self, flags):
+ """Set the client flags
+
+ The flags-argument can be either an int or a list (or tuple) of
+ ClientFlag-values. If it is an integer, it will set client_flags
+ to flags as is.
+ If flags is a list (or tuple), each flag will be set, or unset
+ when the flag is negative.
+
+ set_client_flags([ClientFlag.FOUND_ROWS,-ClientFlag.LONG_FLAG])
+
+ Raises ProgrammingError when the flags argument is not a set or
+ an integer bigger than 0.
+
+ Returns self.client_flags
+ """
+ if isinstance(flags, int) and flags > 0:
+ self._client_flags = flags
+ elif isinstance(flags, (tuple, list)):
+ for flag in flags:
+ if flag < 0:
+ self._client_flags &= ~abs(flag)
+ else:
+ self._client_flags |= flag
+ else:
+ raise errors.ProgrammingError(
+ "set_client_flags expect integer (>0) or set")
+ return self._client_flags
+
+ def isset_client_flag(self, flag):
+ """Check if a client flag is set"""
+ if (self._client_flags & flag) > 0:
+ return True
+ return False
+
+ @property
+ def time_zone(self):
+ """Get the current time zone"""
+ return self.info_query("SELECT @@session.time_zone")[0]
+
+ @time_zone.setter
+ def time_zone(self, value):
+ """Set the time zone"""
+ self.cmd_query("SET @@session.time_zone = '{0}'".format(value))
+ self._time_zone = value
+
+ @property
+ def sql_mode(self):
+ """Get the SQL mode"""
+ return self.info_query("SELECT @@session.sql_mode")[0]
+
+ @sql_mode.setter
+ def sql_mode(self, value):
+ """Set the SQL mode
+
+ This method sets the SQL Mode for the current connection. The value
+ argument can be either a string with comma-separated mode names, or
+ a sequence of mode names.
+
+ It is good practice to use the constants class SQLMode:
+ from mysql.connector.constants import SQLMode
+ cnx.sql_mode = [SQLMode.NO_ZERO_DATE, SQLMode.REAL_AS_FLOAT]
+ """
+ if isinstance(value, (list, tuple)):
+ value = ','.join(value)
+ self.cmd_query("SET @@session.sql_mode = '{0}'".format(value))
+ self._sql_mode = value
+
+ @abstractmethod
+ def info_query(self, query):
+ """Send a query which only returns 1 row"""
+ pass
+
+ def set_login(self, username=None, password=None):
+ """Set login information for MySQL
+
+ Set the username and/or password for the user connecting to
+ the MySQL Server.
+ """
+ if username is not None:
+ self._user = username.strip()
+ else:
+ self._user = ''
+ if password is not None:
+ self._password = password
+ else:
+ self._password = ''
+
+ def set_unicode(self, value=True):
+ """Toggle unicode mode
+
+ Set whether we return string fields as unicode or not.
+ Default is True.
+ """
+ self._use_unicode = value
+ if self.converter:
+ self.converter.set_unicode(value)
+
+ @property
+ def autocommit(self):
+ """Get whether autocommit is on or off"""
+ value = self.info_query("SELECT @@session.autocommit")[0]
+ return True if value == 1 else False
+
+ @autocommit.setter
+ def autocommit(self, value):
+ """Toggle autocommit"""
+ switch = 'ON' if value else 'OFF'
+ self.cmd_query("SET @@session.autocommit = {0}".format(switch))
+ self._autocommit = value
+
+ @property
+ def get_warnings(self):
+ """Get whether this connection retrieves warnings automatically
+
+ This method returns whether this connection retrieves warnings
+ automatically.
+
+ Returns True, or False when warnings are not retrieved.
+ """
+ return self._get_warnings
+
+ @get_warnings.setter
+ def get_warnings(self, value):
+ """Set whether warnings should be automatically retrieved
+
+ The toggle-argument must be a boolean. When True, cursors for this
+ connection will retrieve information about warnings (if any).
+
+ Raises ValueError on error.
+ """
+ if not isinstance(value, bool):
+ raise ValueError("Expected a boolean type")
+ self._get_warnings = value
+
+ @property
+ def raise_on_warnings(self):
+ """Get whether this connection raises an error on warnings
+
+ This method returns whether this connection will raise errors when
+ MySQL reports warnings.
+
+ Returns True or False.
+ """
+ return self._raise_on_warnings
+
+ @raise_on_warnings.setter
+ def raise_on_warnings(self, value):
+ """Set whether warnings raise an error
+
+ The toggle-argument must be a boolean. When True, cursors for this
+ connection will raise an error when MySQL reports warnings.
+
+ Raising on warnings implies retrieving warnings automatically. In
+ other words: warnings will be set to True. If set to False, warnings
+ will be also set to False.
+
+ Raises ValueError on error.
+ """
+ if not isinstance(value, bool):
+ raise ValueError("Expected a boolean type")
+ self._raise_on_warnings = value
+ self._get_warnings = value
+
+
+ @property
+ def unread_result(self):
+ """Get whether there is an unread result
+
+ This method is used by cursors to check whether another cursor still
+ needs to retrieve its result set.
+
+ Returns True, or False when there is no unread result.
+ """
+ return self._unread_result
+
+ @unread_result.setter
+ def unread_result(self, value):
+ """Set whether there is an unread result
+
+ This method is used by cursors to let other cursors know there is
+ still a result set that needs to be retrieved.
+
+ Raises ValueError on errors.
+ """
+ if not isinstance(value, bool):
+ raise ValueError("Expected a boolean type")
+ self._unread_result = value
+
+ @property
+ def charset(self):
+ """Returns the character set for current connection
+
+ This property returns the character set name of the current connection.
+ The server is queried when the connection is active. If not connected,
+ the configured character set name is returned.
+
+ Returns a string.
+ """
+ return CharacterSet.get_info(self._charset_id)[0]
+
+ @property
+ def python_charset(self):
+ """Returns the Python character set for current connection
+
+ This property returns the character set name of the current connection.
+ Note that, unlike property charset, this checks if the previously set
+ character set is supported by Python and if not, it returns the
+ equivalent character set that Python supports.
+
+ Returns a string.
+ """
+ encoding = CharacterSet.get_info(self._charset_id)[0]
+ if encoding in ('utf8mb4', 'binary'):
+ return 'utf8'
+ return encoding
+
+ def set_charset_collation(self, charset=None, collation=None):
+ """Sets the character set and collation for the current connection
+
+ This method sets the character set and collation to be used for
+ the current connection. The charset argument can be either the
+ name of a character set as a string, or the numerical equivalent
+ as defined in constants.CharacterSet.
+
+ When the collation is not given, the default will be looked up and
+ used.
+
+ For example, the following will set the collation for the latin1
+ character set to latin1_general_ci:
+
+ set_charset('latin1','latin1_general_ci')
+
+ """
+ if charset:
+ if isinstance(charset, int):
+ (self._charset_id, charset_name, collation_name) = \
+ CharacterSet.get_charset_info(charset)
+ elif isinstance(charset, str):
+ (self._charset_id, charset_name, collation_name) = \
+ CharacterSet.get_charset_info(charset, collation)
+ else:
+ raise ValueError(
+ "charset should be either integer, string or None")
+ elif collation:
+ (self._charset_id, charset_name, collation_name) = \
+ CharacterSet.get_charset_info(collation=collation)
+
+ self._execute_query("SET NAMES '{0}' COLLATE '{1}'".format(
+ charset_name, collation_name))
+
+ try:
+ # Required for C Extension
+ self.set_character_set_name(charset_name) # pylint: disable=E1101
+ except AttributeError:
+ # Not required for pure Python connection
+ pass
+
+ if self.converter:
+ self.converter.set_charset(charset_name)
+
+ @property
+ def collation(self):
+ """Returns the collation for current connection
+
+ This property returns the collation name of the current connection.
+ The server is queried when the connection is active. If not connected,
+ the configured collation name is returned.
+
+ Returns a string.
+ """
+ return CharacterSet.get_charset_info(self._charset_id)[2]
+
+ @abstractmethod
+ def _do_handshake(self):
+ """Gather information of the MySQL server before authentication"""
+ pass
+
+ @abstractmethod
+ def _open_connection(self):
+ """Open the connection to the MySQL server"""
+ pass
+
+ def _post_connection(self):
+ """Executes commands after connection has been established
+
+ This method executes commands after the connection has been
+ established. Some setting like autocommit, character set, and SQL mode
+ are set using this method.
+ """
+ self.set_charset_collation(self._charset_id)
+ self.autocommit = self._autocommit
+ if self._time_zone:
+ self.time_zone = self._time_zone
+ if self._sql_mode:
+ self.sql_mode = self._sql_mode
+
+ @abstractmethod
+ def disconnect(self):
+ """Disconnect from the MySQL server"""
+ pass
+ close = disconnect
+
+ def connect(self, **kwargs):
+ """Connect to the MySQL server
+
+ This method sets up the connection to the MySQL server. If no
+ arguments are given, it will use the already configured or default
+ values.
+ """
+ if kwargs:
+ self.config(**kwargs)
+
+ self.disconnect()
+ self._open_connection()
+ # Server does not allow to run any other statement different from ALTER
+ # when user's password has been expired.
+ if not self._client_flags & ClientFlag.CAN_HANDLE_EXPIRED_PASSWORDS:
+ self._post_connection()
+
+ def reconnect(self, attempts=1, delay=0):
+ """Attempt to reconnect to the MySQL server
+
+ The argument attempts should be the number of times a reconnect
+ is tried. The delay argument is the number of seconds to wait between
+ each retry.
+
+ You may want to set the number of attempts higher and use delay when
+ you expect the MySQL server to be down for maintenance or when you
+ expect the network to be temporarily unavailable.
+
+ Raises InterfaceError on errors.
+ """
+ counter = 0
+ while counter != attempts:
+ counter = counter + 1
+ try:
+ self.disconnect()
+ self.connect()
+ if self.is_connected():
+ break
+ except Exception as err: # pylint: disable=W0703
+ if counter == attempts:
+ msg = "Can not reconnect to MySQL after {0} "\
+ "attempt(s): {1}".format(attempts, str(err))
+ raise errors.InterfaceError(msg)
+ if delay > 0:
+ sleep(delay)
+
+ @abstractmethod
+ def is_connected(self):
+ """Reports whether the connection to MySQL Server is available"""
+ pass
+
+ @abstractmethod
+ def ping(self, reconnect=False, attempts=1, delay=0):
+ """Check availability of the MySQL server"""
+ pass
+
+ @abstractmethod
+ def commit(self):
+ """Commit current transaction"""
+ pass
+
+ @abstractmethod
+ def cursor(self, buffered=None, raw=None, prepared=None, cursor_class=None,
+ dictionary=None, named_tuple=None):
+ """Instantiates and returns a cursor"""
+ pass
+
+ @abstractmethod
+ def _execute_query(self, query):
+ """Execute a query"""
+ pass
+
+ @abstractmethod
+ def rollback(self):
+ """Rollback current transaction"""
+ pass
+
+ def start_transaction(self, consistent_snapshot=False,
+ isolation_level=None, readonly=None):
+ """Start a transaction
+
+ This method explicitly starts a transaction sending the
+ START TRANSACTION statement to the MySQL server. You can optionally
+ set whether there should be a consistent snapshot, which
+ isolation level you need or which access mode i.e. READ ONLY or
+ READ WRITE.
+
+ For example, to start a transaction with isolation level SERIALIZABLE,
+ you would do the following:
+ >>> cnx = mysql.connector.connect(..)
+ >>> cnx.start_transaction(isolation_level='SERIALIZABLE')
+
+ Raises ProgrammingError when a transaction is already in progress,
+ and ValueError when isolation_level specifies an unknown
+ level.
+ """
+ if self.in_transaction:
+ raise errors.ProgrammingError("Transaction already in progress")
+
+ if isolation_level:
+ level = isolation_level.strip().replace('-', ' ').upper()
+ levels = ['READ UNCOMMITTED', 'READ COMMITTED', 'REPEATABLE READ',
+ 'SERIALIZABLE']
+
+ if level not in levels:
+ raise ValueError(
+ 'Unknown isolation level "{0}"'.format(isolation_level))
+
+ self._execute_query(
+ "SET TRANSACTION ISOLATION LEVEL {0}".format(level))
+
+ if readonly is not None:
+ if self._server_version < (5, 6, 5):
+ raise ValueError(
+ "MySQL server version {0} does not support "
+ "this feature".format(self._server_version))
+
+ if readonly:
+ access_mode = 'READ ONLY'
+ else:
+ access_mode = 'READ WRITE'
+ self._execute_query(
+ "SET TRANSACTION {0}".format(access_mode))
+
+ query = "START TRANSACTION"
+ if consistent_snapshot:
+ query += " WITH CONSISTENT SNAPSHOT"
+ self.cmd_query(query)
+
+ def reset_session(self, user_variables=None, session_variables=None):
+ """Clears the current active session
+
+ This method resets the session state. If the MySQL server is 5.7.3
+ or later, the active session is reset without re-authenticating.
+ For other server versions, the session is reset by re-authenticating.
+
+ It is possible to provide a sequence of variables and their values to
+ be set after clearing the session. This is possible for both user
+ defined variables and session variables.
+ This method takes two arguments user_variables and session_variables
+ which are dictionaries.
+
+ Raises OperationalError if not connected, InternalError if there are
+ unread results and InterfaceError on errors.
+ """
+ if not self.is_connected():
+ raise errors.OperationalError("MySQL Connection not available.")
+
+ try:
+ self.cmd_reset_connection()
+ except (errors.NotSupportedError, NotImplementedError):
+ if self._compress:
+ raise errors.NotSupportedError(
+ "Reset session is not supported with compression for "
+ "MySQL server version 5.7.2 or earlier.")
+ else:
+ self.cmd_change_user(self._user, self._password,
+ self._database, self._charset_id)
+
+ if user_variables or session_variables:
+ cur = self.cursor()
+ if user_variables:
+ for key, value in user_variables.items():
+ cur.execute("SET @`{0}` = %s".format(key), (value,))
+ if session_variables:
+ for key, value in session_variables.items():
+ cur.execute("SET SESSION `{0}` = %s".format(key), (value,))
+ cur.close()
+
+ def set_converter_class(self, convclass):
+ """
+ Set the converter class to be used. This should be a class overloading
+ methods and members of conversion.MySQLConverter.
+ """
+ if convclass and issubclass(convclass, MySQLConverterBase):
+ charset_name = CharacterSet.get_info(self._charset_id)[0]
+ self._converter_class = convclass
+ self.converter = convclass(charset_name, self._use_unicode)
+ self.converter.str_fallback = self._converter_str_fallback
+ else:
+ raise TypeError("Converter class should be a subclass "
+ "of conversion.MySQLConverterBase.")
+
+ @abstractmethod
+ def get_rows(self, count=None, binary=False, columns=None, raw=None,
+ prep_stmt=None):
+ """Get all rows returned by the MySQL server"""
+ pass
+
+ def cmd_init_db(self, database):
+ """Change the current database"""
+ raise NotImplementedError
+
+ def cmd_query(self, query, raw=False, buffered=False, raw_as_string=False):
+ """Send a query to the MySQL server"""
+ raise NotImplementedError
+
+ def cmd_query_iter(self, statements):
+ """Send one or more statements to the MySQL server"""
+ raise NotImplementedError
+
+ def cmd_refresh(self, options):
+ """Send the Refresh command to the MySQL server"""
+ raise NotImplementedError
+
+ def cmd_quit(self):
+ """Close the current connection with the server"""
+ raise NotImplementedError
+
+ def cmd_shutdown(self, shutdown_type=None):
+ """Shut down the MySQL Server"""
+ raise NotImplementedError
+
+ def cmd_statistics(self):
+ """Send the statistics command to the MySQL Server"""
+ raise NotImplementedError
+
+ def cmd_process_info(self):
+ """Get the process list of the MySQL Server
+
+ This method is a placeholder to notify that the PROCESS_INFO command
+ is not supported by raising the NotSupportedError. The command
+ "SHOW PROCESSLIST" should be send using the cmd_query()-method or
+ using the INFORMATION_SCHEMA database.
+
+ Raises NotSupportedError exception
+ """
+ raise errors.NotSupportedError(
+ "Not implemented. Use SHOW PROCESSLIST or INFORMATION_SCHEMA")
+
+ def cmd_process_kill(self, mysql_pid):
+ """Kill a MySQL process"""
+ raise NotImplementedError
+
+ def cmd_debug(self):
+ """Send the DEBUG command"""
+ raise NotImplementedError
+
+ def cmd_ping(self):
+ """Send the PING command"""
+ raise NotImplementedError
+
+ def cmd_change_user(self, username='', password='', database='',
+ charset=45, password1='', password2='', password3=''):
+ """Change the current logged in user"""
+ raise NotImplementedError
+
+ def cmd_stmt_prepare(self, statement):
+ """Prepare a MySQL statement"""
+ raise NotImplementedError
+
+ def cmd_stmt_execute(self, statement_id, data=(), parameters=(), flags=0):
+ """Execute a prepared MySQL statement"""
+ raise NotImplementedError
+
+ def cmd_stmt_close(self, statement_id):
+ """Deallocate a prepared MySQL statement"""
+ raise NotImplementedError
+
+ def cmd_stmt_send_long_data(self, statement_id, param_id, data):
+ """Send data for a column"""
+ raise NotImplementedError
+
+ def cmd_stmt_reset(self, statement_id):
+ """Reset data for prepared statement sent as long data"""
+ raise NotImplementedError
+
+ def cmd_reset_connection(self):
+ """Resets the session state without re-authenticating"""
+ raise NotImplementedError
+
+
+@make_abc(ABCMeta)
+class MySQLCursorAbstract(object):
+ """Abstract cursor class
+
+ Abstract class defining cursor class with method and members
+ required by the Python Database API Specification v2.0.
+ """
+ def __init__(self):
+ """Initialization"""
+ self._description = None
+ self._rowcount = -1
+ self._last_insert_id = None
+ self._warnings = None
+ self.arraysize = 1
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self.close()
+
+ @abstractmethod
+ def callproc(self, procname, args=()):
+ """Calls a stored procedure with the given arguments
+
+ The arguments will be set during this session, meaning
+ they will be called like _<procname>__arg<nr> where
+ <nr> is an enumeration (+1) of the arguments.
+
+ Coding Example:
+ 1) Defining the Stored Routine in MySQL:
+ CREATE PROCEDURE multiply(IN pFac1 INT, IN pFac2 INT, OUT pProd INT)
+ BEGIN
+ SET pProd := pFac1 * pFac2;
+ END
+
+ 2) Executing in Python:
+ args = (5,5,0) # 0 is to hold pprod
+ cursor.callproc('multiply', args)
+ print(cursor.fetchone())
+
+ Does not return a value, but a result set will be
+ available when the CALL-statement executes successfully.
+ Raises exceptions when something is wrong.
+ """
+ pass
+
+ @abstractmethod
+ def close(self):
+ """Close the cursor."""
+ pass
+
+ @abstractmethod
+ def execute(self, operation, params=(), multi=False):
+ """Executes the given operation
+
+ Executes the given operation substituting any markers with
+ the given parameters.
+
+ For example, getting all rows where id is 5:
+ cursor.execute("SELECT * FROM t1 WHERE id = %s", (5,))
+
+ The multi argument should be set to True when executing multiple
+ statements in one operation. If not set and multiple results are
+ found, an InterfaceError will be raised.
+
+ If warnings were generated, and connection.get_warnings is True, then
+ self._warnings will be a list containing these warnings.
+
+ Returns an iterator when multi is True, otherwise None.
+ """
+ pass
+
+ @abstractmethod
+ def executemany(self, operation, seq_params):
+ """Execute the given operation multiple times
+
+ The executemany() method will execute the operation iterating
+ over the list of parameters in seq_params.
+
+ Example: Inserting 3 new employees and their phone number
+
+ data = [
+ ('Jane','555-001'),
+ ('Joe', '555-001'),
+ ('John', '555-003')
+ ]
+ stmt = "INSERT INTO employees (name, phone) VALUES ('%s','%s')"
+ cursor.executemany(stmt, data)
+
+ INSERT statements are optimized by batching the data, that is
+ using the MySQL multiple rows syntax.
+
+ Results are discarded. If they are needed, consider looping over
+ data using the execute() method.
+ """
+ pass
+
+ @abstractmethod
+ def fetchone(self):
+ """Returns next row of a query result set
+
+ Returns a tuple or None.
+ """
+ pass
+
+ @abstractmethod
+ def fetchmany(self, size=1):
+ """Returns the next set of rows of a query result, returning a
+ list of tuples. When no more rows are available, it returns an
+ empty list.
+
+ The number of rows returned can be specified using the size argument,
+ which defaults to one
+ """
+ pass
+
+ @abstractmethod
+ def fetchall(self):
+ """Returns all rows of a query result set
+
+ Returns a list of tuples.
+ """
+ pass
+
+ def nextset(self):
+ """Not Implemented."""
+ pass
+
+ def setinputsizes(self, sizes):
+ """Not Implemented."""
+ pass
+
+ def setoutputsize(self, size, column=None):
+ """Not Implemented."""
+ pass
+
+ def reset(self, free=True):
+ """Reset the cursor to default"""
+ pass
+
+ @abstractproperty
+ def description(self):
+ """Returns description of columns in a result
+
+ This property returns a list of tuples describing the columns
+ in a result set. A tuple is described as follows::
+
+ (column_name,
+ type,
+ None,
+ None,
+ None,
+ None,
+ null_ok,
+ column_flags) # Addition to PEP-249 specs
+
+ Returns a list of tuples.
+ """
+ return self._description
+
+ @abstractproperty
+ def rowcount(self):
+ """Returns the number of rows produced or affected
+
+ This property returns the number of rows produced by queries
+ such as a SELECT, or affected rows when executing DML statements
+ like INSERT or UPDATE.
+
+ Note that for non-buffered cursors it is impossible to know the
+ number of rows produced before having fetched them all. For those,
+ the number of rows will be -1 right after execution, and
+ incremented when fetching rows.
+
+ Returns an integer.
+ """
+ return self._rowcount
+
+ @abstractproperty
+ def lastrowid(self):
+ """Returns the value generated for an AUTO_INCREMENT column
+
+ Returns the value generated for an AUTO_INCREMENT column by
+ the previous INSERT or UPDATE statement or None when there is
+ no such value available.
+
+ Returns a long value or None.
+ """
+ return self._last_insert_id
+
+ def fetchwarnings(self):
+ """Returns Warnings."""
+ return self._warnings
+
+ def get_attributes(self):
+ """Get the added query attributes so far."""
+ if hasattr(self, "_cnx"):
+ return self._cnx._query_attrs
+ elif hasattr(self, "_connection"):
+ return self._connection._query_attrs
+
+ def add_attribute(self, name, value):
+ """Add a query attribute and his value."""
+ if not isinstance(name, str):
+ raise errors.ProgrammingError(
+ "Parameter `name` must be a string type.")
+ if value is not None and not isinstance(value, MYSQL_PY_TYPES):
+ raise errors.ProgrammingError(
+ f"Object {value} cannot be converted to a MySQL type.")
+ if hasattr(self, "_cnx"):
+ self._cnx._query_attrs.append((name, value))
+ elif hasattr(self, "_connection"):
+ self._connection._query_attrs.append((name, value))
+
+ def clear_attributes(self):
+ """Remove all the query attributes."""
+ if hasattr(self, "_cnx"):
+ self._cnx._query_attrs = []
+ elif hasattr(self, "_connection"):
+ self._connection._query_attrs = []
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/authentication.py b/monEnvTP/lib/python3.8/site-packages/mysql/connector/authentication.py
new file mode 100644
index 0000000000000000000000000000000000000000..bd23da8ab8c6f11e8c17ee0220abc422f47867bf
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql/connector/authentication.py
@@ -0,0 +1,1035 @@
+# Copyright (c) 2014, 2021, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""Implementing support for MySQL Authentication Plugins"""
+
+from base64 import b64encode, b64decode
+from hashlib import sha1, sha256
+import getpass
+import hmac
+import logging
+import os
+import struct
+
+
+from urllib.parse import quote
+from uuid import uuid4
+
+try:
+ from cryptography.exceptions import UnsupportedAlgorithm
+ from cryptography.hazmat.primitives import hashes, serialization
+ from cryptography.hazmat.primitives.asymmetric import padding
+ CRYPTOGRAPHY_AVAILABLE = True
+except ImportError:
+ CRYPTOGRAPHY_AVAILABLE = False
+
+try:
+ import gssapi
+except:
+ gssapi = None
+
+from . import errors
+from .utils import (normalize_unicode_string as norm_ustr,
+ validate_normalized_unicode_string as valid_norm)
+
+logging.getLogger(__name__).addHandler(logging.NullHandler())
+
+_LOGGER = logging.getLogger(__name__)
+
+
class BaseAuthPlugin(object):
    """Base class for MySQL authentication plugins.

    Subclasses implement prepare_password(), which builds the
    authentication payload sent to the server. ``auth_data`` (the
    server seed) is required when instantiating; username, password
    and database are optional. ``ssl_enabled`` tells the plugin
    whether the connection is encrypted, which matters for plugins
    that refuse to send credentials in the clear.

    auth_response() is the public entry point: it enforces the SSL
    requirement and returns whatever prepare_password() produced.
    """

    requires_ssl = False
    plugin_name = ''

    def __init__(self, auth_data, username=None, password=None, database=None,
                 ssl_enabled=False, instance=None):
        """Store the authentication context for later use."""
        # NOTE(review): ``instance`` is accepted for signature
        # compatibility but never stored — confirm no caller relies on it.
        self._auth_data = auth_data
        self._username = username
        self._password = password
        self._database = database
        self._ssl_enabled = ssl_enabled

    def prepare_password(self):
        """Build the password payload to send to MySQL.

        Must be overridden by concrete plugins; it is invoked by
        auth_response().

        Raises NotImplementedError.
        """
        raise NotImplementedError

    def auth_response(self):
        """Return the prepared password to send to MySQL.

        Raises InterfaceError when the plugin requires SSL but the
        connection is not encrypted.
        """
        if self.requires_ssl and not self._ssl_enabled:
            raise errors.InterfaceError("{name} requires SSL".format(
                name=self.plugin_name))
        return self.prepare_password()
+
+
class MySQLNativePasswordAuthPlugin(BaseAuthPlugin):
    """Class implementing the MySQL Native Password authentication plugin"""

    requires_ssl = False
    plugin_name = 'mysql_native_password'

    def prepare_password(self):
        """Prepare and return the password scrambled as a native
        MySQL 4.1+ password.

        The scramble is SHA1(password) XOR SHA1(seed + SHA1(SHA1(password))).

        Raises InterfaceError when the seed (auth_data) is missing or
        scrambling fails.

        Returns a 20-byte scramble, or b'' for an empty password.
        """
        if not self._auth_data:
            raise errors.InterfaceError("Missing authentication data (seed)")

        if not self._password:
            return b''

        # Normalize the password to bytes exactly once (the original had a
        # redundant dead pre-assignment before this branch).
        if isinstance(self._password, str):
            password = self._password.encode('utf-8')
        else:
            password = self._password

        auth_data = self._auth_data

        try:
            hash1 = sha1(password).digest()
            hash2 = sha1(hash1).digest()
            hash3 = sha1(auth_data + hash2).digest()
            xored = [h1 ^ h3 for (h1, h3) in zip(hash1, hash3)]
            hash4 = struct.pack('20B', *xored)
        except Exception as exc:
            raise errors.InterfaceError(
                "Failed scrambling password; {0}".format(exc))

        return hash4
+
+
class MySQLClearPasswordAuthPlugin(BaseAuthPlugin):
    """MySQL Clear Password authentication plugin.

    Sends the password as NUL-terminated clear text, which is why this
    plugin only works over an SSL-secured connection.
    """

    requires_ssl = True
    plugin_name = 'mysql_clear_password'

    def prepare_password(self):
        """Return the password as a NUL-terminated clear-text string."""
        if not self._password:
            return b'\x00'
        secret = self._password
        if isinstance(secret, str):
            secret = secret.encode('utf8')
        return secret + b'\x00'
+
+
class MySQLSHA256PasswordAuthPlugin(BaseAuthPlugin):
    """MySQL SHA256 authentication plugin.

    RSA encryption of the password is not supported because the Python
    Standard Library does not expose that OpenSSL functionality, so SSL
    is required and the password travels NUL-terminated in clear text
    inside the encrypted channel.
    """

    requires_ssl = True
    plugin_name = 'sha256_password'

    def prepare_password(self):
        """Return the password as a NUL-terminated clear-text string."""
        if not self._password:
            return b'\x00'
        secret = (self._password.encode('utf8')
                  if isinstance(self._password, str) else self._password)
        return secret + b'\x00'
+
+
class MySQLCachingSHA2PasswordAuthPlugin(BaseAuthPlugin):
    """MySQL caching_sha2_password authentication plugin.

    RSA encryption is not supported (not exposed by the Python Standard
    Library), so the full-authentication path requires SSL.
    """
    requires_ssl = False
    plugin_name = 'caching_sha2_password'
    perform_full_authentication = 4
    fast_auth_success = 3

    def _scramble(self):
        """Scramble the password with the nonce sent by the server.

        The scramble has the form:
            XOR(SHA2(password), SHA2(SHA2(SHA2(password)), Nonce))

        Raises InterfaceError when the nonce (auth_data) is missing.
        Returns a 32-byte scramble, or b'' for an empty password.
        """
        if not self._auth_data:
            raise errors.InterfaceError("Missing authentication data (seed)")

        if not self._password:
            return b''

        if isinstance(self._password, str):
            secret = self._password.encode('utf-8')
        else:
            secret = self._password

        digest1 = sha256(secret).digest()
        nonce_hash = sha256()
        nonce_hash.update(sha256(digest1).digest())
        nonce_hash.update(self._auth_data)
        digest2 = nonce_hash.digest()
        xor_bytes = [b1 ^ b2 for (b1, b2) in zip(digest1, digest2)]
        return struct.pack('32B', *xor_bytes)

    def prepare_password(self):
        """Dispatch on the server's auth-more-data packet.

        A packet longer than one byte is the nonce: reply with the
        scramble. A single status byte equal to
        ``perform_full_authentication`` (4) switches to the full
        exchange. Any other status — notably ``fast_auth_success``
        (3) — needs no reply, so None is returned.
        """
        if len(self._auth_data) > 1:
            return self._scramble()
        if self._auth_data[0] == self.perform_full_authentication:
            return self._full_authentication()
        return None

    def _full_authentication(self):
        """Return the password NUL-terminated in clear text (SSL only).

        Raises InterfaceError when the connection is not SSL-secured.
        """
        if not self._ssl_enabled:
            raise errors.InterfaceError("{name} requires SSL".format(
                name=self.plugin_name))

        if not self._password:
            return b'\x00'
        secret = self._password
        if isinstance(secret, str):
            secret = secret.encode('utf8')
        return secret + b'\x00'
+
+
class MySQLLdapSaslPasswordAuthPlugin(BaseAuthPlugin):
    """MySQL LDAP SASL authentication plugin.

    The server-side plugin supports the SCRAM-SHA-1, SCRAM-SHA-256 and
    GSSAPI (Kerberos) mechanisms.

    SCRAM-SHA-1 / SCRAM-SHA-256: the exchange needs two client messages
    and two server responses. auth_response() produces the client-first
    message; the server's reply is fed to auth_continue(), which returns
    the client-final message; the server's second reply is verified by
    auth_finalize().

    GSSAPI: auth_response() obtains a TGT and starts the security
    context; auth_continue_krb() and auth_accept_close_handshake()
    drive the rest of the negotiation.
    """
    sasl_mechanisms = ['SCRAM-SHA-1', 'SCRAM-SHA-256', 'GSSAPI']
    requires_ssl = False
    plugin_name = 'authentication_ldap_sasl_client'
    def_digest_mode = sha1
    client_nonce = None
    client_salt = None
    server_salt = None
    krb_service_principal = None
    iterations = 0
    server_auth_var = None

    def _xor(self, bytes1, bytes2):
        """Byte-wise XOR of two equal-length byte sequences."""
        return bytes([b1 ^ b2 for b1, b2 in zip(bytes1, bytes2)])

    def _hmac(self, password, salt):
        """HMAC digest of ``salt`` keyed with ``password`` using the
        mechanism's digest (SHA-1 or SHA-256)."""
        digest_maker = hmac.new(password, salt, self.def_digest_mode)
        return digest_maker.digest()

    def _hi(self, password, salt, count):
        """Prepares Hi

        Hi(password, salt, iterations) where Hi(p,s,i) is defined as
        PBKDF2 (HMAC, p, s, i, output length of H) — see RFC 5802.
        """
        pw = password.encode()
        hi = self._hmac(pw, salt + b'\x00\x00\x00\x01')
        aux = hi
        for _ in range(count - 1):
            aux = self._hmac(pw, aux)
            hi = self._xor(hi, aux)
        return hi

    def _normalize(self, string):
        """Return ``string`` normalized for SASL.

        Raises InterfaceError naming the offending character and the
        broken rule when the normalized string is not valid.
        """
        norm_str = norm_ustr(string)
        broken_rule = valid_norm(norm_str)
        if broken_rule is not None:
            # BUG FIX: the original raised a premature generic
            # InterfaceError here, which made the detailed diagnostic
            # below unreachable.
            char, rule = broken_rule
            raise errors.InterfaceError(
                "Unable to normalice character: `{}` in `{}` due to {}"
                "".format(char, string, rule))
        return norm_str

    def _first_message(self):
        """Generate the SCRAM client-first message.

        The message consists of a gs2-header, the desired username and a
        randomly generated client nonce:
            n,a=<user_name>,n=<user_name>,r=<client_nonce>

        Returns the client-first message, UTF-8 encoded.
        """
        cfm_fprnat = "n,a={user_name},n={user_name},r={client_nonce}"
        self.client_nonce = str(uuid4()).replace("-", "")
        cfm = cfm_fprnat.format(user_name=self._normalize(self._username),
                                client_nonce=self.client_nonce)

        if isinstance(cfm, str):
            cfm = cfm.encode('utf8')
        return cfm

    def _first_message_krb(self):
        """Get a TGT Authentication request and initiate security context.

        Contacts the Kerberos KDC to obtain a TGT; cached credentials are
        preferred, falling back to the given password.

        Returns the initial client token, or raises
        InterfaceError/ProgrammingError on failure.
        """
        _LOGGER.debug("# user name: %s", self._username)
        user_name = gssapi.raw.names.import_name(
            self._username.encode('utf8'),
            name_type=gssapi.NameType.user)

        # Attempt to retrieve credentials from the default cache file.
        try:
            cred = gssapi.Credentials()
            _LOGGER.debug("# Stored credentials found, if password was given it"
                          " will be ignored.")
            try:
                # Validate that the credentials have not expired.
                cred.lifetime
            except gssapi.raw.exceptions.ExpiredCredentialsError as err:
                _LOGGER.warning(" Credentials has expired: %s", err)
                cred.acquire(user_name)
                raise errors.InterfaceError(
                    "Credentials has expired: {}".format(err))
        except gssapi.raw.misc.GSSError as err:
            if not self._password:
                _LOGGER.error(" Unable to retrieve stored credentials: %s", err)
                raise errors.InterfaceError(
                    "Unable to retrieve stored credentials error: {}".format(err))
            else:
                try:
                    _LOGGER.debug("# Attempt to retrieve credentials with "
                                  "given password")
                    acquire_cred_result = gssapi.raw.acquire_cred_with_password(
                        user_name, self._password.encode('utf8'),
                        usage="initiate")
                    cred = acquire_cred_result[0]
                except gssapi.raw.misc.GSSError as err:
                    _LOGGER.error(" Unable to retrieve credentials with the given "
                                  "password: %s", err)
                    raise errors.ProgrammingError(
                        "Unable to retrieve credentials with the given password: "
                        "{}".format(err))

        flags_l = (gssapi.RequirementFlag.mutual_authentication,
                   gssapi.RequirementFlag.extended_error,
                   gssapi.RequirementFlag.delegate_to_peer)

        if self.krb_service_principal:
            service_principal = self.krb_service_principal
        else:
            service_principal = "ldap/ldapauth"
        _LOGGER.debug("# service principal: %s", service_principal)
        servk = gssapi.Name(service_principal,
                            name_type=gssapi.NameType.kerberos_principal)
        self.target_name = servk
        self.ctx = gssapi.SecurityContext(name=servk,
                                          creds=cred,
                                          flags=sum(flags_l),
                                          usage='initiate')

        try:
            initial_client_token = self.ctx.step()
        except gssapi.raw.misc.GSSError as err:
            _LOGGER.error("Unable to initiate security context: %s", err)
            raise errors.InterfaceError(
                "Unable to initiate security context: {}".format(err))

        _LOGGER.debug("# initial client token: %s", initial_client_token)
        return initial_client_token

    def auth_continue_krb(self, tgt_auth_challenge):
        """Continue with the Kerberos TGT service request.

        Must be invoked sequentially (in a loop) until the security
        context is completed; an empty response must then be sent to
        acknowledge the server.

        Args:
            tgt_auth_challenge: the challenge for the negotiation.

        Returns: tuple (bytearray TGS service request,
                 bool True if context is completed otherwise False).
        """
        _LOGGER.debug("tgt_auth challenge: %s", tgt_auth_challenge)

        resp = self.ctx.step(tgt_auth_challenge)
        _LOGGER.debug("# context step response: %s", resp)
        _LOGGER.debug("# context completed?: %s", self.ctx.complete)

        return resp, self.ctx.complete

    def auth_accept_close_handshake(self, message):
        """Accept handshake and generate closing handshake message.

        Verifies the server's wrapped message (the security context must
        already be established) and builds the closing handshake: one
        security-layer byte (always 1) plus a 3-byte buffer size that is
        irrelevant because no further GSSAPI messages are exchanged.

        Args:
            message: a wrapped gssapi message from the server.

        Returns: the closing handshake message to send to the server.
        """
        if not self.ctx.complete:
            raise errors.ProgrammingError("Security context is not completed.")
        _LOGGER.debug("# servers message: %s", message)
        _LOGGER.debug("# GSSAPI flags in use: %s", self.ctx.actual_flags)
        try:
            unwraped = self.ctx.unwrap(message)
            _LOGGER.debug("# unwraped: %s", unwraped)
        except gssapi.raw.exceptions.BadMICError as err:
            _LOGGER.debug("Unable to unwrap server message: %s", err)
            raise errors.InterfaceError("Unable to unwrap server message: {}"
                                        "".format(err))

        _LOGGER.debug("# unwrapped server message: %s", unwraped)
        # The message contents for the clients closing message:
        #   - security level 1 byte, must be always 1.
        #   - conciliated buffer size 3 bytes, without importance as no
        #     further GSSAPI messages will be sent.
        response = bytearray(b"\x01\x00\x00\00")
        # Closing handshake must not be encrypted.
        _LOGGER.debug("# message response: %s", response)
        wraped = self.ctx.wrap(response, encrypt=False)
        _LOGGER.debug("# wrapped message response: %s, length: %d",
                      wraped[0], len(wraped[0]))

        return wraped.message

    def auth_response(self, krb_service_principal=None):
        """Prepare the first message to the server.

        Raises InterfaceError when the server requests an unsupported
        SASL mechanism, and ProgrammingError when GSSAPI is requested
        but the gssapi module is unavailable.

        Returns bytes to send to the server as the first message.
        """
        auth_mechanism = self._auth_data.decode()
        self.krb_service_principal = krb_service_principal
        _LOGGER.debug("read_method_name_from_server: %s", auth_mechanism)
        if auth_mechanism not in self.sasl_mechanisms:
            raise errors.InterfaceError(
                'The sasl authentication method "{}" requested from the server '
                'is not supported. Only "{}" and "{}" are supported'.format(
                    auth_mechanism, '", "'.join(self.sasl_mechanisms[:-1]),
                    self.sasl_mechanisms[-1]))

        if b'GSSAPI' in self._auth_data:
            if not gssapi:
                raise errors.ProgrammingError(
                    "Module gssapi is required for GSSAPI authentication "
                    "mechanism but was not found. Unable to authenticate "
                    "with the server")
            return self._first_message_krb()

        if self._auth_data == b'SCRAM-SHA-256':
            self.def_digest_mode = sha256

        return self._first_message()

    def _second_message(self):
        """Generate the SCRAM client-final message.

        Concatenates the channel-binding header, the server nonce and
        the client proof:
            c=<b64 gs2-header>,r=<server_nonce>,p=<client_proof>
        where:
            <client_proof>: xor(<client_key>, <client_signature>)
            <client_key>: hmac(salted_password, b"Client Key")
            <client_signature>: hmac(<stored_key>, <auth_msg>)
            <stored_key>: h(<client_key>)
            <auth_msg>: <client_first_no_header>,<servers_first>,
                        c=<client_header>,r=<server_nonce>

        Also stores the expected server signature in ``server_auth_var``
        for later verification. Raises InterfaceError when the seed is
        missing.
        """
        if not self._auth_data:
            raise errors.InterfaceError("Missing authentication data (seed)")

        passw = self._normalize(self._password)
        salted_password = self._hi(passw,
                                   b64decode(self.server_salt),
                                   self.iterations)

        _LOGGER.debug("salted_password: %s",
                      b64encode(salted_password).decode())

        client_key = self._hmac(salted_password, b"Client Key")
        _LOGGER.debug("client_key: %s", b64encode(client_key).decode())

        stored_key = self.def_digest_mode(client_key).digest()
        _LOGGER.debug("stored_key: %s", b64encode(stored_key).decode())

        server_key = self._hmac(salted_password, b"Server Key")
        _LOGGER.debug("server_key: %s", b64encode(server_key).decode())

        client_first_no_header = ",".join([
            "n={}".format(self._normalize(self._username)),
            "r={}".format(self.client_nonce)])
        _LOGGER.debug("client_first_no_header: %s", client_first_no_header)
        auth_msg = ','.join([
            client_first_no_header,
            self.servers_first,
            "c={}".format(b64encode("n,a={},".format(
                self._normalize(self._username)).encode()).decode()),
            "r={}".format(self.server_nonce)])
        _LOGGER.debug("auth_msg: %s", auth_msg)

        client_signature = self._hmac(stored_key, auth_msg.encode())
        _LOGGER.debug("client_signature: %s",
                      b64encode(client_signature).decode())

        client_proof = self._xor(client_key, client_signature)
        _LOGGER.debug("client_proof: %s", b64encode(client_proof).decode())

        self.server_auth_var = b64encode(
            self._hmac(server_key, auth_msg.encode())).decode()
        _LOGGER.debug("server_auth_var: %s", self.server_auth_var)

        client_header = b64encode(
            "n,a={},".format(self._normalize(self._username)).encode()).decode()
        msg = ",".join(["c={}".format(client_header),
                        "r={}".format(self.server_nonce),
                        "p={}".format(b64encode(client_proof).decode())])
        _LOGGER.debug("second_message: %s", msg)
        return msg.encode()

    def _validate_first_reponse(self, servers_first):
        """Validate the server-first message and extract its fields.

        Expected form: r=<server_nonce>,s=<salt>,i=<iterations>
        The server nonce must contain our client nonce.

        Raises InterfaceError on any malformed response.
        """
        if not servers_first or \
           not isinstance(servers_first, (bytearray, bytes)):
            raise errors.InterfaceError("Unexpected server message: {}"
                                        "".format(servers_first))
        try:
            servers_first = servers_first.decode()
            self.servers_first = servers_first
            r_server_nonce, s_salt, i_counter = servers_first.split(",")
        except ValueError:
            raise errors.InterfaceError("Unexpected server message: {}"
                                        "".format(servers_first))
        if not r_server_nonce.startswith("r=") or \
           not s_salt.startswith("s=") or \
           not i_counter.startswith("i="):
            raise errors.InterfaceError("Incomplete reponse from the server: {}"
                                        "".format(servers_first))
        if self.client_nonce in r_server_nonce:
            self.server_nonce = r_server_nonce[2:]
            _LOGGER.debug("server_nonce: %s", self.server_nonce)
        else:
            raise errors.InterfaceError("Unable to authenticate response: "
                                        "response not well formed {}"
                                        "".format(servers_first))
        self.server_salt = s_salt[2:]
        _LOGGER.debug("server_salt: %s length: %s", self.server_salt,
                      len(self.server_salt))
        try:
            i_counter = i_counter[2:]
            _LOGGER.debug("iterations: {}".format(i_counter))
            self.iterations = int(i_counter)
        except (ValueError, TypeError):
            # BUG FIX: narrowed from a bare ``except:`` that also
            # swallowed SystemExit/KeyboardInterrupt.
            raise errors.InterfaceError("Unable to authenticate: iterations "
                                        "not found {}".format(servers_first))

    def auth_continue(self, servers_first_response):
        """Return the second message from the client.

        Validates the server-first response and returns bytes to send
        to the server as the second message.
        """
        self._validate_first_reponse(servers_first_response)
        return self._second_message()

    def _validate_second_reponse(self, servers_second):
        """Validate the server's proof from the server-final message.

        The message has the form v=<b64 server signature>; it must equal
        the signature stored by _second_message() in ``server_auth_var``.

        Raises InterfaceError on a malformed message; returns True when
        the server proved knowledge of the same Auth variable.
        """
        # Generalized to accept ``bytes`` as well as ``bytearray``.
        if not servers_second or \
           not isinstance(servers_second, (bytearray, bytes)) or \
           len(servers_second) <= 2 or not servers_second.startswith(b"v="):
            raise errors.InterfaceError("The server's proof is not well formated.")
        server_var = servers_second[2:].decode()
        _LOGGER.debug("server auth variable: %s", server_var)
        return self.server_auth_var == server_var

    def auth_finalize(self, servers_second_response):
        """Finalize the authentication process.

        Raises InterfaceError when servers_second_response is invalid.

        Returns True on successful authentication, False otherwise.
        """
        if not self._validate_second_reponse(servers_second_response):
            raise errors.InterfaceError("Authentication failed: Unable to "
                                        "proof server identity.")
        return True
+
+
class MySQLKerberosAuthPlugin(BaseAuthPlugin):
    """Implement the MySQL Kerberos authentication plugin."""

    plugin_name = "authentication_kerberos_client"
    requires_ssl = False
    context = None

    @staticmethod
    def get_user_from_credentials():
        """Get user from cached credentials, without the realm part.

        Falls back to the OS login name when no cached credentials are
        available.
        """
        try:
            creds = gssapi.Credentials(usage="initiate")
            user = str(creds.name)
            if user.find("@") != -1:
                user, _ = user.split("@", 1)
            return user
        except gssapi.raw.misc.GSSError:
            return getpass.getuser()

    def _acquire_cred_with_password(self, upn):
        """Acquire credentials through the provided password.

        Raises ProgrammingError when the credentials cannot be acquired.
        """
        _LOGGER.debug(
            "Attempt to acquire credentials through provided password"
        )

        username = gssapi.raw.names.import_name(
            upn.encode("utf-8"),
            name_type=gssapi.NameType.user
        )

        try:
            acquire_cred_result = (
                gssapi.raw.acquire_cred_with_password(
                    username,
                    self._password.encode("utf-8"),
                    usage="initiate"
                )
            )
        except gssapi.raw.misc.GSSError as err:
            raise errors.ProgrammingError(
                f"Unable to acquire credentials with the given password: {err}"
            )
        return acquire_cred_result[0]

    def _parse_auth_data(self, packet):
        """Parse authentication data.

        Get the SPN and REALM from the authentication data packet.

        Format:
            SPN string length two bytes <B1> <B2> +
            SPN string +
            UPN realm string length two bytes <B1> <B2> +
            UPN realm string

        Returns:
            tuple: With 'spn' and 'realm'.
        """
        spn_len = struct.unpack("<H", packet[:2])[0]
        packet = packet[2:]

        spn = struct.unpack(f"<{spn_len}s", packet[:spn_len])[0]
        packet = packet[spn_len:]

        realm_len = struct.unpack("<H", packet[:2])[0]
        realm = struct.unpack(f"<{realm_len}s", packet[2:])[0]

        return spn.decode(), realm.decode()

    def prepare_password(self):
        """Return the password as a NUL-terminated clear-text string."""
        if not self._password:
            return b"\x00"
        password = self._password

        if isinstance(password, str):
            password = password.encode("utf8")

        return password + b"\x00"

    def auth_response(self, auth_data=None):
        """Prepare the first message to the server.

        Parses the SPN/realm from ``auth_data``, obtains Kerberos
        credentials (cached ones preferred, password fallback) and
        initiates the security context.

        Returns the initial client token, or the clear-text password
        when no SPN was provided.
        """
        spn = None
        realm = None

        if auth_data:
            try:
                spn, realm = self._parse_auth_data(auth_data)
            except struct.error as err:
                # BUG FIX: the original raised the unrelated builtin
                # InterruptedError; use the module's InterfaceError so
                # callers catching errors.Error see the failure.
                raise errors.InterfaceError(
                    f"Invalid authentication data: {err}")

        if spn is None:
            return self.prepare_password()

        upn = f"{self._username}@{realm}" if self._username else None

        _LOGGER.debug("Service Principal: %s", spn)
        _LOGGER.debug("Realm: %s", realm)
        _LOGGER.debug("Username: %s", self._username)

        try:
            # Attempt to retrieve credentials from default cache file
            creds = gssapi.Credentials(usage="initiate")
            creds_upn = str(creds.name)

            _LOGGER.debug("Cached credentials found")
            _LOGGER.debug("Cached credentials UPN: %s", creds_upn)

            # Remove the realm from user
            if creds_upn.find("@") != -1:
                creds_user, creds_realm = creds_upn.split("@", 1)
            else:
                creds_user = creds_upn
                creds_realm = None

            upn = f"{self._username}@{realm}" if self._username else creds_upn

            # The user from cached credentials matches with the given user?
            if self._username and self._username != creds_user:
                _LOGGER.debug(
                    "The user from cached credentials doesn't match with the "
                    "given user"
                )
                if self._password is not None:
                    creds = self._acquire_cred_with_password(upn)
            if (
                creds_realm and creds_realm != realm and
                self._password is not None
            ):
                creds = self._acquire_cred_with_password(upn)
        except gssapi.raw.exceptions.ExpiredCredentialsError as err:
            if upn and self._password is not None:
                creds = self._acquire_cred_with_password(upn)
            else:
                raise errors.InterfaceError(f"Credentials has expired: {err}")
        except gssapi.raw.misc.GSSError as err:
            if upn and self._password is not None:
                creds = self._acquire_cred_with_password(upn)
            else:
                raise errors.InterfaceError(
                    f"Unable to retrieve cached credentials error: {err}"
                )

        flags = (
            gssapi.RequirementFlag.mutual_authentication,
            gssapi.RequirementFlag.extended_error,
            gssapi.RequirementFlag.delegate_to_peer
        )
        name = gssapi.Name(
            spn,
            name_type=gssapi.NameType.kerberos_principal
        )
        cname = name.canonicalize(gssapi.MechType.kerberos)
        self.context = gssapi.SecurityContext(
            name=cname,
            creds=creds,
            flags=sum(flags),
            usage="initiate"
        )

        try:
            initial_client_token = self.context.step()
        except gssapi.raw.misc.GSSError as err:
            raise errors.InterfaceError(
                f"Unable to initiate security context: {err}"
            )

        _LOGGER.debug("Initial client token: %s", initial_client_token)
        return initial_client_token

    def auth_continue(self, tgt_auth_challenge):
        """Continue with the Kerberos TGT service request.

        Must be invoked sequentially (in a loop) until the security
        context is completed; an empty response must then be sent to
        acknowledge the server.

        Args:
            tgt_auth_challenge: the challenge for the negotiation.

        Returns:
            tuple (bytearray TGS service request,
                   bool True if context is completed otherwise False).
        """
        _LOGGER.debug("tgt_auth challenge: %s", tgt_auth_challenge)

        resp = self.context.step(tgt_auth_challenge)

        _LOGGER.debug("Context step response: %s", resp)
        _LOGGER.debug("Context completed?: %s", self.context.complete)

        return resp, self.context.complete

    def auth_accept_close_handshake(self, message):
        """Accept handshake and generate closing handshake message.

        Verifies the server's wrapped message (the security context must
        already be established) and builds the closing handshake: one
        security-layer byte (always 1) plus a 3-byte buffer size that is
        irrelevant because no further GSSAPI messages are exchanged.

        Args:
            message: a wrapped gssapi message from the server.

        Returns:
            bytearray (closing handshake message to be send to the server).
        """
        if not self.context.complete:
            raise errors.ProgrammingError("Security context is not completed")
        _LOGGER.debug("Server message: %s", message)
        _LOGGER.debug("GSSAPI flags in use: %s", self.context.actual_flags)
        try:
            unwraped = self.context.unwrap(message)
            _LOGGER.debug("Unwraped: %s", unwraped)
        except gssapi.raw.exceptions.BadMICError as err:
            _LOGGER.debug("Unable to unwrap server message: %s", err)
            raise errors.InterfaceError(
                "Unable to unwrap server message: {}".format(err)
            )

        _LOGGER.debug("Unwrapped server message: %s", unwraped)
        # The message contents for the clients closing message:
        #   - security level 1 byte, must be always 1.
        #   - conciliated buffer size 3 bytes, without importance as no
        #     further GSSAPI messages will be sent.
        response = bytearray(b"\x01\x00\x00\00")
        # Closing handshake must not be encrypted.
        _LOGGER.debug("Message response: %s", response)
        wraped = self.context.wrap(response, encrypt=False)
        _LOGGER.debug(
            "Wrapped message response: %s, length: %d",
            wraped[0],
            len(wraped[0])
        )

        return wraped.message
+
+
class MySQL_OCI_AuthPlugin(BaseAuthPlugin):
    """Implement the MySQL OCI IAM authentication plugin."""

    plugin_name = "authentication_oci_client"
    requires_ssl = False
    context = None

    def _prepare_auth_response(self, signature, oci_config):
        """Prepare client's authentication response

        Prepares client's authentication response in JSON format
        Args:
            signature: server's nonce to be signed by client.
            oci_config: OCI configuration object.

        Returns:
            JSON_STRING {"fingerprint": string, "signature": string}
        """
        import json  # stdlib; local import leaves module imports untouched

        # BUG FIX: the original serialized via repr(dict).replace(...),
        # which yields invalid JSON whenever a value contains a space or
        # quote; a real JSON encoder with compact separators produces
        # the same output for the normal case and is correct otherwise.
        auth_response = {
            "fingerprint": oci_config["fingerprint"],
            "signature": b64encode(signature).decode(),
        }
        return json.dumps(auth_response, separators=(",", ":"))

    def _get_private_key(self, key_path):
        """Get the private_key form the given location.

        Raises ProgrammingError when the 'cryptography' package is
        missing or the key cannot be read/parsed.
        """
        if not CRYPTOGRAPHY_AVAILABLE:
            raise errors.ProgrammingError(
                "Package 'cryptography' is not installed"
            )
        try:
            with open(os.path.expanduser(key_path), "rb") as key_file:
                private_key = serialization.load_pem_private_key(
                    key_file.read(),
                    password=None,
                )
        except (TypeError, OSError, ValueError, UnsupportedAlgorithm) as err:
            raise errors.ProgrammingError(
                f'An error occurred while reading the API_KEY from "{key_path}":'
                f" {err}")

        return private_key

    def _get_valid_oci_config(self, oci_path=None, profile_name="DEFAULT"):
        """Get a valid OCI config from the given configuration file path.

        Raises ProgrammingError when the 'oci' SDK is missing or the
        profile is invalid/incomplete.
        """
        try:
            from oci import config, exceptions
        except ImportError:
            raise errors.ProgrammingError(
                'Package "oci" (Oracle Cloud Infrastructure Python SDK)'
                ' is not installed.')
        if not oci_path:
            oci_path = config.DEFAULT_LOCATION

        error_list = []
        req_keys = {
            "fingerprint": (lambda x: len(x) > 32),
            "key_file": (lambda x: os.path.exists(os.path.expanduser(x)))
        }

        try:
            # key_file is validated by oci.config if present
            oci_config = config.from_file(oci_path, profile_name)
            for req_key in req_keys:
                try:
                    # Verify parameter in req_key is present and valid
                    if oci_config[req_key] \
                       and not req_keys[req_key](oci_config[req_key]):
                        error_list.append(f'Parameter "{req_key}" is invalid')
                except KeyError:
                    error_list.append(f'Does not contain parameter {req_key}')
        except (
            exceptions.ConfigFileNotFound,
            exceptions.InvalidConfig,
            exceptions.InvalidKeyFilePath,
            exceptions.InvalidPrivateKey,
            exceptions.MissingPrivateKeyPassphrase,
            exceptions.ProfileNotFound
        ) as err:
            error_list.append(str(err))

        # Raise errors if any
        if error_list:
            raise errors.ProgrammingError(
                f'Invalid profile {profile_name} in: "{oci_path}". '
                f" Errors found: {error_list}")

        return oci_config

    def auth_response(self, oci_path=None):
        """Prepare authentication string for the server.

        Signs the server nonce with the configured API key and returns
        the JSON response, UTF-8 encoded.
        """
        if not CRYPTOGRAPHY_AVAILABLE:
            raise errors.ProgrammingError(
                "Package 'cryptography' is not installed"
            )
        _LOGGER.debug("server nonce: %s, len %d",
                      self._auth_data, len(self._auth_data))
        _LOGGER.debug("OCI configuration file location: %s", oci_path)

        oci_config = self._get_valid_oci_config(oci_path)

        private_key = self._get_private_key(oci_config['key_file'])
        signature = private_key.sign(
            self._auth_data,
            padding.PKCS1v15(),
            hashes.SHA256()
        )

        auth_response = self._prepare_auth_response(signature, oci_config)
        _LOGGER.debug("authentication response: %s", auth_response)
        return auth_response.encode()
+
+
def get_auth_plugin(plugin_name):
    """Return the authentication plugin class for ``plugin_name``.

    Searches the registered subclasses of BaseAuthPlugin for one whose
    ``plugin_name`` attribute matches.

    Raises errors.NotSupportedError when plugin_name is not supported.

    Returns a subclass of BaseAuthPlugin.
    """
    match = next(
        (cls for cls in BaseAuthPlugin.__subclasses__()  # pylint: disable=E1101
         if cls.plugin_name == plugin_name),
        None)
    if match is not None:
        return match

    raise errors.NotSupportedError(
        "Authentication plugin '{0}' is not supported".format(plugin_name))
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/charsets.py b/monEnvTP/lib/python3.8/site-packages/mysql/connector/charsets.py
new file mode 100644
index 0000000000000000000000000000000000000000..76ebaeb7eded192921df8313d5783c704e5826b9
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql/connector/charsets.py
@@ -0,0 +1,350 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2013, 2019, Oracle and/or its affiliates. All rights reserved.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+# This file was auto-generated.
+_GENERATED_ON = '2019-04-29'
+_MYSQL_VERSION = (8, 0, 17)
+
+"""This module contains the MySQL Server Character Sets"""
+
+MYSQL_CHARACTER_SETS = [
+ # (character set name, collation, default)
+ None,
+ ("big5", "big5_chinese_ci", True), # 1
+ ("latin2", "latin2_czech_cs", False), # 2
+ ("dec8", "dec8_swedish_ci", True), # 3
+ ("cp850", "cp850_general_ci", True), # 4
+ ("latin1", "latin1_german1_ci", False), # 5
+ ("hp8", "hp8_english_ci", True), # 6
+ ("koi8r", "koi8r_general_ci", True), # 7
+ ("latin1", "latin1_swedish_ci", True), # 8
+ ("latin2", "latin2_general_ci", True), # 9
+ ("swe7", "swe7_swedish_ci", True), # 10
+ ("ascii", "ascii_general_ci", True), # 11
+ ("ujis", "ujis_japanese_ci", True), # 12
+ ("sjis", "sjis_japanese_ci", True), # 13
+ ("cp1251", "cp1251_bulgarian_ci", False), # 14
+ ("latin1", "latin1_danish_ci", False), # 15
+ ("hebrew", "hebrew_general_ci", True), # 16
+ None,
+ ("tis620", "tis620_thai_ci", True), # 18
+ ("euckr", "euckr_korean_ci", True), # 19
+ ("latin7", "latin7_estonian_cs", False), # 20
+ ("latin2", "latin2_hungarian_ci", False), # 21
+ ("koi8u", "koi8u_general_ci", True), # 22
+ ("cp1251", "cp1251_ukrainian_ci", False), # 23
+ ("gb2312", "gb2312_chinese_ci", True), # 24
+ ("greek", "greek_general_ci", True), # 25
+ ("cp1250", "cp1250_general_ci", True), # 26
+ ("latin2", "latin2_croatian_ci", False), # 27
+ ("gbk", "gbk_chinese_ci", True), # 28
+ ("cp1257", "cp1257_lithuanian_ci", False), # 29
+ ("latin5", "latin5_turkish_ci", True), # 30
+ ("latin1", "latin1_german2_ci", False), # 31
+ ("armscii8", "armscii8_general_ci", True), # 32
+ ("utf8", "utf8_general_ci", True), # 33
+ ("cp1250", "cp1250_czech_cs", False), # 34
+ ("ucs2", "ucs2_general_ci", True), # 35
+ ("cp866", "cp866_general_ci", True), # 36
+ ("keybcs2", "keybcs2_general_ci", True), # 37
+ ("macce", "macce_general_ci", True), # 38
+ ("macroman", "macroman_general_ci", True), # 39
+ ("cp852", "cp852_general_ci", True), # 40
+ ("latin7", "latin7_general_ci", True), # 41
+ ("latin7", "latin7_general_cs", False), # 42
+ ("macce", "macce_bin", False), # 43
+ ("cp1250", "cp1250_croatian_ci", False), # 44
+ ("utf8mb4", "utf8mb4_general_ci", False), # 45
+ ("utf8mb4", "utf8mb4_bin", False), # 46
+ ("latin1", "latin1_bin", False), # 47
+ ("latin1", "latin1_general_ci", False), # 48
+ ("latin1", "latin1_general_cs", False), # 49
+ ("cp1251", "cp1251_bin", False), # 50
+ ("cp1251", "cp1251_general_ci", True), # 51
+ ("cp1251", "cp1251_general_cs", False), # 52
+ ("macroman", "macroman_bin", False), # 53
+ ("utf16", "utf16_general_ci", True), # 54
+ ("utf16", "utf16_bin", False), # 55
+ ("utf16le", "utf16le_general_ci", True), # 56
+ ("cp1256", "cp1256_general_ci", True), # 57
+ ("cp1257", "cp1257_bin", False), # 58
+ ("cp1257", "cp1257_general_ci", True), # 59
+ ("utf32", "utf32_general_ci", True), # 60
+ ("utf32", "utf32_bin", False), # 61
+ ("utf16le", "utf16le_bin", False), # 62
+ ("binary", "binary", True), # 63
+ ("armscii8", "armscii8_bin", False), # 64
+ ("ascii", "ascii_bin", False), # 65
+ ("cp1250", "cp1250_bin", False), # 66
+ ("cp1256", "cp1256_bin", False), # 67
+ ("cp866", "cp866_bin", False), # 68
+ ("dec8", "dec8_bin", False), # 69
+ ("greek", "greek_bin", False), # 70
+ ("hebrew", "hebrew_bin", False), # 71
+ ("hp8", "hp8_bin", False), # 72
+ ("keybcs2", "keybcs2_bin", False), # 73
+ ("koi8r", "koi8r_bin", False), # 74
+ ("koi8u", "koi8u_bin", False), # 75
+ ("utf8", "utf8_tolower_ci", False), # 76
+ ("latin2", "latin2_bin", False), # 77
+ ("latin5", "latin5_bin", False), # 78
+ ("latin7", "latin7_bin", False), # 79
+ ("cp850", "cp850_bin", False), # 80
+ ("cp852", "cp852_bin", False), # 81
+ ("swe7", "swe7_bin", False), # 82
+ ("utf8", "utf8_bin", False), # 83
+ ("big5", "big5_bin", False), # 84
+ ("euckr", "euckr_bin", False), # 85
+ ("gb2312", "gb2312_bin", False), # 86
+ ("gbk", "gbk_bin", False), # 87
+ ("sjis", "sjis_bin", False), # 88
+ ("tis620", "tis620_bin", False), # 89
+ ("ucs2", "ucs2_bin", False), # 90
+ ("ujis", "ujis_bin", False), # 91
+ ("geostd8", "geostd8_general_ci", True), # 92
+ ("geostd8", "geostd8_bin", False), # 93
+ ("latin1", "latin1_spanish_ci", False), # 94
+ ("cp932", "cp932_japanese_ci", True), # 95
+ ("cp932", "cp932_bin", False), # 96
+ ("eucjpms", "eucjpms_japanese_ci", True), # 97
+ ("eucjpms", "eucjpms_bin", False), # 98
+ ("cp1250", "cp1250_polish_ci", False), # 99
+ None,
+ ("utf16", "utf16_unicode_ci", False), # 101
+ ("utf16", "utf16_icelandic_ci", False), # 102
+ ("utf16", "utf16_latvian_ci", False), # 103
+ ("utf16", "utf16_romanian_ci", False), # 104
+ ("utf16", "utf16_slovenian_ci", False), # 105
+ ("utf16", "utf16_polish_ci", False), # 106
+ ("utf16", "utf16_estonian_ci", False), # 107
+ ("utf16", "utf16_spanish_ci", False), # 108
+ ("utf16", "utf16_swedish_ci", False), # 109
+ ("utf16", "utf16_turkish_ci", False), # 110
+ ("utf16", "utf16_czech_ci", False), # 111
+ ("utf16", "utf16_danish_ci", False), # 112
+ ("utf16", "utf16_lithuanian_ci", False), # 113
+ ("utf16", "utf16_slovak_ci", False), # 114
+ ("utf16", "utf16_spanish2_ci", False), # 115
+ ("utf16", "utf16_roman_ci", False), # 116
+ ("utf16", "utf16_persian_ci", False), # 117
+ ("utf16", "utf16_esperanto_ci", False), # 118
+ ("utf16", "utf16_hungarian_ci", False), # 119
+ ("utf16", "utf16_sinhala_ci", False), # 120
+ ("utf16", "utf16_german2_ci", False), # 121
+ ("utf16", "utf16_croatian_ci", False), # 122
+ ("utf16", "utf16_unicode_520_ci", False), # 123
+ ("utf16", "utf16_vietnamese_ci", False), # 124
+ None,
+ None,
+ None,
+ ("ucs2", "ucs2_unicode_ci", False), # 128
+ ("ucs2", "ucs2_icelandic_ci", False), # 129
+ ("ucs2", "ucs2_latvian_ci", False), # 130
+ ("ucs2", "ucs2_romanian_ci", False), # 131
+ ("ucs2", "ucs2_slovenian_ci", False), # 132
+ ("ucs2", "ucs2_polish_ci", False), # 133
+ ("ucs2", "ucs2_estonian_ci", False), # 134
+ ("ucs2", "ucs2_spanish_ci", False), # 135
+ ("ucs2", "ucs2_swedish_ci", False), # 136
+ ("ucs2", "ucs2_turkish_ci", False), # 137
+ ("ucs2", "ucs2_czech_ci", False), # 138
+ ("ucs2", "ucs2_danish_ci", False), # 139
+ ("ucs2", "ucs2_lithuanian_ci", False), # 140
+ ("ucs2", "ucs2_slovak_ci", False), # 141
+ ("ucs2", "ucs2_spanish2_ci", False), # 142
+ ("ucs2", "ucs2_roman_ci", False), # 143
+ ("ucs2", "ucs2_persian_ci", False), # 144
+ ("ucs2", "ucs2_esperanto_ci", False), # 145
+ ("ucs2", "ucs2_hungarian_ci", False), # 146
+ ("ucs2", "ucs2_sinhala_ci", False), # 147
+ ("ucs2", "ucs2_german2_ci", False), # 148
+ ("ucs2", "ucs2_croatian_ci", False), # 149
+ ("ucs2", "ucs2_unicode_520_ci", False), # 150
+ ("ucs2", "ucs2_vietnamese_ci", False), # 151
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ ("ucs2", "ucs2_general_mysql500_ci", False), # 159
+ ("utf32", "utf32_unicode_ci", False), # 160
+ ("utf32", "utf32_icelandic_ci", False), # 161
+ ("utf32", "utf32_latvian_ci", False), # 162
+ ("utf32", "utf32_romanian_ci", False), # 163
+ ("utf32", "utf32_slovenian_ci", False), # 164
+ ("utf32", "utf32_polish_ci", False), # 165
+ ("utf32", "utf32_estonian_ci", False), # 166
+ ("utf32", "utf32_spanish_ci", False), # 167
+ ("utf32", "utf32_swedish_ci", False), # 168
+ ("utf32", "utf32_turkish_ci", False), # 169
+ ("utf32", "utf32_czech_ci", False), # 170
+ ("utf32", "utf32_danish_ci", False), # 171
+ ("utf32", "utf32_lithuanian_ci", False), # 172
+ ("utf32", "utf32_slovak_ci", False), # 173
+ ("utf32", "utf32_spanish2_ci", False), # 174
+ ("utf32", "utf32_roman_ci", False), # 175
+ ("utf32", "utf32_persian_ci", False), # 176
+ ("utf32", "utf32_esperanto_ci", False), # 177
+ ("utf32", "utf32_hungarian_ci", False), # 178
+ ("utf32", "utf32_sinhala_ci", False), # 179
+ ("utf32", "utf32_german2_ci", False), # 180
+ ("utf32", "utf32_croatian_ci", False), # 181
+ ("utf32", "utf32_unicode_520_ci", False), # 182
+ ("utf32", "utf32_vietnamese_ci", False), # 183
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ ("utf8", "utf8_unicode_ci", False), # 192
+ ("utf8", "utf8_icelandic_ci", False), # 193
+ ("utf8", "utf8_latvian_ci", False), # 194
+ ("utf8", "utf8_romanian_ci", False), # 195
+ ("utf8", "utf8_slovenian_ci", False), # 196
+ ("utf8", "utf8_polish_ci", False), # 197
+ ("utf8", "utf8_estonian_ci", False), # 198
+ ("utf8", "utf8_spanish_ci", False), # 199
+ ("utf8", "utf8_swedish_ci", False), # 200
+ ("utf8", "utf8_turkish_ci", False), # 201
+ ("utf8", "utf8_czech_ci", False), # 202
+ ("utf8", "utf8_danish_ci", False), # 203
+ ("utf8", "utf8_lithuanian_ci", False), # 204
+ ("utf8", "utf8_slovak_ci", False), # 205
+ ("utf8", "utf8_spanish2_ci", False), # 206
+ ("utf8", "utf8_roman_ci", False), # 207
+ ("utf8", "utf8_persian_ci", False), # 208
+ ("utf8", "utf8_esperanto_ci", False), # 209
+ ("utf8", "utf8_hungarian_ci", False), # 210
+ ("utf8", "utf8_sinhala_ci", False), # 211
+ ("utf8", "utf8_german2_ci", False), # 212
+ ("utf8", "utf8_croatian_ci", False), # 213
+ ("utf8", "utf8_unicode_520_ci", False), # 214
+ ("utf8", "utf8_vietnamese_ci", False), # 215
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ ("utf8", "utf8_general_mysql500_ci", False), # 223
+ ("utf8mb4", "utf8mb4_unicode_ci", False), # 224
+ ("utf8mb4", "utf8mb4_icelandic_ci", False), # 225
+ ("utf8mb4", "utf8mb4_latvian_ci", False), # 226
+ ("utf8mb4", "utf8mb4_romanian_ci", False), # 227
+ ("utf8mb4", "utf8mb4_slovenian_ci", False), # 228
+ ("utf8mb4", "utf8mb4_polish_ci", False), # 229
+ ("utf8mb4", "utf8mb4_estonian_ci", False), # 230
+ ("utf8mb4", "utf8mb4_spanish_ci", False), # 231
+ ("utf8mb4", "utf8mb4_swedish_ci", False), # 232
+ ("utf8mb4", "utf8mb4_turkish_ci", False), # 233
+ ("utf8mb4", "utf8mb4_czech_ci", False), # 234
+ ("utf8mb4", "utf8mb4_danish_ci", False), # 235
+ ("utf8mb4", "utf8mb4_lithuanian_ci", False), # 236
+ ("utf8mb4", "utf8mb4_slovak_ci", False), # 237
+ ("utf8mb4", "utf8mb4_spanish2_ci", False), # 238
+ ("utf8mb4", "utf8mb4_roman_ci", False), # 239
+ ("utf8mb4", "utf8mb4_persian_ci", False), # 240
+ ("utf8mb4", "utf8mb4_esperanto_ci", False), # 241
+ ("utf8mb4", "utf8mb4_hungarian_ci", False), # 242
+ ("utf8mb4", "utf8mb4_sinhala_ci", False), # 243
+ ("utf8mb4", "utf8mb4_german2_ci", False), # 244
+ ("utf8mb4", "utf8mb4_croatian_ci", False), # 245
+ ("utf8mb4", "utf8mb4_unicode_520_ci", False), # 246
+ ("utf8mb4", "utf8mb4_vietnamese_ci", False), # 247
+ ("gb18030", "gb18030_chinese_ci", True), # 248
+ ("gb18030", "gb18030_bin", False), # 249
+ ("gb18030", "gb18030_unicode_520_ci", False), # 250
+ None,
+ None,
+ None,
+ None,
+ ("utf8mb4", "utf8mb4_0900_ai_ci", True), # 255
+ ("utf8mb4", "utf8mb4_de_pb_0900_ai_ci", False), # 256
+ ("utf8mb4", "utf8mb4_is_0900_ai_ci", False), # 257
+ ("utf8mb4", "utf8mb4_lv_0900_ai_ci", False), # 258
+ ("utf8mb4", "utf8mb4_ro_0900_ai_ci", False), # 259
+ ("utf8mb4", "utf8mb4_sl_0900_ai_ci", False), # 260
+ ("utf8mb4", "utf8mb4_pl_0900_ai_ci", False), # 261
+ ("utf8mb4", "utf8mb4_et_0900_ai_ci", False), # 262
+ ("utf8mb4", "utf8mb4_es_0900_ai_ci", False), # 263
+ ("utf8mb4", "utf8mb4_sv_0900_ai_ci", False), # 264
+ ("utf8mb4", "utf8mb4_tr_0900_ai_ci", False), # 265
+ ("utf8mb4", "utf8mb4_cs_0900_ai_ci", False), # 266
+ ("utf8mb4", "utf8mb4_da_0900_ai_ci", False), # 267
+ ("utf8mb4", "utf8mb4_lt_0900_ai_ci", False), # 268
+ ("utf8mb4", "utf8mb4_sk_0900_ai_ci", False), # 269
+ ("utf8mb4", "utf8mb4_es_trad_0900_ai_ci", False), # 270
+ ("utf8mb4", "utf8mb4_la_0900_ai_ci", False), # 271
+ None,
+ ("utf8mb4", "utf8mb4_eo_0900_ai_ci", False), # 273
+ ("utf8mb4", "utf8mb4_hu_0900_ai_ci", False), # 274
+ ("utf8mb4", "utf8mb4_hr_0900_ai_ci", False), # 275
+ None,
+ ("utf8mb4", "utf8mb4_vi_0900_ai_ci", False), # 277
+ ("utf8mb4", "utf8mb4_0900_as_cs", False), # 278
+ ("utf8mb4", "utf8mb4_de_pb_0900_as_cs", False), # 279
+ ("utf8mb4", "utf8mb4_is_0900_as_cs", False), # 280
+ ("utf8mb4", "utf8mb4_lv_0900_as_cs", False), # 281
+ ("utf8mb4", "utf8mb4_ro_0900_as_cs", False), # 282
+ ("utf8mb4", "utf8mb4_sl_0900_as_cs", False), # 283
+ ("utf8mb4", "utf8mb4_pl_0900_as_cs", False), # 284
+ ("utf8mb4", "utf8mb4_et_0900_as_cs", False), # 285
+ ("utf8mb4", "utf8mb4_es_0900_as_cs", False), # 286
+ ("utf8mb4", "utf8mb4_sv_0900_as_cs", False), # 287
+ ("utf8mb4", "utf8mb4_tr_0900_as_cs", False), # 288
+ ("utf8mb4", "utf8mb4_cs_0900_as_cs", False), # 289
+ ("utf8mb4", "utf8mb4_da_0900_as_cs", False), # 290
+ ("utf8mb4", "utf8mb4_lt_0900_as_cs", False), # 291
+ ("utf8mb4", "utf8mb4_sk_0900_as_cs", False), # 292
+ ("utf8mb4", "utf8mb4_es_trad_0900_as_cs", False), # 293
+ ("utf8mb4", "utf8mb4_la_0900_as_cs", False), # 294
+ None,
+ ("utf8mb4", "utf8mb4_eo_0900_as_cs", False), # 296
+ ("utf8mb4", "utf8mb4_hu_0900_as_cs", False), # 297
+ ("utf8mb4", "utf8mb4_hr_0900_as_cs", False), # 298
+ None,
+ ("utf8mb4", "utf8mb4_vi_0900_as_cs", False), # 300
+ None,
+ None,
+ ("utf8mb4", "utf8mb4_ja_0900_as_cs", False), # 303
+ ("utf8mb4", "utf8mb4_ja_0900_as_cs_ks", False), # 304
+ ("utf8mb4", "utf8mb4_0900_as_ci", False), # 305
+ ("utf8mb4", "utf8mb4_ru_0900_ai_ci", False), # 306
+ ("utf8mb4", "utf8mb4_ru_0900_as_cs", False), # 307
+ ("utf8mb4", "utf8mb4_zh_0900_as_cs", False), # 308
+ ("utf8mb4", "utf8mb4_0900_bin", False), # 309
+]
+
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/connection.py b/monEnvTP/lib/python3.8/site-packages/mysql/connector/connection.py
new file mode 100644
index 0000000000000000000000000000000000000000..7516fcfd5119b6bbac121bd1d2f0d2527ff677d1
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql/connector/connection.py
@@ -0,0 +1,1548 @@
+# Copyright (c) 2009, 2021, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""Implementing communication with MySQL servers.
+"""
+
+from decimal import Decimal
+from io import IOBase
+import datetime
+import getpass
+import logging
+import os
+import socket
+import struct
+import time
+import warnings
+
+from .authentication import get_auth_plugin
+from .constants import (
+ ClientFlag, ServerCmd, ServerFlag, FieldType,
+ flag_is_set, ShutdownType, NET_BUFFER_LENGTH
+)
+
+from . import errors, version
+from .conversion import MySQLConverter
+from .cursor import (
+ CursorBase, MySQLCursor, MySQLCursorRaw,
+ MySQLCursorBuffered, MySQLCursorBufferedRaw, MySQLCursorPrepared,
+ MySQLCursorDict, MySQLCursorBufferedDict, MySQLCursorNamedTuple,
+ MySQLCursorBufferedNamedTuple)
+from .network import MySQLUnixSocket, MySQLTCPSocket
+from .protocol import MySQLProtocol
+from .utils import int1store, int4store, lc_int, get_platform
+from .abstracts import MySQLConnectionAbstract
+
+logging.getLogger(__name__).addHandler(logging.NullHandler())
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class MySQLConnection(MySQLConnectionAbstract):
+ """Connection to a MySQL Server"""
+ def __init__(self, *args, **kwargs):
+ self._protocol = None
+ self._socket = None
+ self._handshake = None
+ super(MySQLConnection, self).__init__(*args, **kwargs)
+
+ self._converter_class = MySQLConverter
+
+ self._client_flags = ClientFlag.get_default()
+ self._charset_id = 45
+ self._sql_mode = None
+ self._time_zone = None
+ self._autocommit = False
+
+ self._user = ''
+ self._password = ''
+ self._database = ''
+ self._host = '127.0.0.1'
+ self._port = 3306
+ self._unix_socket = None
+ self._client_host = ''
+ self._client_port = 0
+ self._ssl = {}
+ self._force_ipv6 = False
+
+ self._use_unicode = True
+ self._get_warnings = False
+ self._raise_on_warnings = False
+ self._buffered = False
+ self._unread_result = False
+ self._have_next_result = False
+ self._raw = False
+ self._in_transaction = False
+
+ self._prepared_statements = None
+
+ self._ssl_active = False
+ self._auth_plugin = None
+ self._krb_service_principal = None
+ self._pool_config_version = None
+ self._query_attrs_supported = False
+
+ self._columns_desc = []
+ self._mfa_nfactor = 1
+
+ if kwargs:
+ try:
+ self.connect(**kwargs)
+ except:
+ # Tidy-up underlying socket on failure
+ self.close()
+ self._socket = None
+ raise
+
+ def _add_default_conn_attrs(self):
+ """Add the default connection attributes."""
+ platform = get_platform()
+ license_chunks = version.LICENSE.split(" ")
+ if license_chunks[0] == "GPLv2":
+ client_license = "GPL-2.0"
+ else:
+ client_license = "Commercial"
+ default_conn_attrs = {
+ "_pid": str(os.getpid()),
+ "_platform": platform["arch"],
+ "_source_host": socket.gethostname(),
+ "_client_name": "mysql-connector-python",
+ "_client_license": client_license,
+ "_client_version": ".".join(
+ [str(x) for x in version.VERSION[0:3]]
+ ),
+ "_os": platform["version"],
+ }
+
+ self._conn_attrs.update((default_conn_attrs))
+
+ def _do_handshake(self):
+ """Get the handshake from the MySQL server"""
+ packet = self._socket.recv()
+ if packet[4] == 255:
+ raise errors.get_exception(packet)
+
+ self._handshake = None
+ try:
+ handshake = self._protocol.parse_handshake(packet)
+ except Exception as err:
+ # pylint: disable=E1101
+ raise errors.get_mysql_exception(msg=err.msg, errno=err.errno,
+ sqlstate=err.sqlstate)
+
+ self._server_version = self._check_server_version(
+ handshake['server_version_original'])
+
+ if not handshake['capabilities'] & ClientFlag.SSL:
+ if self._auth_plugin == "mysql_clear_password":
+ err_msg = ("Clear password authentication is not supported "
+ "over insecure channels")
+ raise errors.InterfaceError(err_msg)
+ if self._ssl.get('verify_cert'):
+ raise errors.InterfaceError("SSL is required but the server "
+ "doesn't support it", errno=2026)
+ self._client_flags &= ~ClientFlag.SSL
+ elif not self._ssl_disabled:
+ self._client_flags |= ClientFlag.SSL
+
+ if handshake['capabilities'] & ClientFlag.PLUGIN_AUTH:
+ self.set_client_flags([ClientFlag.PLUGIN_AUTH])
+
+ if handshake['capabilities'] & ClientFlag.CLIENT_QUERY_ATTRIBUTES:
+ self._query_attrs_supported = True
+ self.set_client_flags([ClientFlag.CLIENT_QUERY_ATTRIBUTES])
+
+ if handshake['capabilities'] & ClientFlag.MULTI_FACTOR_AUTHENTICATION:
+ self.set_client_flags([ClientFlag.MULTI_FACTOR_AUTHENTICATION])
+
+ self._handshake = handshake
+
+ def _do_auth(self, username=None, password=None, database=None,
+ client_flags=0, charset=45, ssl_options=None, conn_attrs=None):
+ """Authenticate with the MySQL server
+
+ Authentication happens in two parts. We first send a response to the
+ handshake. The MySQL server will then send either an AuthSwitchRequest
+ or an error packet.
+
+ Raises NotSupportedError when we get the old, insecure password
+ reply back. Raises any error coming from MySQL.
+ """
+ self._ssl_active = False
+ if client_flags & ClientFlag.SSL:
+ packet = self._protocol.make_auth_ssl(charset=charset,
+ client_flags=client_flags)
+ self._socket.send(packet)
+ if ssl_options.get('tls_ciphersuites') is not None:
+ tls_ciphersuites = ":".join(ssl_options.get('tls_ciphersuites'))
+ else:
+ tls_ciphersuites = ""
+ self._socket.switch_to_ssl(ssl_options.get('ca'),
+ ssl_options.get('cert'),
+ ssl_options.get('key'),
+ ssl_options.get('verify_cert') or False,
+ ssl_options.get('verify_identity') or
+ False,
+ tls_ciphersuites,
+ ssl_options.get('tls_versions'))
+ self._ssl_active = True
+
+ if self._password1 and password != self._password1:
+ password = self._password1
+
+ _LOGGER.debug(
+ "# _do_auth(): user: %s", username)
+ _LOGGER.debug(
+ "# _do_auth(): self._auth_plugin: %s", self._auth_plugin)
+ if self._auth_plugin.startswith("authentication_oci") and not username:
+ username = getpass.getuser()
+ _LOGGER.debug(
+ "MySQL user is empty, OS user: %s will be used for "
+ "authentication_oci_client", username)
+
+ packet = self._protocol.make_auth(
+ handshake=self._handshake,
+ username=username, password=password, database=database,
+ charset=charset, client_flags=client_flags,
+ ssl_enabled=self._ssl_active,
+ auth_plugin=self._auth_plugin,
+ conn_attrs=conn_attrs)
+ self._socket.send(packet)
+ self._auth_switch_request(username, password)
+
+ if not (client_flags & ClientFlag.CONNECT_WITH_DB) and database:
+ self.cmd_init_db(database)
+
+ return True
+
+ def _auth_switch_request(self, username=None, password=None):
+ """Handle second part of authentication
+
+ Raises NotSupportedError when we get the old, insecure password
+ reply back. Raises any error coming from MySQL.
+ """
+ auth = None
+ new_auth_plugin = self._auth_plugin or self._handshake["auth_plugin"]
+ _LOGGER.debug("new_auth_plugin: %s", new_auth_plugin)
+ packet = self._socket.recv()
+ if packet[4] == 254 and len(packet) == 5:
+ raise errors.NotSupportedError(
+ "Authentication with old (insecure) passwords "
+ "is not supported. For more information, lookup "
+ "Password Hashing in the latest MySQL manual")
+ elif packet[4] == 254:
+ # AuthSwitchRequest
+ (new_auth_plugin,
+ auth_data) = self._protocol.parse_auth_switch_request(packet)
+ auth = get_auth_plugin(new_auth_plugin)(auth_data,
+ username=self._user, password=password,
+ ssl_enabled=self._ssl_active)
+ packet = self._auth_continue(auth, new_auth_plugin, auth_data)
+
+ if packet[4] == 1:
+ auth_data = self._protocol.parse_auth_more_data(packet)
+ auth = get_auth_plugin(new_auth_plugin)(
+ auth_data, password=password, ssl_enabled=self._ssl_active)
+ if new_auth_plugin == "caching_sha2_password":
+ response = auth.auth_response()
+ if response:
+ self._socket.send(response)
+ packet = self._socket.recv()
+
+ if packet[4] == 0:
+ return self._handle_ok(packet)
+ elif packet[4] == 2:
+ return self._handle_mfa(packet)
+ elif packet[4] == 255:
+ raise errors.get_exception(packet)
+ return None
+
+ def _handle_mfa(self, packet):
+ """Handle Multi Factor Authentication."""
+ self._mfa_nfactor += 1
+ if self._mfa_nfactor == 2:
+ password = self._password2
+ elif self._mfa_nfactor == 3:
+ password = self._password3
+ else:
+ raise errors.InterfaceError(
+ "Failed Multi Factor Authentication (invalid N factor)"
+ )
+
+ _LOGGER.debug("# MFA N Factor #%d", self._mfa_nfactor)
+
+ packet, auth_plugin = self._protocol.parse_auth_next_factor(
+ packet[4:]
+ )
+ auth = get_auth_plugin(auth_plugin)(
+ None,
+ username=self._user,
+ password=password,
+ ssl_enabled=self._ssl_active,
+ )
+ packet = self._auth_continue(auth, auth_plugin, packet)
+
+ if packet[4] == 1:
+ auth_data = self._protocol.parse_auth_more_data(packet)
+ auth = get_auth_plugin(auth_plugin)(
+ auth_data, password=password, ssl_enabled=self._ssl_active)
+ if auth_plugin == "caching_sha2_password":
+ response = auth.auth_response()
+ if response:
+ self._socket.send(response)
+ packet = self._socket.recv()
+
+ if packet[4] == 0:
+ return self._handle_ok(packet)
+ elif packet[4] == 2:
+ return self._handle_mfa(packet)
+ elif packet[4] == 255:
+ raise errors.get_exception(packet)
+ return None
+
+ def _auth_continue(self, auth, auth_plugin, auth_data):
+ """Continue with the authentication."""
+ if auth_plugin == "authentication_ldap_sasl_client":
+ _LOGGER.debug("# auth_data: %s", auth_data)
+ response = auth.auth_response(self._krb_service_principal)
+ elif auth_plugin == "authentication_kerberos_client":
+ _LOGGER.debug("# auth_data: %s", auth_data)
+ response = auth.auth_response(auth_data)
+ elif auth_plugin == "authentication_oci_client":
+ _LOGGER.debug(
+ "# oci configuration file path: %s", self._oci_config_file
+ )
+ response = auth.auth_response(self._oci_config_file)
+ else:
+ response = auth.auth_response()
+
+ _LOGGER.debug("# request: %s size: %s", response, len(response))
+ self._socket.send(response)
+ packet = self._socket.recv()
+ _LOGGER.debug("# server response packet: %s", packet)
+ if (
+ auth_plugin == "authentication_ldap_sasl_client"
+ and len(packet) >= 6 and packet[5] == 114 and packet[6] == 61
+ ): # 'r' and '='
+ # Continue with sasl authentication
+ dec_response = packet[5:]
+ cresponse = auth.auth_continue(dec_response)
+ self._socket.send(cresponse)
+ packet = self._socket.recv()
+ if packet[5] == 118 and packet[6] == 61: # 'v' and '='
+ if auth.auth_finalize(packet[5:]):
+ # receive packed OK
+ packet = self._socket.recv()
+ elif (
+ auth_plugin == "authentication_ldap_sasl_client"
+ and auth_data == b'GSSAPI' and packet[4] != 255
+ ):
+ rcode_size = 5 # header size for the response status code.
+ _LOGGER.debug("# Continue with sasl GSSAPI authentication")
+ _LOGGER.debug("# response header: %s", packet[:rcode_size+1])
+ _LOGGER.debug("# response size: %s", len(packet))
+
+ _LOGGER.debug("# Negotiate a service request")
+ complete = False
+ tries = 0 # To avoid a infinite loop attempt no more than feedback messages
+ while not complete and tries < 5:
+ _LOGGER.debug("%s Attempt %s %s", "-" * 20, tries + 1, "-" * 20)
+ _LOGGER.debug("<< server response: %s", packet)
+ _LOGGER.debug("# response code: %s", packet[:rcode_size + 1])
+ step, complete = auth.auth_continue_krb(packet[rcode_size:])
+ _LOGGER.debug(" >> response to server: %s", step)
+ self._socket.send(step or b'')
+ packet = self._socket.recv()
+ tries += 1
+ if not complete:
+ raise errors.InterfaceError(
+ "Unable to fulfill server request after %s attempts. "
+ "Last server response: %s", tries, packet,
+ )
+ _LOGGER.debug(
+ " last GSSAPI response from server: %s length: %d",
+ packet,
+ len(packet),
+ )
+ last_step = auth.auth_accept_close_handshake(packet[rcode_size:])
+ _LOGGER.debug(
+ " >> last response to server: %s length: %d",
+ last_step,
+ len(last_step),
+ )
+ self._socket.send(last_step)
+ # Receive final handshake from server
+ packet = self._socket.recv()
+ _LOGGER.debug("<< final handshake from server: %s", packet)
+
+ # receive OK packet from server.
+ packet = self._socket.recv()
+ _LOGGER.debug("<< ok packet from server: %s", packet)
+ elif (
+ auth_plugin == "authentication_kerberos_client"
+ and packet[4] != 255
+ ):
+ rcode_size = 5 # Reader size for the response status code
+ _LOGGER.debug("# Continue with GSSAPI authentication")
+ _LOGGER.debug("# Response header: %s", packet[:rcode_size + 1])
+ _LOGGER.debug("# Response size: %s", len(packet))
+ _LOGGER.debug("# Negotiate a service request")
+ complete = False
+ tries = 0
+
+ while not complete and tries < 5:
+ _LOGGER.debug(
+ "%s Attempt %s %s", "-" * 20, tries + 1, "-" * 20
+ )
+ _LOGGER.debug("<< Server response: %s", packet)
+ _LOGGER.debug(
+ "# Response code: %s", packet[:rcode_size + 1]
+ )
+ token, complete = auth.auth_continue(packet[rcode_size:])
+ if token:
+ self._socket.send(token)
+ if complete:
+ break
+ packet = self._socket.recv()
+
+ _LOGGER.debug(">> Response to server: %s", token)
+ tries += 1
+
+ if not complete:
+ raise errors.InterfaceError(
+ "Unable to fulfill server request after {} attempts. "
+ "Last server response: {}".format(tries, packet)
+ )
+
+ _LOGGER.debug(
+ "Last response from server: %s length: %d",
+ packet,
+ len(packet),
+ )
+
+ # Receive OK packet from server.
+ packet = self._socket.recv()
+ _LOGGER.debug("<< Ok packet from server: %s", packet)
+
+ return packet
+
+ def _get_connection(self, prtcls=None):
+ """Get connection based on configuration
+
+ This method will return the appropriated connection object using
+ the connection parameters.
+
+ Returns subclass of MySQLBaseSocket.
+ """
+ conn = None
+ if self.unix_socket and os.name != 'nt':
+ conn = MySQLUnixSocket(unix_socket=self.unix_socket)
+ else:
+ conn = MySQLTCPSocket(host=self.server_host,
+ port=self.server_port,
+ force_ipv6=self._force_ipv6)
+
+ conn.set_connection_timeout(self._connection_timeout)
+ return conn
+
+ def _open_connection(self):
+ """Open the connection to the MySQL server
+
+ This method sets up and opens the connection to the MySQL server.
+
+ Raises on errors.
+ """
+ if self._auth_plugin == "authentication_kerberos_client":
+ if os.name == "nt":
+ raise errors.ProgrammingError(
+ "The Kerberos authentication is not available on Windows"
+ )
+ if not self._user:
+ cls = get_auth_plugin(self._auth_plugin)
+ self._user = cls.get_user_from_credentials()
+
+ self._protocol = MySQLProtocol()
+ self._socket = self._get_connection()
+ try:
+ self._socket.open_connection()
+ self._do_handshake()
+ self._do_auth(self._user, self._password,
+ self._database, self._client_flags, self._charset_id,
+ self._ssl, self._conn_attrs)
+ self.set_converter_class(self._converter_class)
+ if self._client_flags & ClientFlag.COMPRESS:
+ self._socket.recv = self._socket.recv_compressed
+ self._socket.send = self._socket.send_compressed
+ self._socket.set_connection_timeout(None)
+ except:
+ # close socket
+ self.close()
+ raise
+
+ if (
+ not self._ssl_disabled
+ and hasattr(self._socket.sock, "version")
+ and callable(self._socket.sock.version)
+ ):
+ # Raise a deprecation warning if TLSv1 or TLSv1.1 is being used
+ tls_version = self._socket.sock.version()
+ if tls_version in ("TLSv1", "TLSv1.1"):
+ warn_msg = (
+ f"This connection is using {tls_version} which is now "
+ "deprecated and will be removed in a future release of "
+ "MySQL Connector/Python"
+ )
+ warnings.warn(warn_msg, DeprecationWarning)
+
+ def shutdown(self):
+ """Shut down connection to MySQL Server.
+ """
+ if not self._socket:
+ return
+
+ try:
+ self._socket.shutdown()
+ except (AttributeError, errors.Error):
+ pass # Getting an exception would mean we are disconnected.
+
+ def close(self):
+ """Disconnect from the MySQL server"""
+ if not self._socket:
+ return
+
+ try:
+ self.cmd_quit()
+ except (AttributeError, errors.Error):
+ pass # Getting an exception would mean we are disconnected.
+ self._socket.close_connection()
+ self._handshake = None
+
+ disconnect = close
+
+    def _send_cmd(self, command, argument=None, packet_number=0, packet=None,
+                  expect_response=True, compressed_packet_number=0):
+        """Send a command to the MySQL server
+
+        This method sends a command with an optional argument.
+        If packet is not None, it will be sent and the argument will be
+        ignored.
+
+        The packet_number is optional and should usually not be used.
+
+        Some commands might not result in the MySQL server returning
+        a response. If a command does not return anything, you should
+        set expect_response to False. The _send_cmd method will then
+        return None instead of a MySQL packet.
+
+        Returns a MySQL packet or None.
+        """
+        self.handle_unread_result()
+
+        try:
+            # 'packet' takes precedence over 'argument' when both are given.
+            self._socket.send(
+                self._protocol.make_command(command, packet or argument),
+                packet_number, compressed_packet_number)
+        except AttributeError:
+            # self._socket is None after close(); surface as unavailable.
+            raise errors.OperationalError("MySQL Connection not available.")
+
+        if not expect_response:
+            return None
+        return self._socket.recv()
+
+    def _send_data(self, data_file, send_empty_packet=False):
+        """Send data to the MySQL server
+
+        This method accepts a file-like object and sends its data
+        as is to the MySQL server. If the send_empty_packet is
+        True, it will send an extra empty package (for example
+        when using LOAD LOCAL DATA INFILE).
+
+        Returns a MySQL packet.
+        """
+        self.handle_unread_result()
+
+        if not hasattr(data_file, 'read'):
+            raise ValueError("expecting a file-like object")
+
+        try:
+            # Read in chunks just below the network buffer size; the 16
+            # bytes of headroom presumably cover packet overhead — confirm.
+            buf = data_file.read(NET_BUFFER_LENGTH - 16)
+            while buf:
+                self._socket.send(buf)
+                buf = data_file.read(NET_BUFFER_LENGTH - 16)
+        except AttributeError:
+            raise errors.OperationalError("MySQL Connection not available.")
+
+        if send_empty_packet:
+            try:
+                # An empty packet signals the end of the file transfer.
+                self._socket.send(b'')
+            except AttributeError:
+                raise errors.OperationalError(
+                    "MySQL Connection not available.")
+
+        return self._socket.recv()
+
+ def _handle_server_status(self, flags):
+ """Handle the server flags found in MySQL packets
+
+ This method handles the server flags send by MySQL OK and EOF
+ packets. It, for example, checks whether there exists more result
+ sets or whether there is an ongoing transaction.
+ """
+ self._have_next_result = flag_is_set(ServerFlag.MORE_RESULTS_EXISTS,
+ flags)
+ self._in_transaction = flag_is_set(ServerFlag.STATUS_IN_TRANS, flags)
+
+    @property
+    def in_transaction(self):
+        """MySQL session has started a transaction
+
+        True when the last OK/EOF packet reported an open transaction
+        (see _handle_server_status).
+        """
+        return self._in_transaction
+
+ def _handle_ok(self, packet):
+ """Handle a MySQL OK packet
+
+ This method handles a MySQL OK packet. When the packet is found to
+ be an Error packet, an error will be raised. If the packet is neither
+ an OK or an Error packet, errors.InterfaceError will be raised.
+
+ Returns a dict()
+ """
+ if packet[4] == 0:
+ ok_pkt = self._protocol.parse_ok(packet)
+ self._handle_server_status(ok_pkt['status_flag'])
+ return ok_pkt
+ elif packet[4] == 255:
+ raise errors.get_exception(packet)
+ raise errors.InterfaceError('Expected OK packet')
+
+ def _handle_eof(self, packet):
+ """Handle a MySQL EOF packet
+
+ This method handles a MySQL EOF packet. When the packet is found to
+ be an Error packet, an error will be raised. If the packet is neither
+ and OK or an Error packet, errors.InterfaceError will be raised.
+
+ Returns a dict()
+ """
+ if packet[4] == 254:
+ eof = self._protocol.parse_eof(packet)
+ self._handle_server_status(eof['status_flag'])
+ return eof
+ elif packet[4] == 255:
+ raise errors.get_exception(packet)
+ raise errors.InterfaceError('Expected EOF packet')
+
+ def _handle_load_data_infile(self, filename):
+ """Handle a LOAD DATA INFILE LOCAL request"""
+ file_name = os.path.abspath(filename)
+ if os.path.islink(file_name):
+ raise errors.OperationalError("Use of symbolic link is not allowed")
+ if not self._allow_local_infile and \
+ not self._allow_local_infile_in_path:
+ raise errors.DatabaseError(
+ "LOAD DATA LOCAL INFILE file request rejected due to "
+ "restrictions on access.")
+ if not self._allow_local_infile and self._allow_local_infile_in_path:
+ # validate filename is inside of allow_local_infile_in_path path.
+ infile_path = os.path.abspath(self._allow_local_infile_in_path)
+ c_path = None
+ try:
+ c_path = os.path.commonpath([infile_path, file_name])
+ except ValueError as err:
+ err_msg = ("{} while loading file `{}` and path `{}` given"
+ " in allow_local_infile_in_path")
+ raise errors.InterfaceError(
+ err_msg.format(str(err), file_name, infile_path))
+
+ if c_path != infile_path:
+ err_msg = ("The file `{}` is not found in the given "
+ "allow_local_infile_in_path {}")
+ raise errors.DatabaseError(
+ err_msg.format(file_name,infile_path))
+
+ try:
+ data_file = open(file_name, 'rb')
+ return self._handle_ok(self._send_data(data_file,
+ send_empty_packet=True))
+ except IOError:
+ # Send a empty packet to cancel the operation
+ try:
+ self._socket.send(b'')
+ except AttributeError:
+ raise errors.OperationalError(
+ "MySQL Connection not available.")
+ raise errors.InterfaceError(
+ "File '{0}' could not be read".format(file_name))
+ finally:
+ try:
+ data_file.close()
+ except (IOError, NameError):
+ pass
+
+    def _handle_result(self, packet):
+        """Handle a MySQL Result
+
+        This method handles a MySQL result, for example, after sending the
+        query command. OK and EOF packets will be handled and returned. If
+        the packet is an Error packet, an errors.Error-exception will be
+        raised.
+
+        The dictionary returned of:
+        - columns: column information
+        - eof: the EOF-packet information
+
+        Returns a dict()
+        """
+        if not packet or len(packet) < 4:
+            raise errors.InterfaceError('Empty response')
+        elif packet[4] == 0:
+            return self._handle_ok(packet)
+        elif packet[4] == 251:
+            # 0xFB marks a LOAD DATA LOCAL INFILE request; the rest of
+            # the packet is the file name the server asks for.
+            filename = packet[5:].decode()
+            return self._handle_load_data_infile(filename)
+        elif packet[4] == 254:
+            return self._handle_eof(packet)
+        elif packet[4] == 255:
+            raise errors.get_exception(packet)
+
+        # We have a text result set
+        column_count = self._protocol.parse_column_count(packet)
+        if not column_count or not isinstance(column_count, int):
+            raise errors.InterfaceError('Illegal result set.')
+
+        # One column-definition packet per column, then a terminating EOF.
+        self._columns_desc = [None,] * column_count
+        for i in range(0, column_count):
+            self._columns_desc[i] = self._protocol.parse_column(
+                self._socket.recv(), self.python_charset)
+
+        eof = self._handle_eof(self._socket.recv())
+        self.unread_result = True
+        return {'columns': self._columns_desc, 'eof': eof}
+
+ def get_row(self, binary=False, columns=None, raw=None):
+ """Get the next rows returned by the MySQL server
+
+ This method gets one row from the result set after sending, for
+ example, the query command. The result is a tuple consisting of the
+ row and the EOF packet.
+ If no row was available in the result set, the row data will be None.
+
+ Returns a tuple.
+ """
+ (rows, eof) = self.get_rows(count=1, binary=binary, columns=columns,
+ raw=raw)
+ if rows:
+ return (rows[0], eof)
+ return (None, eof)
+
+    def get_rows(self, count=None, binary=False, columns=None, raw=None,
+                 prep_stmt=None):
+        """Get all rows returned by the MySQL server
+
+        This method gets all rows returned by the MySQL server after sending,
+        for example, the query command. The result is a tuple consisting of
+        a list of rows and the EOF packet.
+
+        Returns a tuple()
+        """
+        # NOTE(review): prep_stmt is unused here; presumably kept for
+        # signature compatibility with other connection classes — confirm.
+        if raw is None:
+            raw = self._raw
+
+        if not self.unread_result:
+            raise errors.InternalError("No result set available.")
+
+        try:
+            if binary:
+                # Python codecs do not know 'utf8mb4'; map it to 'utf8'.
+                charset = self.charset
+                if charset == 'utf8mb4':
+                    charset = 'utf8'
+                rows = self._protocol.read_binary_result(
+                    self._socket, columns, count, charset)
+            else:
+                rows = self._protocol.read_text_result(self._socket,
+                                                       self._server_version,
+                                                       count=count)
+        except errors.Error as err:
+            self.unread_result = False
+            raise err
+
+        rows, eof_p = rows
+
+        # Convert raw values to Python types unless a raw/binary result
+        # was requested and a converter is available.
+        if not (binary or raw) and self._columns_desc is not None and rows \
+           and hasattr(self, 'converter'):
+            row_to_python = self.converter.row_to_python
+            rows = [row_to_python(row, self._columns_desc) for row in rows]
+
+        if eof_p is not None:
+            self._handle_server_status(eof_p['status_flag'] if 'status_flag' in
+                                       eof_p else eof_p['server_status'])
+            self.unread_result = False
+
+        return rows, eof_p
+
+ def consume_results(self):
+ """Consume results
+ """
+ if self.unread_result:
+ self.get_rows()
+
+ def cmd_init_db(self, database):
+ """Change the current database
+
+ This method changes the current (default) database by sending the
+ INIT_DB command. The result is a dictionary containing the OK packet
+ information.
+
+ Returns a dict()
+ """
+ return self._handle_ok(
+ self._send_cmd(ServerCmd.INIT_DB, database.encode('utf-8')))
+
+    def cmd_query(self, query, raw=False, buffered=False, raw_as_string=False):
+        """Send a query to the MySQL server
+
+        This method send the query to the MySQL server and returns the result.
+
+        If there was a text result, a tuple will be returned consisting of
+        the number of columns and a list containing information about these
+        columns.
+
+        When the query doesn't return a text result, the OK or EOF packet
+        information as dictionary will be returned. In case the result was
+        an error, exception errors.Error will be raised.
+
+        Returns a tuple()
+        """
+        # NOTE(review): raw, buffered and raw_as_string are unused here;
+        # presumably kept for API parity with the C extension — confirm.
+        if not isinstance(query, bytearray):
+            if isinstance(query, str):
+                query = query.encode('utf-8')
+            query = bytearray(query)
+        # Prepare query attrs
+        # Python codecs do not know 'utf8mb4'; map it to 'utf8'.
+        charset = self.charset if self.charset != "utf8mb4" else "utf8"
+        packet = bytearray()
+        if not self._query_attrs_supported and self._query_attrs:
+            warnings.warn(
+                "This version of the server does not support Query Attributes",
+                category=Warning)
+        if self._client_flags & ClientFlag.CLIENT_QUERY_ATTRIBUTES:
+            # Encode each query attribute using the binary protocol:
+            # a NULL bitmap, per-attribute type bytes, names and values.
+            names = []
+            types = []
+            values = []
+            null_bitmap = [0] * ((len(self._query_attrs) + 7) // 8)
+            for pos, attr_tuple in enumerate(self._query_attrs):
+                value = attr_tuple[1]
+                flags = 0
+                if value is None:
+                    null_bitmap[(pos // 8)] |= 1 << (pos % 8)
+                    types.append(int1store(FieldType.NULL) +
+                                 int1store(flags))
+                    continue
+                elif isinstance(value, int):
+                    (packed, field_type,
+                     flags) = self._protocol._prepare_binary_integer(value)
+                    values.append(packed)
+                elif isinstance(value, str):
+                    value = value.encode(charset)
+                    values.append(lc_int(len(value)) + value)
+                    field_type = FieldType.VARCHAR
+                elif isinstance(value, bytes):
+                    values.append(lc_int(len(value)) + value)
+                    field_type = FieldType.BLOB
+                elif isinstance(value, Decimal):
+                    values.append(
+                        lc_int(len(str(value).encode(
+                            charset))) + str(value).encode(charset))
+                    field_type = FieldType.DECIMAL
+                elif isinstance(value, float):
+                    values.append(struct.pack('<d', value))
+                    field_type = FieldType.DOUBLE
+                elif isinstance(value, (datetime.datetime, datetime.date)):
+                    (packed, field_type) = \
+                        self._protocol._prepare_binary_timestamp(value)
+                    values.append(packed)
+                elif isinstance(value, (datetime.timedelta, datetime.time)):
+                    (packed, field_type) = \
+                        self._protocol._prepare_binary_time(value)
+                    values.append(packed)
+                else:
+                    raise errors.ProgrammingError(
+                        "MySQL binary protocol can not handle "
+                        "'{classname}' objects".format(
+                            classname=value.__class__.__name__))
+                types.append(int1store(field_type) +
+                             int1store(flags))
+                name = attr_tuple[0].encode(charset)
+                names.append(lc_int(len(name)) + name)
+
+            # int<lenenc> parameter_count Number of parameters
+            packet.extend(lc_int(len(self._query_attrs)))
+            # int<lenenc> parameter_set_count Number of parameter sets.
+            # Currently always 1
+            packet.extend(lc_int(1))
+            if values:
+                packet.extend(
+                    b''.join([struct.pack('B', bit) for bit in null_bitmap]) +
+                    int1store(1))
+                for _type, name in zip(types, names):
+                    packet.extend(_type)
+                    packet.extend(name)
+
+                for value in values:
+                    packet.extend(value)
+
+        packet.extend(query)
+        query = bytes(packet)
+        try:
+            result = self._handle_result(self._send_cmd(ServerCmd.QUERY, query))
+        except errors.ProgrammingError as err:
+            # Map the server-side local-infile refusal (errno 3948) to a
+            # clearer DatabaseError.
+            if err.errno == 3948 and \
+               "Loading local data is disabled" in err.msg:
+                err_msg = ("LOAD DATA LOCAL INFILE file request rejected due "
+                           "to restrictions on access.")
+                raise errors.DatabaseError(err_msg)
+            raise
+        if self._have_next_result:
+            raise errors.InterfaceError(
+                'Use cmd_query_iter for statements with multiple queries.')
+
+        return result
+
+    def cmd_query_iter(self, statements):
+        """Send one or more statements to the MySQL server
+
+        Similar to the cmd_query method, but instead returns a generator
+        object to iterate through results. It sends the statements to the
+        MySQL server and through the iterator you can get the results.
+
+        statement = 'SELECT 1; INSERT INTO t1 VALUES (); SELECT 2'
+        for result in cnx.cmd_query_iter(statement):
+            if 'columns' in result:
+                columns = result['columns']
+                rows = cnx.get_rows()
+            else:
+                # do something useful with INSERT result
+
+        Returns a generator.
+        """
+        packet = bytearray()
+        if not isinstance(statements, bytearray):
+            if isinstance(statements, str):
+                statements = statements.encode('utf8')
+            statements = bytearray(statements)
+
+        if self._client_flags & ClientFlag.CLIENT_QUERY_ATTRIBUTES:
+            # int<lenenc> parameter_count Number of parameters
+            packet.extend(lc_int(0))
+            # int<lenenc> parameter_set_count Number of parameter sets.
+            # Currently always 1
+            packet.extend(lc_int(1))
+
+        packet.extend(statements)
+        query = bytes(packet)
+        # Handle the first query result
+        yield self._handle_result(self._send_cmd(ServerCmd.QUERY, query))
+
+        # Handle next results, if any
+        while self._have_next_result:
+            self.handle_unread_result()
+            yield self._handle_result(self._socket.recv())
+
+ def cmd_refresh(self, options):
+ """Send the Refresh command to the MySQL server
+
+ This method sends the Refresh command to the MySQL server. The options
+ argument should be a bitwise value using constants.RefreshOption.
+ Usage example:
+ RefreshOption = mysql.connector.RefreshOption
+ refresh = RefreshOption.LOG | RefreshOption.THREADS
+ cnx.cmd_refresh(refresh)
+
+ The result is a dictionary with the OK packet information.
+
+ Returns a dict()
+ """
+ return self._handle_ok(
+ self._send_cmd(ServerCmd.REFRESH, int4store(options)))
+
+    def cmd_quit(self):
+        """Close the current connection with the server
+
+        This method sends the QUIT command to the MySQL server, closing the
+        current connection. Since no response can be returned to the
+        client, cmd_quit() returns the packet it sent.
+
+        Returns the raw packet (bytes-like) that was sent.
+        """
+        self.handle_unread_result()
+
+        packet = self._protocol.make_command(ServerCmd.QUIT)
+        self._socket.send(packet, 0, 0)
+        return packet
+
+ def cmd_shutdown(self, shutdown_type=None):
+ """Shut down the MySQL Server
+
+ This method sends the SHUTDOWN command to the MySQL server and is only
+ possible if the current user has SUPER privileges. The result is a
+ dictionary containing the OK packet information.
+
+ Note: Most applications and scripts do not the SUPER privilege.
+
+ Returns a dict()
+ """
+ if shutdown_type:
+ if not ShutdownType.get_info(shutdown_type):
+ raise errors.InterfaceError("Invalid shutdown type")
+ atype = shutdown_type
+ else:
+ atype = ShutdownType.SHUTDOWN_DEFAULT
+ return self._handle_eof(self._send_cmd(ServerCmd.SHUTDOWN,
+ int4store(atype)))
+
+ def cmd_statistics(self):
+ """Send the statistics command to the MySQL Server
+
+ This method sends the STATISTICS command to the MySQL server. The
+ result is a dictionary with various statistical information.
+
+ Returns a dict()
+ """
+ self.handle_unread_result()
+
+ packet = self._protocol.make_command(ServerCmd.STATISTICS)
+ self._socket.send(packet, 0, 0)
+ return self._protocol.parse_statistics(self._socket.recv())
+
+ def cmd_process_kill(self, mysql_pid):
+ """Kill a MySQL process
+
+ This method send the PROCESS_KILL command to the server along with
+ the process ID. The result is a dictionary with the OK packet
+ information.
+
+ Returns a dict()
+ """
+ return self._handle_ok(
+ self._send_cmd(ServerCmd.PROCESS_KILL, int4store(mysql_pid)))
+
+ def cmd_debug(self):
+ """Send the DEBUG command
+
+ This method sends the DEBUG command to the MySQL server, which
+ requires the MySQL user to have SUPER privilege. The output will go
+ to the MySQL server error log and the result of this method is a
+ dictionary with EOF packet information.
+
+ Returns a dict()
+ """
+ return self._handle_eof(self._send_cmd(ServerCmd.DEBUG))
+
+ def cmd_ping(self):
+ """Send the PING command
+
+ This method sends the PING command to the MySQL server. It is used to
+ check if the the connection is still valid. The result of this
+ method is dictionary with OK packet information.
+
+ Returns a dict()
+ """
+ return self._handle_ok(self._send_cmd(ServerCmd.PING))
+
+ def cmd_change_user(self, username='', password='', database='',
+ charset=45, password1='', password2='', password3='',
+ oci_config_file=''):
+ """Change the current logged in user
+
+ This method allows to change the current logged in user information.
+ The result is a dictionary with OK packet information.
+
+ Returns a dict()
+ """
+ self._mfa_nfactor = 1
+ self._user = username
+ self._password = password
+ self._password1 = password1
+ self._password2 = password2
+ self._password3 = password3
+
+ if self._password1 and password != self._password1:
+ password = self._password1
+
+ self.handle_unread_result()
+
+ if self._compress:
+ raise errors.NotSupportedError("Change user is not supported with "
+ "compression.")
+ packet = self._protocol.make_change_user(
+ handshake=self._handshake,
+ username=username, password=password, database=database,
+ charset=charset, client_flags=self._client_flags,
+ ssl_enabled=self._ssl_active,
+ auth_plugin=self._auth_plugin,
+ conn_attrs=self._conn_attrs)
+ self._socket.send(packet, 0, 0)
+
+ if oci_config_file:
+ self._oci_config_file = oci_config_file
+
+ ok_packet = self._auth_switch_request(username, password)
+
+ try:
+ if not (self._client_flags & ClientFlag.CONNECT_WITH_DB) \
+ and database:
+ self.cmd_init_db(database)
+ except:
+ raise
+
+ self._charset_id = charset
+ self._post_connection()
+
+ return ok_packet
+
+    @property
+    def database(self):
+        """Get the current database
+
+        Queries the server (SELECT DATABASE()) rather than using any
+        client-side cached state.
+        """
+        return self.info_query("SELECT DATABASE()")[0]
+
+    @database.setter
+    def database(self, value): # pylint: disable=W0221
+        """Set the current database"""
+        # NOTE(review): value is interpolated directly into the USE
+        # statement; callers must not pass untrusted input here.
+        self.cmd_query("USE %s" % value)
+
+ def is_connected(self):
+ """Reports whether the connection to MySQL Server is available
+
+ This method checks whether the connection to MySQL is available.
+ It is similar to ping(), but unlike the ping()-method, either True
+ or False is returned and no exception is raised.
+
+ Returns True or False.
+ """
+ try:
+ self.cmd_ping()
+ except:
+ return False # This method does not raise
+ return True
+
+    def set_allow_local_infile_in_path(self, path):
+        """set local_infile_in_path
+
+        Set allow_local_infile_in_path, the directory against which
+        LOAD DATA LOCAL INFILE file requests are validated.
+        """
+        self._allow_local_infile_in_path = path
+
+    def reset_session(self, user_variables=None, session_variables=None):
+        """Clears the current active session
+
+        This method resets the session state, if the MySQL server is 5.7.3
+        or later active session will be reset without re-authenticating.
+        For other server versions session will be reset by re-authenticating.
+
+        It is possible to provide a sequence of variables and their values to
+        be set after clearing the session. This is possible for both user
+        defined variables and session variables.
+        This method takes two arguments user_variables and session_variables
+        which are dictionaries.
+
+        Raises OperationalError if not connected, InternalError if there are
+        unread results and InterfaceError on errors.
+        """
+        if not self.is_connected():
+            raise errors.OperationalError("MySQL Connection not available.")
+
+        try:
+            self.cmd_reset_connection()
+        except errors.NotSupportedError:
+            # Older servers: fall back to re-authenticating.
+            self.cmd_change_user(self._user, self._password,
+                                 self._database, self._charset_id,
+                                 self._password1, self._password2,
+                                 self._password3,
+                                 self._oci_config_file)
+
+        cur = self.cursor()
+        if user_variables:
+            # NOTE(review): variable names are interpolated into the SET
+            # statement; callers must not pass untrusted keys here.
+            for key, value in user_variables.items():
+                cur.execute("SET @`{0}` = %s".format(key), (value,))
+        if session_variables:
+            for key, value in session_variables.items():
+                cur.execute("SET SESSION `{0}` = %s".format(key), (value,))
+
+    def reconnect(self, attempts=1, delay=0):
+        """Attempt to reconnect to the MySQL server
+
+        The argument attempts should be the number of times a reconnect
+        is tried. The delay argument is the number of seconds to wait between
+        each retry.
+
+        You may want to set the number of attempts higher and use delay when
+        you expect the MySQL server to be down for maintenance or when you
+        expect the network to be temporary unavailable.
+
+        Raises InterfaceError on errors.
+        """
+        counter = 0
+        while counter != attempts:
+            counter = counter + 1
+            try:
+                self.disconnect()
+                self.connect()
+                if self.is_connected():
+                    break
+            except Exception as err: # pylint: disable=W0703
+                # Only surface the error once the final attempt failed;
+                # otherwise wait and retry.
+                if counter == attempts:
+                    msg = "Can not reconnect to MySQL after {0} "\
+                          "attempt(s): {1}".format(attempts, str(err))
+                    raise errors.InterfaceError(msg)
+                if delay > 0:
+                    time.sleep(delay)
+
+ def ping(self, reconnect=False, attempts=1, delay=0):
+ """Check availability of the MySQL server
+
+ When reconnect is set to True, one or more attempts are made to try
+ to reconnect to the MySQL server using the reconnect()-method.
+
+ delay is the number of seconds to wait between each retry.
+
+ When the connection is not available, an InterfaceError is raised. Use
+ the is_connected()-method if you just want to check the connection
+ without raising an error.
+
+ Raises InterfaceError on errors.
+ """
+ try:
+ self.cmd_ping()
+ except:
+ if reconnect:
+ self.reconnect(attempts=attempts, delay=delay)
+ else:
+ raise errors.InterfaceError("Connection to MySQL is"
+ " not available.")
+
+ @property
+ def connection_id(self):
+ """MySQL connection ID"""
+ if self._handshake:
+ return self._handshake.get("server_threadid")
+ return None
+
+    def cursor(self, buffered=None, raw=None, prepared=None, cursor_class=None,
+               dictionary=None, named_tuple=None):
+        """Instantiates and returns a cursor
+
+        By default, MySQLCursor is returned. Depending on the options
+        while connecting, a buffered and/or raw cursor is instantiated
+        instead. Also depending upon the cursor options, rows can be
+        returned as dictionary or named tuple.
+
+        Dictionary and namedtuple based cursors are available with buffered
+        output but not raw.
+
+        It is possible to also give a custom cursor through the
+        cursor_class parameter, but it needs to be a subclass of
+        mysql.connector.cursor.CursorBase.
+
+        Raises ProgrammingError when cursor_class is not a subclass of
+        CursorBase. Raises ValueError when cursor is not available.
+
+        Returns a cursor-object
+        """
+        self.handle_unread_result()
+
+        if not self.is_connected():
+            raise errors.OperationalError("MySQL Connection not available.")
+        if cursor_class is not None:
+            if not issubclass(cursor_class, CursorBase):
+                raise errors.ProgrammingError(
+                    "Cursor class needs be to subclass of cursor.CursorBase")
+            return (cursor_class)(self)
+
+        buffered = buffered if buffered is not None else self._buffered
+        raw = raw if raw is not None else self._raw
+
+        # Encode the requested cursor features as a bitmask used to look
+        # up the concrete cursor class below.
+        cursor_type = 0
+        if buffered is True:
+            cursor_type |= 1
+        if raw is True:
+            cursor_type |= 2
+        if dictionary is True:
+            cursor_type |= 4
+        if named_tuple is True:
+            cursor_type |= 8
+        if prepared is True:
+            cursor_type |= 16
+
+        # Feature combinations missing from this map (e.g. raw together
+        # with dictionary) are reported through the ValueError below.
+        types = {
+            0: MySQLCursor,  # 0
+            1: MySQLCursorBuffered,
+            2: MySQLCursorRaw,
+            3: MySQLCursorBufferedRaw,
+            4: MySQLCursorDict,
+            5: MySQLCursorBufferedDict,
+            8: MySQLCursorNamedTuple,
+            9: MySQLCursorBufferedNamedTuple,
+            16: MySQLCursorPrepared
+        }
+        try:
+            return (types[cursor_type])(self)
+        except KeyError:
+            args = ('buffered', 'raw', 'dictionary', 'named_tuple', 'prepared')
+            raise ValueError('Cursor not available with given criteria: ' +
+                             ', '.join([args[i] for i in range(5)
+                                        if cursor_type & (1 << i) != 0]))
+
+    def commit(self):
+        """Commit the current transaction by issuing COMMIT."""
+        self._execute_query("COMMIT")
+
+    def rollback(self):
+        """Rollback current transaction"""
+        # Any pending rows must be consumed before another command is sent.
+        if self.unread_result:
+            self.get_rows()
+
+        self._execute_query("ROLLBACK")
+
+ def _execute_query(self, query):
+ """Execute a query
+
+ This method simply calls cmd_query() after checking for unread
+ result. If there are still unread result, an errors.InterfaceError
+ is raised. Otherwise whatever cmd_query() returns is returned.
+
+ Returns a dict()
+ """
+ self.handle_unread_result()
+ self.cmd_query(query)
+
+ def info_query(self, query):
+ """Send a query which only returns 1 row"""
+ cursor = self.cursor(buffered=True)
+ cursor.execute(query)
+ return cursor.fetchone()
+
+ def _handle_binary_ok(self, packet):
+ """Handle a MySQL Binary Protocol OK packet
+
+ This method handles a MySQL Binary Protocol OK packet. When the
+ packet is found to be an Error packet, an error will be raised. If
+ the packet is neither an OK or an Error packet, errors.InterfaceError
+ will be raised.
+
+ Returns a dict()
+ """
+ if packet[4] == 0:
+ return self._protocol.parse_binary_prepare_ok(packet)
+ elif packet[4] == 255:
+ raise errors.get_exception(packet)
+ raise errors.InterfaceError('Expected Binary OK packet')
+
+    def _handle_binary_result(self, packet):
+        """Handle a MySQL Result
+
+        This method handles a MySQL result, for example, after sending the
+        query command. OK and EOF packets will be handled and returned. If
+        the packet is an Error packet, an errors.Error-exception will be
+        raised.
+
+        The tuple returned by this method consist of:
+        - the number of columns in the result,
+        - a list of tuples with information about the columns,
+        - the EOF packet information as a dictionary.
+
+        Returns tuple() or dict()
+        """
+        if not packet or len(packet) < 4:
+            raise errors.InterfaceError('Empty response')
+        elif packet[4] == 0:
+            return self._handle_ok(packet)
+        elif packet[4] == 254:
+            return self._handle_eof(packet)
+        elif packet[4] == 255:
+            raise errors.get_exception(packet)
+
+        # We have a binary result set
+        column_count = self._protocol.parse_column_count(packet)
+        if not column_count or not isinstance(column_count, int):
+            raise errors.InterfaceError('Illegal result set.')
+
+        # One column-definition packet per column, then a terminating EOF.
+        columns = [None] * column_count
+        for i in range(0, column_count):
+            columns[i] = self._protocol.parse_column(
+                self._socket.recv(), self.python_charset)
+
+        eof = self._handle_eof(self._socket.recv())
+        return (column_count, columns, eof)
+
+    def cmd_stmt_fetch(self, statement_id, rows=1):
+        """Fetch a MySQL statement Result Set
+
+        This method will send the FETCH command to MySQL together with the
+        given statement id and the number of rows to fetch.
+        """
+        packet = self._protocol.make_stmt_fetch(statement_id, rows)
+        # FETCH itself gets no immediate response (expect_response=False);
+        # the rows are read afterwards, so flag the result set as unread.
+        self.unread_result = False
+        self._send_cmd(ServerCmd.STMT_FETCH, packet, expect_response=False)
+        self.unread_result = True
+
+    def cmd_stmt_prepare(self, statement):
+        """Prepare a MySQL statement
+
+        This method will send the PREPARE command to MySQL together with the
+        given statement.
+
+        Returns a dict()
+        """
+        packet = self._send_cmd(ServerCmd.STMT_PREPARE, statement)
+        result = self._handle_binary_ok(packet)
+
+        result['columns'] = []
+        result['parameters'] = []
+        # Parameter and column definitions each arrive as a sequence of
+        # column packets terminated by an EOF packet.
+        if result['num_params'] > 0:
+            for _ in range(0, result['num_params']):
+                result['parameters'].append(
+                    self._protocol.parse_column(self._socket.recv(),
+                                                self.python_charset))
+            self._handle_eof(self._socket.recv())
+        if result['num_columns'] > 0:
+            for _ in range(0, result['num_columns']):
+                result['columns'].append(
+                    self._protocol.parse_column(self._socket.recv(),
+                                                self.python_charset))
+            self._handle_eof(self._socket.recv())
+
+        return result
+
+    def cmd_stmt_execute(self, statement_id, data=(), parameters=(), flags=0):
+        """Execute a prepared MySQL statement"""
+        parameters = list(parameters)
+        long_data_used = {}
+
+        if data:
+            # Stream file-like parameter values ahead of the EXECUTE as
+            # long data, remembering which parameters were sent that way.
+            # NOTE(review): assumes data aligns index-for-index with
+            # parameters — confirm with callers.
+            for param_id, _ in enumerate(parameters):
+                if isinstance(data[param_id], IOBase):
+                    binary = True
+                    try:
+                        binary = 'b' not in data[param_id].mode
+                    except AttributeError:
+                        pass
+                    self.cmd_stmt_send_long_data(statement_id, param_id,
+                                                 data[param_id])
+                    long_data_used[param_id] = (binary,)
+        if not self._query_attrs_supported and self._query_attrs:
+            warnings.warn(
+                "This version of the server does not support Query Attributes",
+                category=Warning)
+        if self._client_flags & ClientFlag.CLIENT_QUERY_ATTRIBUTES:
+            execute_packet = self._protocol.make_stmt_execute(
+                statement_id, data, tuple(parameters), flags,
+                long_data_used, self.charset, self._query_attrs,
+                self._converter_str_fallback)
+        else:
+            execute_packet = self._protocol.make_stmt_execute(
+                statement_id, data, tuple(parameters), flags,
+                long_data_used, self.charset,
+                converter_str_fallback=self._converter_str_fallback)
+        packet = self._send_cmd(ServerCmd.STMT_EXECUTE, packet=execute_packet)
+        result = self._handle_binary_result(packet)
+        return result
+
+ def cmd_stmt_close(self, statement_id):
+ """Deallocate a prepared MySQL statement
+
+ This method deallocates the prepared statement using the
+ statement_id. Note that the MySQL server does not return
+ anything.
+ """
+ self._send_cmd(ServerCmd.STMT_CLOSE, int4store(statement_id),
+ expect_response=False)
+
+    def cmd_stmt_send_long_data(self, statement_id, param_id, data):
+        """Send data for a column
+
+        This methods send data for a column (for example BLOB) for statement
+        identified by statement_id. The param_id indicate which parameter
+        the data belongs to.
+        The data argument should be a file-like object.
+
+        Since MySQL does not send anything back, no error is raised. When
+        the MySQL server is not reachable, an OperationalError is raised.
+
+        cmd_stmt_send_long_data should be called before cmd_stmt_execute.
+
+        The total bytes send is returned.
+
+        Returns int.
+        """
+        chunk_size = 8192
+        total_sent = 0
+        # pylint: disable=W0212
+        prepare_packet = self._protocol._prepare_stmt_send_long_data
+        # pylint: enable=W0212
+        try:
+            buf = data.read(chunk_size)
+            while buf:
+                packet = prepare_packet(statement_id, param_id, buf)
+                self._send_cmd(ServerCmd.STMT_SEND_LONG_DATA, packet=packet,
+                               expect_response=False)
+                total_sent += len(buf)
+                buf = data.read(chunk_size)
+        except AttributeError:
+            # NOTE(review): AttributeError may also mean 'data' lacks a
+            # read() method, not only a closed connection — confirm.
+            raise errors.OperationalError("MySQL Connection not available.")
+
+        return total_sent
+
+ def cmd_stmt_reset(self, statement_id):
+ """Reset data for prepared statement sent as long data
+
+ The result is a dictionary with OK packet information.
+
+ Returns a dict()
+ """
+ self._handle_ok(self._send_cmd(ServerCmd.STMT_RESET,
+ int4store(statement_id)))
+
+ def cmd_reset_connection(self):
+ """Resets the session state without re-authenticating
+
+ Works only for MySQL server 5.7.3 or later.
+ The result is a dictionary with OK packet information.
+
+ Returns a dict()
+ """
+ if self._server_version < (5, 7, 3):
+ raise errors.NotSupportedError("MySQL version 5.7.2 and "
+ "earlier does not support "
+ "COM_RESET_CONNECTION.")
+ self._handle_ok(self._send_cmd(ServerCmd.RESET_CONNECTION))
+ self._post_connection()
+
+ def handle_unread_result(self):
+ """Check whether there is an unread result"""
+ if self.can_consume_results:
+ self.consume_results()
+ elif self.unread_result:
+ raise errors.InternalError("Unread result found")
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/connection_cext.py b/monEnvTP/lib/python3.8/site-packages/mysql/connector/connection_cext.py
new file mode 100644
index 0000000000000000000000000000000000000000..664daa281d2094167297e7c195419a3866a228e4
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql/connector/connection_cext.py
@@ -0,0 +1,840 @@
+# Copyright (c) 2014, 2021, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""Connection class using the C Extension
+"""
+
+# Detection of abstract methods in pylint is not working correctly
+#pylint: disable=W0223
+
+import os
+import socket
+import sysconfig
+
+from . import errors, version
+from .constants import (
+ CharacterSet, FieldFlag, ServerFlag, ShutdownType, ClientFlag
+)
+from .abstracts import MySQLConnectionAbstract, MySQLCursorAbstract
+from .protocol import MySQLProtocol
+
+HAVE_CMYSQL = False
+# pylint: disable=F0401,C0413
+try:
+ import _mysql_connector
+ from .cursor_cext import (
+ CMySQLCursor, CMySQLCursorRaw,
+ CMySQLCursorBuffered, CMySQLCursorBufferedRaw, CMySQLCursorPrepared,
+ CMySQLCursorDict, CMySQLCursorBufferedDict, CMySQLCursorNamedTuple,
+ CMySQLCursorBufferedNamedTuple)
+ from _mysql_connector import MySQLInterfaceError # pylint: disable=F0401
+except ImportError as exc:
+ raise ImportError(
+ "MySQL Connector/Python C Extension not available ({0})".format(
+ str(exc)
+ ))
+else:
+ HAVE_CMYSQL = True
+# pylint: enable=F0401,C0413
+
+
+class CMySQLConnection(MySQLConnectionAbstract):
+
+ """Class initiating a MySQL Connection using Connector/C"""
+
+ def __init__(self, **kwargs):
+ """Initialization"""
+ if not HAVE_CMYSQL:
+ raise RuntimeError(
+ "MySQL Connector/Python C Extension not available")
+ self._cmysql = None
+ self._columns = []
+ self._plugin_dir = os.path.join(
+ os.path.dirname(os.path.abspath(_mysql_connector.__file__)),
+ "mysql", "vendor", "plugin"
+ )
+ self.converter = None
+ super(CMySQLConnection, self).__init__(**kwargs)
+
+ if kwargs:
+ self.connect(**kwargs)
+
+ def _add_default_conn_attrs(self):
+ """Add default connection attributes"""
+ license_chunks = version.LICENSE.split(" ")
+ if license_chunks[0] == "GPLv2":
+ client_license = "GPL-2.0"
+ else:
+ client_license = "Commercial"
+
+ self._conn_attrs.update({
+ "_connector_name": "mysql-connector-python",
+ "_connector_license": client_license,
+ "_connector_version": ".".join(
+ [str(x) for x in version.VERSION[0:3]]),
+ "_source_host": socket.gethostname()
+ })
+
+ def _do_handshake(self):
+ """Gather information of the MySQL server before authentication"""
+ self._handshake = {
+ 'protocol': self._cmysql.get_proto_info(),
+ 'server_version_original': self._cmysql.get_server_info(),
+ 'server_threadid': self._cmysql.thread_id(),
+ 'charset': None,
+ 'server_status': None,
+ 'auth_plugin': None,
+ 'auth_data': None,
+ 'capabilities': self._cmysql.st_server_capabilities(),
+ }
+
+ self._server_version = self._check_server_version(
+ self._handshake['server_version_original']
+ )
+
+ @property
+ def _server_status(self):
+ """Returns the server status attribute of MYSQL structure"""
+ return self._cmysql.st_server_status()
+
+ def set_allow_local_infile_in_path(self, path):
+ """set local_infile_in_path
+
+ Set allow_local_infile_in_path.
+ """
+
+ if self._cmysql:
+ self._cmysql.set_load_data_local_infile_option(path)
+
+ def set_unicode(self, value=True):
+ """Toggle unicode mode
+
+ Set whether we return string fields as unicode or not.
+ Default is True.
+ """
+ self._use_unicode = value
+ if self._cmysql:
+ self._cmysql.use_unicode(value)
+ if self.converter:
+ self.converter.set_unicode(value)
+
+ @property
+ def autocommit(self):
+ """Get whether autocommit is on or off"""
+ value = self.info_query("SELECT @@session.autocommit")[0]
+ return True if value == 1 else False
+
+ @autocommit.setter
+ def autocommit(self, value): # pylint: disable=W0221
+ """Toggle autocommit"""
+ try:
+ self._cmysql.autocommit(value)
+ self._autocommit = value
+ except MySQLInterfaceError as exc:
+ raise errors.get_mysql_exception(msg=exc.msg, errno=exc.errno,
+ sqlstate=exc.sqlstate)
+
+ @property
+ def database(self):
+ """Get the current database"""
+ return self.info_query("SELECT DATABASE()")[0]
+
+ @database.setter
+ def database(self, value): # pylint: disable=W0221
+ """Set the current database"""
+ self._cmysql.select_db(value)
+
+ @property
+ def in_transaction(self):
+ """MySQL session has started a transaction"""
+ return self._server_status & ServerFlag.STATUS_IN_TRANS
+
+ def _open_connection(self):
+ charset_name = CharacterSet.get_info(self._charset_id)[0]
+ self._cmysql = _mysql_connector.MySQL( # pylint: disable=E1101,I1101
+ buffered=self._buffered,
+ raw=self._raw,
+ charset_name=charset_name,
+ connection_timeout=(self._connection_timeout or 0),
+ use_unicode=self._use_unicode,
+ auth_plugin=self._auth_plugin,
+ plugin_dir=self._plugin_dir)
+ if not self.isset_client_flag(ClientFlag.CONNECT_ARGS):
+ self._conn_attrs = {}
+ cnx_kwargs = {
+ 'host': self._host,
+ 'user': self._user,
+ 'password': self._password,
+ 'password1': self._password1,
+ 'password2': self._password2,
+ 'password3': self._password3,
+ 'database': self._database,
+ 'port': self._port,
+ 'client_flags': self._client_flags,
+ 'unix_socket': self._unix_socket,
+ 'compress': self.isset_client_flag(ClientFlag.COMPRESS),
+ 'ssl_disabled': True,
+ "conn_attrs": self._conn_attrs,
+ "local_infile": self._allow_local_infile,
+ "load_data_local_dir": self._allow_local_infile_in_path,
+ "oci_config_file": self._oci_config_file,
+ }
+
+ tls_versions = self._ssl.get('tls_versions')
+ if tls_versions is not None:
+ tls_versions.sort(reverse=True)
+ tls_versions = ",".join(tls_versions)
+ if self._ssl.get('tls_ciphersuites') is not None:
+ ssl_ciphersuites = self._ssl.get('tls_ciphersuites')[0]
+ tls_ciphersuites = self._ssl.get('tls_ciphersuites')[1]
+ else:
+ ssl_ciphersuites = None
+ tls_ciphersuites = None
+ if tls_versions is not None and "TLSv1.3" in tls_versions and \
+ not tls_ciphersuites:
+ tls_ciphersuites = "TLS_AES_256_GCM_SHA384"
+ if not self._ssl_disabled:
+ cnx_kwargs.update({
+ 'ssl_ca': self._ssl.get('ca'),
+ 'ssl_cert': self._ssl.get('cert'),
+ 'ssl_key': self._ssl.get('key'),
+ 'ssl_cipher_suites': ssl_ciphersuites,
+ 'tls_versions': tls_versions,
+ 'tls_cipher_suites': tls_ciphersuites,
+ 'ssl_verify_cert': self._ssl.get('verify_cert') or False,
+ 'ssl_verify_identity':
+ self._ssl.get('verify_identity') or False,
+ 'ssl_disabled': self._ssl_disabled
+ })
+
+ try:
+ self._cmysql.connect(**cnx_kwargs)
+ self._cmysql.converter_str_fallback = self._converter_str_fallback
+ if self.converter:
+ self.converter.str_fallback = self._converter_str_fallback
+ except MySQLInterfaceError as exc:
+ raise errors.get_mysql_exception(msg=exc.msg, errno=exc.errno,
+ sqlstate=exc.sqlstate)
+
+ self._do_handshake()
+
+ def close(self):
+ """Disconnect from the MySQL server"""
+ if self._cmysql:
+ try:
+ self.free_result()
+ self._cmysql.close()
+ except MySQLInterfaceError as exc:
+ raise errors.get_mysql_exception(msg=exc.msg, errno=exc.errno,
+ sqlstate=exc.sqlstate)
+ disconnect = close
+
+ def is_closed(self):
+ """Return True if the connection to MySQL Server is closed."""
+ return not self._cmysql.connected()
+
+ def is_connected(self):
+ """Reports whether the connection to MySQL Server is available"""
+ if self._cmysql:
+ return self._cmysql.ping()
+
+ return False
+
+ def ping(self, reconnect=False, attempts=1, delay=0):
+ """Check availability of the MySQL server
+
+ When reconnect is set to True, one or more attempts are made to try
+ to reconnect to the MySQL server using the reconnect()-method.
+
+ delay is the number of seconds to wait between each retry.
+
+ When the connection is not available, an InterfaceError is raised. Use
+ the is_connected()-method if you just want to check the connection
+ without raising an error.
+
+ Raises InterfaceError on errors.
+ """
+ errmsg = "Connection to MySQL is not available"
+
+ try:
+ connected = self._cmysql.ping()
+ except AttributeError:
+ pass # Raise or reconnect later
+ else:
+ if connected:
+ return
+
+ if reconnect:
+ self.reconnect(attempts=attempts, delay=delay)
+ else:
+ raise errors.InterfaceError(errmsg)
+
+ def set_character_set_name(self, charset):
+ """Sets the default character set name for current connection.
+ """
+ self._cmysql.set_character_set(charset)
+
+ def info_query(self, query):
+ """Send a query which only returns 1 row"""
+ self._cmysql.query(query)
+ first_row = ()
+ if self._cmysql.have_result_set:
+ first_row = self._cmysql.fetch_row()
+ if self._cmysql.fetch_row():
+ self._cmysql.free_result()
+ raise errors.InterfaceError(
+ "Query should not return more than 1 row")
+ self._cmysql.free_result()
+
+ return first_row
+
+ @property
+ def connection_id(self):
+ """MySQL connection ID"""
+ try:
+ return self._cmysql.thread_id()
+ except MySQLInterfaceError:
+ pass # Just return None
+
+ return None
+
+ def get_rows(self, count=None, binary=False, columns=None, raw=None,
+ prep_stmt=None):
+ """Get all or a subset of rows returned by the MySQL server"""
+ unread_result = prep_stmt.have_result_set if prep_stmt \
+ else self.unread_result
+ if not (self._cmysql and unread_result):
+ raise errors.InternalError("No result set available")
+
+ if raw is None:
+ raw = self._raw
+
+ rows = []
+ if count is not None and count <= 0:
+ raise AttributeError("count should be 1 or higher, or None")
+
+ counter = 0
+ try:
+ fetch_row = (
+ prep_stmt.fetch_row if prep_stmt
+ else self._cmysql.fetch_row
+ )
+ if self.converter:
+ # When using a converter class, the C extension should not
+ # convert the values. This can be accomplished by setting
+ # the raw option to True.
+ self._cmysql.raw(True)
+ row = fetch_row()
+ while row:
+ if not self._raw and self.converter:
+ row = list(row)
+ for i, _ in enumerate(row):
+ if not raw:
+ row[i] = self.converter.to_python(self._columns[i],
+ row[i])
+ row = tuple(row)
+ rows.append(row)
+ counter += 1
+ if count and counter == count:
+ break
+ row = fetch_row()
+ if not row:
+ _eof = self.fetch_eof_columns(prep_stmt)['eof']
+ if prep_stmt:
+ prep_stmt.free_result()
+ self._unread_result = False
+ else:
+ self.free_result()
+ else:
+ _eof = None
+ except MySQLInterfaceError as exc:
+ if prep_stmt:
+ prep_stmt.free_result()
+ raise errors.InterfaceError(str(exc))
+ else:
+ self.free_result()
+ raise errors.get_mysql_exception(msg=exc.msg, errno=exc.errno,
+ sqlstate=exc.sqlstate)
+
+ return rows, _eof
+
+ def get_row(self, binary=False, columns=None, raw=None, prep_stmt=None):
+ """Get the next rows returned by the MySQL server"""
+ try:
+ rows, eof = self.get_rows(count=1, binary=binary, columns=columns,
+ raw=raw, prep_stmt=prep_stmt)
+ if rows:
+ return (rows[0], eof)
+ return (None, eof)
+ except IndexError:
+ # No row available
+ return (None, None)
+
+ def next_result(self):
+ """Reads the next result"""
+ if self._cmysql:
+ self._cmysql.consume_result()
+ return self._cmysql.next_result()
+ return None
+
+ def free_result(self):
+ """Frees the result"""
+ if self._cmysql:
+ self._cmysql.free_result()
+
+ def commit(self):
+ """Commit current transaction"""
+ if self._cmysql:
+ self._cmysql.commit()
+
+ def rollback(self):
+ """Rollback current transaction"""
+ if self._cmysql:
+ self._cmysql.consume_result()
+ self._cmysql.rollback()
+
+ def cmd_init_db(self, database):
+ """Change the current database"""
+ try:
+ self._cmysql.select_db(database)
+ except MySQLInterfaceError as exc:
+ raise errors.get_mysql_exception(msg=exc.msg, errno=exc.errno,
+ sqlstate=exc.sqlstate)
+
+ def fetch_eof_columns(self, prep_stmt=None):
+ """Fetch EOF and column information"""
+ have_result_set = prep_stmt.have_result_set if prep_stmt \
+ else self._cmysql.have_result_set
+ if not have_result_set:
+ raise errors.InterfaceError("No result set")
+
+ fields = prep_stmt.fetch_fields() if prep_stmt \
+ else self._cmysql.fetch_fields()
+ self._columns = []
+ for col in fields:
+ self._columns.append((
+ col[4],
+ int(col[8]),
+ None,
+ None,
+ None,
+ None,
+ ~int(col[9]) & FieldFlag.NOT_NULL,
+ int(col[9]),
+ int(col[6]),
+ ))
+
+ return {
+ 'eof': {
+ 'status_flag': self._server_status,
+ 'warning_count': self._cmysql.st_warning_count(),
+ },
+ 'columns': self._columns,
+ }
+
+ def fetch_eof_status(self):
+ """Fetch EOF and status information"""
+ if self._cmysql:
+ return {
+ 'warning_count': self._cmysql.st_warning_count(),
+ 'field_count': self._cmysql.st_field_count(),
+ 'insert_id': self._cmysql.insert_id(),
+ 'affected_rows': self._cmysql.affected_rows(),
+ 'server_status': self._server_status,
+ }
+
+ return None
+
+ def cmd_stmt_prepare(self, statement):
+ """Prepares the SQL statement"""
+ if not self._cmysql:
+ raise errors.OperationalError("MySQL Connection not available")
+
+ try:
+ stmt = self._cmysql.stmt_prepare(statement)
+ stmt.converter_str_fallback = self._converter_str_fallback
+ return stmt
+ except MySQLInterfaceError as err:
+ raise errors.InterfaceError(str(err))
+
+ # pylint: disable=W0221
+ def cmd_stmt_execute(self, prep_stmt, *args):
+ """Executes the prepared statement"""
+ try:
+ prep_stmt.stmt_execute(*args)
+ except MySQLInterfaceError as err:
+ raise errors.InterfaceError(str(err))
+
+ self._columns = []
+ if not prep_stmt.have_result_set:
+ # No result
+ self._unread_result = False
+ return self.fetch_eof_status()
+
+ self._unread_result = True
+ return self.fetch_eof_columns(prep_stmt)
+
+ def cmd_stmt_close(self, prep_stmt):
+ """Closes the prepared statement"""
+ if self._unread_result:
+ raise errors.InternalError("Unread result found")
+ prep_stmt.stmt_close()
+
+ def cmd_stmt_reset(self, prep_stmt):
+ """Resets the prepared statement"""
+ if self._unread_result:
+ raise errors.InternalError("Unread result found")
+ prep_stmt.stmt_reset()
+ # pylint: enable=W0221
+
+ def cmd_query(self, query, raw=None, buffered=False, raw_as_string=False):
+ """Send a query to the MySQL server"""
+ self.handle_unread_result()
+ if raw is None:
+ raw = self._raw
+ try:
+ if not isinstance(query, bytes):
+ query = query.encode('utf-8')
+ self._cmysql.query(query,
+ raw=raw, buffered=buffered,
+ raw_as_string=raw_as_string,
+ query_attrs=self._query_attrs)
+ except MySQLInterfaceError as exc:
+ raise errors.get_mysql_exception(exc.errno, msg=exc.msg,
+ sqlstate=exc.sqlstate)
+ except AttributeError:
+ if self._unix_socket:
+ addr = self._unix_socket
+ else:
+ addr = self._host + ':' + str(self._port)
+ raise errors.OperationalError(
+ errno=2055, values=(addr, 'Connection not available.'))
+
+ self._columns = []
+ if not self._cmysql.have_result_set:
+ # No result
+ return self.fetch_eof_status()
+
+ return self.fetch_eof_columns()
+ _execute_query = cmd_query
+
+ def cursor(self, buffered=None, raw=None, prepared=None, cursor_class=None,
+ dictionary=None, named_tuple=None):
+ """Instantiates and returns a cursor using C Extension
+
+ By default, CMySQLCursor is returned. Depending on the options
+ while connecting, a buffered and/or raw cursor is instantiated
+ instead. Also depending upon the cursor options, rows can be
+ returned as dictionary or named tuple.
+
+ Dictionary and namedtuple based cursors are available with buffered
+ output but not raw.
+
+ It is possible to also give a custom cursor through the
+ cursor_class parameter, but it needs to be a subclass of
+ mysql.connector.cursor_cext.CMySQLCursor.
+
+ Raises ProgrammingError when cursor_class is not a subclass of
+ CursorBase. Raises ValueError when cursor is not available.
+
+ Returns instance of CMySQLCursor or subclass.
+
+ :param buffered: Return a buffering cursor
+ :param raw: Return a raw cursor
+ :param prepared: Return a cursor which uses prepared statements
+ :param cursor_class: Use a custom cursor class
+ :param dictionary: Rows are returned as dictionary
+ :param named_tuple: Rows are returned as named tuple
+ :return: Subclass of CMySQLCursor
+ :rtype: CMySQLCursor or subclass
+ """
+ self.handle_unread_result(prepared)
+ if not self.is_connected():
+ raise errors.OperationalError("MySQL Connection not available.")
+ if cursor_class is not None:
+ if not issubclass(cursor_class, MySQLCursorAbstract):
+ raise errors.ProgrammingError(
+ "Cursor class needs be to subclass"
+ " of cursor_cext.CMySQLCursor")
+ return (cursor_class)(self)
+
+ buffered = buffered or self._buffered
+ raw = raw or self._raw
+
+ cursor_type = 0
+ if buffered is True:
+ cursor_type |= 1
+ if raw is True:
+ cursor_type |= 2
+ if dictionary is True:
+ cursor_type |= 4
+ if named_tuple is True:
+ cursor_type |= 8
+ if prepared is True:
+ cursor_type |= 16
+
+ types = {
+ 0: CMySQLCursor, # 0
+ 1: CMySQLCursorBuffered,
+ 2: CMySQLCursorRaw,
+ 3: CMySQLCursorBufferedRaw,
+ 4: CMySQLCursorDict,
+ 5: CMySQLCursorBufferedDict,
+ 8: CMySQLCursorNamedTuple,
+ 9: CMySQLCursorBufferedNamedTuple,
+ 16: CMySQLCursorPrepared
+ }
+ try:
+ return (types[cursor_type])(self)
+ except KeyError:
+ args = ('buffered', 'raw', 'dictionary', 'named_tuple', 'prepared')
+ raise ValueError('Cursor not available with given criteria: ' +
+ ', '.join([args[i] for i in range(5)
+ if cursor_type & (1 << i) != 0]))
+
+ @property
+ def num_rows(self):
+ """Returns number of rows of current result set"""
+ if not self._cmysql.have_result_set:
+ raise errors.InterfaceError("No result set")
+
+ return self._cmysql.num_rows()
+
+ @property
+ def warning_count(self):
+ """Returns number of warnings"""
+ if not self._cmysql:
+ return 0
+
+ return self._cmysql.warning_count()
+
+ @property
+ def result_set_available(self):
+ """Check if a result set is available"""
+ if not self._cmysql:
+ return False
+
+ return self._cmysql.have_result_set
+
+ @property
+ def unread_result(self):
+ """Check if there are unread results or rows"""
+ return self.result_set_available
+
+ @property
+ def more_results(self):
+ """Check if there are more results"""
+ return self._cmysql.more_results()
+
+ def prepare_for_mysql(self, params):
+ """Prepare parameters for statements
+
+        This method is used by cursors to prepare parameters found in the
+ list (or tuple) params.
+
+ Returns dict.
+ """
+ if isinstance(params, (list, tuple)):
+ if self.converter:
+ result = [
+ self.converter.quote(
+ self.converter.escape(
+ self.converter.to_mysql(value)
+ )
+ ) for value in params
+ ]
+ else:
+ result = self._cmysql.convert_to_mysql(*params)
+ elif isinstance(params, dict):
+ result = {}
+ if self.converter:
+ for key, value in params.items():
+ result[key] = self.converter.quote(
+ self.converter.escape(
+ self.converter.to_mysql(value)
+ )
+ )
+ else:
+ for key, value in params.items():
+ result[key] = self._cmysql.convert_to_mysql(value)[0]
+ else:
+ raise errors.ProgrammingError(
+ f"Could not process parameters: {type(params).__name__}({params}),"
+ " it must be of type list, tuple or dict")
+
+ return result
+
+ def consume_results(self):
+ """Consume the current result
+
+        This method consumes the result by reading (consuming) all rows.
+ """
+ self._cmysql.consume_result()
+
+ def cmd_change_user(self, username='', password='', database='',
+ charset=45, password1='', password2='', password3='',
+ oci_config_file=None):
+ """Change the current logged in user"""
+ try:
+ self._cmysql.change_user(
+ username,
+ password,
+ database,
+ password1,
+ password2,
+ password3,
+ oci_config_file)
+
+ except MySQLInterfaceError as exc:
+ raise errors.get_mysql_exception(msg=exc.msg, errno=exc.errno,
+ sqlstate=exc.sqlstate)
+
+ self._charset_id = charset
+ self._post_connection()
+
+ def cmd_reset_connection(self):
+ """Resets the session state without re-authenticating
+
+ Works only for MySQL server 5.7.3 or later.
+ """
+ if self._server_version < (5, 7, 3):
+ raise errors.NotSupportedError("MySQL version 5.7.2 and "
+ "earlier does not support "
+ "COM_RESET_CONNECTION.")
+ try:
+ self._cmysql.reset_connection()
+ except MySQLInterfaceError as exc:
+ raise errors.get_mysql_exception(msg=exc.msg, errno=exc.errno,
+ sqlstate=exc.sqlstate)
+
+ self._post_connection()
+
+ def cmd_refresh(self, options):
+ """Send the Refresh command to the MySQL server"""
+ try:
+ self._cmysql.refresh(options)
+ except MySQLInterfaceError as exc:
+ raise errors.get_mysql_exception(msg=exc.msg, errno=exc.errno,
+ sqlstate=exc.sqlstate)
+
+ return self.fetch_eof_status()
+
+ def cmd_quit(self):
+ """Close the current connection with the server"""
+ self.close()
+
+ def cmd_shutdown(self, shutdown_type=None):
+ """Shut down the MySQL Server"""
+ if not self._cmysql:
+ raise errors.OperationalError("MySQL Connection not available")
+
+ if shutdown_type:
+ if not ShutdownType.get_info(shutdown_type):
+ raise errors.InterfaceError("Invalid shutdown type")
+ level = shutdown_type
+ else:
+ level = ShutdownType.SHUTDOWN_DEFAULT
+
+ try:
+ self._cmysql.shutdown(level)
+ except MySQLInterfaceError as exc:
+ raise errors.get_mysql_exception(msg=exc.msg, errno=exc.errno,
+ sqlstate=exc.sqlstate)
+ self.close()
+
+ def cmd_statistics(self):
+ """Return statistics from the MySQL server"""
+ self.handle_unread_result()
+
+ try:
+ stat = self._cmysql.stat()
+ return MySQLProtocol().parse_statistics(stat, with_header=False)
+ except (MySQLInterfaceError, errors.InterfaceError) as exc:
+ raise errors.get_mysql_exception(msg=exc.msg, errno=exc.errno,
+ sqlstate=exc.sqlstate)
+
+ def cmd_process_kill(self, mysql_pid):
+ """Kill a MySQL process"""
+ if not isinstance(mysql_pid, int):
+ raise ValueError("MySQL PID must be int")
+ self.info_query("KILL {0}".format(mysql_pid))
+
+ def handle_unread_result(self, prepared=False):
+ """Check whether there is an unread result"""
+ unread_result = self._unread_result if prepared is True \
+ else self.unread_result
+ if self.can_consume_results:
+ self.consume_results()
+ elif unread_result:
+ raise errors.InternalError("Unread result found")
+
+ def reset_session(self, user_variables=None, session_variables=None):
+ """Clears the current active session
+
+ This method resets the session state, if the MySQL server is 5.7.3
+        or later, the active session will be reset without re-authenticating.
+ For other server versions session will be reset by re-authenticating.
+
+ It is possible to provide a sequence of variables and their values to
+ be set after clearing the session. This is possible for both user
+ defined variables and session variables.
+ This method takes two arguments user_variables and session_variables
+ which are dictionaries.
+
+ Raises OperationalError if not connected, InternalError if there are
+ unread results and InterfaceError on errors.
+ """
+ if not self.is_connected():
+ raise errors.OperationalError("MySQL Connection not available.")
+
+ try:
+ self.cmd_reset_connection()
+ except (errors.NotSupportedError, NotImplementedError):
+ if self._compress:
+ raise errors.NotSupportedError(
+ "Reset session is not supported with compression for "
+ "MySQL server version 5.7.2 or earlier.")
+ elif self._server_version < (5, 7, 3):
+ raise errors.NotSupportedError(
+ "Reset session is not supported with MySQL server "
+ "version 5.7.2 or earlier.")
+ else:
+ self.cmd_change_user(self._user, self._password,
+ self._database, self._charset_id,
+ self._password1, self._password2,
+ self._password3, self._oci_config_file)
+
+ if user_variables or session_variables:
+ cur = self.cursor()
+ if user_variables:
+ for key, value in user_variables.items():
+ cur.execute("SET @`{0}` = %s".format(key), (value,))
+ if session_variables:
+ for key, value in session_variables.items():
+ cur.execute("SET SESSION `{0}` = %s".format(key), (value,))
+ cur.close()
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/constants.py b/monEnvTP/lib/python3.8/site-packages/mysql/connector/constants.py
new file mode 100644
index 0000000000000000000000000000000000000000..6c9dc8de4aba2862f1484576cc84f6c2e5c24ed3
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql/connector/constants.py
@@ -0,0 +1,1080 @@
+# Copyright (c) 2009, 2021, Oracle and/or its affiliates. All rights reserved.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""Various MySQL constants and character sets
+"""
+
+import ssl
+import warnings
+
+from .utils import make_abc
+from .errors import ProgrammingError
+from .charsets import MYSQL_CHARACTER_SETS
+
+MAX_PACKET_LENGTH = 16777215
+NET_BUFFER_LENGTH = 8192
+MAX_MYSQL_TABLE_COLUMNS = 4096
+# Flag used to send the Query Attributes with 0 (or more) parameters.
+PARAMETER_COUNT_AVAILABLE = 8
+
+DEFAULT_CONFIGURATION = {
+ 'database': None,
+ 'user': '',
+ 'password': '',
+ 'password1': '',
+ 'password2': '',
+ 'password3': '',
+ 'host': '127.0.0.1',
+ 'port': 3306,
+ 'unix_socket': None,
+ 'use_unicode': True,
+ 'charset': 'utf8mb4',
+ 'collation': None,
+ 'converter_class': None,
+ 'converter_str_fallback': False,
+ 'autocommit': False,
+ 'time_zone': None,
+ 'sql_mode': None,
+ 'get_warnings': False,
+ 'raise_on_warnings': False,
+ 'connection_timeout': None,
+ 'client_flags': 0,
+ 'compress': False,
+ 'buffered': False,
+ 'raw': False,
+ 'ssl_ca': None,
+ 'ssl_cert': None,
+ 'ssl_key': None,
+ 'ssl_verify_cert': False,
+ 'ssl_verify_identity': False,
+ 'ssl_cipher': None,
+ 'tls_ciphersuites': None,
+ 'ssl_disabled': False,
+ 'tls_versions': None,
+ 'passwd': None,
+ 'db': None,
+ 'connect_timeout': None,
+ 'dsn': None,
+ 'force_ipv6': False,
+ 'auth_plugin': None,
+ 'allow_local_infile': False,
+ 'allow_local_infile_in_path': None,
+ 'consume_results': False,
+ 'conn_attrs': None,
+ 'dns_srv': False,
+ 'use_pure': False,
+ 'krb_service_principal': None,
+ 'oci_config_file': None
+}
+
+CNX_POOL_ARGS = ('pool_name', 'pool_size', 'pool_reset_session')
+
+TLS_VERSIONS = ["TLSv1.2", "TLSv1.3"]
+
+DEPRECATED_TLS_VERSIONS = ["TLSv1", "TLSv1.1"]
+
+
+def flag_is_set(flag, flags):
+ """Checks if the flag is set
+
+ Returns boolean"""
+ if (flags & flag) > 0:
+ return True
+ return False
+
+
+def _obsolete_option(name, new_name, value):
+ warnings.warn('The option "{}" has been deprecated, use "{}" instead.'
+ ''.format(name, new_name), category=DeprecationWarning)
+ return value
+
+
+class _Constants(object):
+ """
+ Base class for constants
+ """
+ prefix = ''
+ desc = {}
+
+ def __new__(cls):
+ raise TypeError("Can not instanciate from %s" % cls.__name__)
+
+ @classmethod
+ def get_desc(cls, name):
+ """Get description of given constant"""
+ try:
+ return cls.desc[name][1]
+ except:
+ return None
+
+ @classmethod
+ def get_info(cls, setid):
+ """Get information about given constant"""
+ for name, info in cls.desc.items():
+ if info[0] == setid:
+ return name
+ return None
+
+ @classmethod
+ def get_full_info(cls):
+ """get full information about given constant"""
+ res = ()
+ try:
+ res = ["%s : %s" % (k, v[1]) for k, v in cls.desc.items()]
+ except Exception as err: # pylint: disable=W0703
+ res = ('No information found in constant class.%s' % err)
+
+ return res
+
+
+class _Flags(_Constants):
+ """Base class for classes describing flags
+ """
+
+ @classmethod
+ def get_bit_info(cls, value):
+ """Get the name of all bits set
+
+ Returns a list of strings."""
+ res = []
+ for name, info in cls.desc.items():
+ if value & info[0]:
+ res.append(name)
+ return res
+
+
+class FieldType(_Constants):
+ """MySQL Field Types
+ """
+ prefix = 'FIELD_TYPE_'
+ DECIMAL = 0x00
+ TINY = 0x01
+ SHORT = 0x02
+ LONG = 0x03
+ FLOAT = 0x04
+ DOUBLE = 0x05
+ NULL = 0x06
+ TIMESTAMP = 0x07
+ LONGLONG = 0x08
+ INT24 = 0x09
+ DATE = 0x0a
+ TIME = 0x0b
+ DATETIME = 0x0c
+ YEAR = 0x0d
+ NEWDATE = 0x0e
+ VARCHAR = 0x0f
+ BIT = 0x10
+ JSON = 0xf5
+ NEWDECIMAL = 0xf6
+ ENUM = 0xf7
+ SET = 0xf8
+ TINY_BLOB = 0xf9
+ MEDIUM_BLOB = 0xfa
+ LONG_BLOB = 0xfb
+ BLOB = 0xfc
+ VAR_STRING = 0xfd
+ STRING = 0xfe
+ GEOMETRY = 0xff
+
+ desc = {
+ 'DECIMAL': (0x00, 'DECIMAL'),
+ 'TINY': (0x01, 'TINY'),
+ 'SHORT': (0x02, 'SHORT'),
+ 'LONG': (0x03, 'LONG'),
+ 'FLOAT': (0x04, 'FLOAT'),
+ 'DOUBLE': (0x05, 'DOUBLE'),
+ 'NULL': (0x06, 'NULL'),
+ 'TIMESTAMP': (0x07, 'TIMESTAMP'),
+ 'LONGLONG': (0x08, 'LONGLONG'),
+ 'INT24': (0x09, 'INT24'),
+ 'DATE': (0x0a, 'DATE'),
+ 'TIME': (0x0b, 'TIME'),
+ 'DATETIME': (0x0c, 'DATETIME'),
+ 'YEAR': (0x0d, 'YEAR'),
+ 'NEWDATE': (0x0e, 'NEWDATE'),
+ 'VARCHAR': (0x0f, 'VARCHAR'),
+ 'BIT': (0x10, 'BIT'),
+ 'JSON': (0xf5, 'JSON'),
+ 'NEWDECIMAL': (0xf6, 'NEWDECIMAL'),
+ 'ENUM': (0xf7, 'ENUM'),
+ 'SET': (0xf8, 'SET'),
+ 'TINY_BLOB': (0xf9, 'TINY_BLOB'),
+ 'MEDIUM_BLOB': (0xfa, 'MEDIUM_BLOB'),
+ 'LONG_BLOB': (0xfb, 'LONG_BLOB'),
+ 'BLOB': (0xfc, 'BLOB'),
+ 'VAR_STRING': (0xfd, 'VAR_STRING'),
+ 'STRING': (0xfe, 'STRING'),
+ 'GEOMETRY': (0xff, 'GEOMETRY'),
+ }
+
+ @classmethod
+ def get_string_types(cls):
+ """Get the list of all string types"""
+ return [
+ cls.VARCHAR,
+ cls.ENUM,
+ cls.VAR_STRING, cls.STRING,
+ ]
+
+ @classmethod
+ def get_binary_types(cls):
+ """Get the list of all binary types"""
+ return [
+ cls.TINY_BLOB, cls.MEDIUM_BLOB,
+ cls.LONG_BLOB, cls.BLOB,
+ ]
+
+ @classmethod
+ def get_number_types(cls):
+ """Get the list of all number types"""
+ return [
+ cls.DECIMAL, cls.NEWDECIMAL,
+ cls.TINY, cls.SHORT, cls.LONG,
+ cls.FLOAT, cls.DOUBLE,
+ cls.LONGLONG, cls.INT24,
+ cls.BIT,
+ cls.YEAR,
+ ]
+
+ @classmethod
+ def get_timestamp_types(cls):
+ """Get the list of all timestamp types"""
+ return [
+ cls.DATETIME, cls.TIMESTAMP,
+ ]
+
+
class FieldFlag(_Flags):
    """MySQL Field Flags

    Field flags as found in MySQL sources mysql-src/include/mysql_com.h.
    Each flag is a single bit; a column's flags value is the bitwise OR
    of the applicable flags. The ``desc`` mapping pairs each flag name
    with its bit value and a human-readable description.
    """
    _prefix = ''
    NOT_NULL = 1 << 0
    PRI_KEY = 1 << 1
    UNIQUE_KEY = 1 << 2
    MULTIPLE_KEY = 1 << 3
    BLOB = 1 << 4
    UNSIGNED = 1 << 5
    ZEROFILL = 1 << 6
    BINARY = 1 << 7

    ENUM = 1 << 8
    AUTO_INCREMENT = 1 << 9
    TIMESTAMP = 1 << 10
    SET = 1 << 11

    NO_DEFAULT_VALUE = 1 << 12
    ON_UPDATE_NOW = 1 << 13
    NUM = 1 << 14
    PART_KEY = 1 << 15
    GROUP = 1 << 14 # deliberately shares NUM's bit, mirroring mysql_com.h
    UNIQUE = 1 << 16
    BINCMP = 1 << 17

    GET_FIXED_FIELDS = 1 << 18
    FIELD_IN_PART_FUNC = 1 << 19
    FIELD_IN_ADD_INDEX = 1 << 20
    FIELD_IS_RENAMED = 1 << 21

    desc = {
        'NOT_NULL': (1 << 0, "Field can't be NULL"),
        'PRI_KEY': (1 << 1, "Field is part of a primary key"),
        'UNIQUE_KEY': (1 << 2, "Field is part of a unique key"),
        'MULTIPLE_KEY': (1 << 3, "Field is part of a key"),
        'BLOB': (1 << 4, "Field is a blob"),
        'UNSIGNED': (1 << 5, "Field is unsigned"),
        'ZEROFILL': (1 << 6, "Field is zerofill"),
        'BINARY': (1 << 7, "Field is binary "),
        'ENUM': (1 << 8, "field is an enum"),
        'AUTO_INCREMENT': (1 << 9, "field is a autoincrement field"),
        'TIMESTAMP': (1 << 10, "Field is a timestamp"),
        'SET': (1 << 11, "field is a set"),
        'NO_DEFAULT_VALUE': (1 << 12, "Field doesn't have default value"),
        'ON_UPDATE_NOW': (1 << 13, "Field is set to NOW on UPDATE"),
        'NUM': (1 << 14, "Field is num (for clients)"),

        'PART_KEY': (1 << 15, "Intern; Part of some key"),
        'GROUP': (1 << 14, "Intern: Group field"), # Same as NUM
        'UNIQUE': (1 << 16, "Intern: Used by sql_yacc"),
        'BINCMP': (1 << 17, "Intern: Used by sql_yacc"),
        'GET_FIXED_FIELDS': (1 << 18, "Used to get fields in item tree"),
        'FIELD_IN_PART_FUNC': (1 << 19, "Field part of partition func"),
        'FIELD_IN_ADD_INDEX': (1 << 20, "Intern: Field used in ADD INDEX"),
        'FIELD_IS_RENAMED': (1 << 21, "Intern: Field is being renamed"),
    }
+
+
class ServerCmd(_Constants):
    """MySQL Server Commands

    COM_* command codes of the MySQL client/server protocol as found in
    the MySQL sources. The value is the command byte sent as the first
    payload byte of a command packet; ``desc`` maps each name to its
    code (the description simply repeats the name).
    """
    _prefix = 'COM_'
    SLEEP = 0
    QUIT = 1
    INIT_DB = 2
    QUERY = 3
    FIELD_LIST = 4
    CREATE_DB = 5
    DROP_DB = 6
    REFRESH = 7
    SHUTDOWN = 8
    STATISTICS = 9
    PROCESS_INFO = 10
    CONNECT = 11
    PROCESS_KILL = 12
    DEBUG = 13
    PING = 14
    TIME = 15
    DELAYED_INSERT = 16
    CHANGE_USER = 17
    BINLOG_DUMP = 18
    TABLE_DUMP = 19
    CONNECT_OUT = 20
    REGISTER_REPLICA = 21
    STMT_PREPARE = 22
    STMT_EXECUTE = 23
    STMT_SEND_LONG_DATA = 24
    STMT_CLOSE = 25
    STMT_RESET = 26
    SET_OPTION = 27
    STMT_FETCH = 28
    DAEMON = 29
    BINLOG_DUMP_GTID = 30
    RESET_CONNECTION = 31

    desc = {
        'SLEEP': (0, 'SLEEP'),
        'QUIT': (1, 'QUIT'),
        'INIT_DB': (2, 'INIT_DB'),
        'QUERY': (3, 'QUERY'),
        'FIELD_LIST': (4, 'FIELD_LIST'),
        'CREATE_DB': (5, 'CREATE_DB'),
        'DROP_DB': (6, 'DROP_DB'),
        'REFRESH': (7, 'REFRESH'),
        'SHUTDOWN': (8, 'SHUTDOWN'),
        'STATISTICS': (9, 'STATISTICS'),
        'PROCESS_INFO': (10, 'PROCESS_INFO'),
        'CONNECT': (11, 'CONNECT'),
        'PROCESS_KILL': (12, 'PROCESS_KILL'),
        'DEBUG': (13, 'DEBUG'),
        'PING': (14, 'PING'),
        'TIME': (15, 'TIME'),
        'DELAYED_INSERT': (16, 'DELAYED_INSERT'),
        'CHANGE_USER': (17, 'CHANGE_USER'),
        'BINLOG_DUMP': (18, 'BINLOG_DUMP'),
        'TABLE_DUMP': (19, 'TABLE_DUMP'),
        'CONNECT_OUT': (20, 'CONNECT_OUT'),
        'REGISTER_REPLICA': (21, 'REGISTER_REPLICA'),
        'STMT_PREPARE': (22, 'STMT_PREPARE'),
        'STMT_EXECUTE': (23, 'STMT_EXECUTE'),
        'STMT_SEND_LONG_DATA': (24, 'STMT_SEND_LONG_DATA'),
        'STMT_CLOSE': (25, 'STMT_CLOSE'),
        'STMT_RESET': (26, 'STMT_RESET'),
        'SET_OPTION': (27, 'SET_OPTION'),
        'STMT_FETCH': (28, 'STMT_FETCH'),
        'DAEMON': (29, 'DAEMON'),
        'BINLOG_DUMP_GTID': (30, 'BINLOG_DUMP_GTID'),
        'RESET_CONNECTION': (31, 'RESET_CONNECTION'),
    }
+
+
class ClientFlag(_Flags):
    """MySQL Client Flags

    Client options as found in the MySQL sources mysql-src/include/mysql_com.h

    Each attribute is a capability bit exchanged during the handshake.
    NOTE: ``SESION_TRACK`` keeps its historical (misspelled) name because
    callers reference it by this name; do not rename.
    """
    LONG_PASSWD = 1 << 0
    FOUND_ROWS = 1 << 1
    LONG_FLAG = 1 << 2
    CONNECT_WITH_DB = 1 << 3
    NO_SCHEMA = 1 << 4
    COMPRESS = 1 << 5
    ODBC = 1 << 6
    LOCAL_FILES = 1 << 7
    IGNORE_SPACE = 1 << 8
    PROTOCOL_41 = 1 << 9
    INTERACTIVE = 1 << 10
    SSL = 1 << 11
    IGNORE_SIGPIPE = 1 << 12
    TRANSACTIONS = 1 << 13
    RESERVED = 1 << 14
    SECURE_CONNECTION = 1 << 15
    MULTI_STATEMENTS = 1 << 16
    MULTI_RESULTS = 1 << 17
    PS_MULTI_RESULTS = 1 << 18
    PLUGIN_AUTH = 1 << 19
    CONNECT_ARGS = 1 << 20
    PLUGIN_AUTH_LENENC_CLIENT_DATA = 1 << 21
    CAN_HANDLE_EXPIRED_PASSWORDS = 1 << 22
    SESION_TRACK = 1 << 23
    DEPRECATE_EOF = 1 << 24
    CLIENT_QUERY_ATTRIBUTES = 1 << 27
    # Kept in ascending bit order (originally listed after REMEMBER_OPTIONS).
    MULTI_FACTOR_AUTHENTICATION = 1 << 28
    SSL_VERIFY_SERVER_CERT = 1 << 30
    REMEMBER_OPTIONS = 1 << 31

    desc = {
        'LONG_PASSWD': (1 << 0, 'New more secure passwords'),
        'FOUND_ROWS': (1 << 1, 'Found instead of affected rows'),
        'LONG_FLAG': (1 << 2, 'Get all column flags'),
        'CONNECT_WITH_DB': (1 << 3, 'One can specify db on connect'),
        'NO_SCHEMA': (1 << 4, "Don't allow database.table.column"),
        'COMPRESS': (1 << 5, 'Can use compression protocol'),
        'ODBC': (1 << 6, 'ODBC client'),
        'LOCAL_FILES': (1 << 7, 'Can use LOAD DATA LOCAL'),
        'IGNORE_SPACE': (1 << 8, "Ignore spaces before ''"),
        'PROTOCOL_41': (1 << 9, 'New 4.1 protocol'),
        'INTERACTIVE': (1 << 10, 'This is an interactive client'),
        'SSL': (1 << 11, 'Switch to SSL after handshake'),
        'IGNORE_SIGPIPE': (1 << 12, 'IGNORE sigpipes'),
        'TRANSACTIONS': (1 << 13, 'Client knows about transactions'),
        'RESERVED': (1 << 14, 'Old flag for 4.1 protocol'),
        'SECURE_CONNECTION': (1 << 15, 'New 4.1 authentication'),
        'MULTI_STATEMENTS': (1 << 16, 'Enable/disable multi-stmt support'),
        'MULTI_RESULTS': (1 << 17, 'Enable/disable multi-results'),
        'PS_MULTI_RESULTS': (1 << 18, 'Multi-results in PS-protocol'),
        'PLUGIN_AUTH': (1 << 19, 'Client supports plugin authentication'),
        'CONNECT_ARGS': (1 << 20, 'Client supports connection attributes'),
        'PLUGIN_AUTH_LENENC_CLIENT_DATA': (1 << 21,
            'Enable authentication response packet to be larger than 255 bytes'),
        'CAN_HANDLE_EXPIRED_PASSWORDS': (1 << 22, "Don't close the connection for a connection with expired password"),
        'SESION_TRACK': (1 << 23, 'Capable of handling server state change information'),
        'DEPRECATE_EOF': (1 << 24, 'Client no longer needs EOF packet'),
        'CLIENT_QUERY_ATTRIBUTES': (1 << 27, 'Support optional extension for query parameters'),
        # Bug fix: this entry was missing, so the flag had no description
        # while every other flag attribute has a desc entry.
        'MULTI_FACTOR_AUTHENTICATION': (1 << 28, 'Client supports multi-factor authentication'),
        'SSL_VERIFY_SERVER_CERT': (1 << 30, ''),
        'REMEMBER_OPTIONS': (1 << 31, ''),
    }

    # Capabilities enabled by default when no explicit flags are given.
    default = [
        LONG_PASSWD,
        LONG_FLAG,
        CONNECT_WITH_DB,
        PROTOCOL_41,
        TRANSACTIONS,
        SECURE_CONNECTION,
        MULTI_STATEMENTS,
        MULTI_RESULTS,
        CONNECT_ARGS,
    ]

    @classmethod
    def get_default(cls):
        """Get the default client options set

        Returns an integer with the bits of all default client
        options OR-ed together."""
        flags = 0
        for option in cls.default:
            flags |= option
        return flags
+
+
class ServerFlag(_Flags):
    """MySQL Server Flags

    Server flags as found in the MySQL sources mysql-src/include/mysql_com.h

    Status bits the server reports in OK/EOF packets. NOTE(review): some
    attribute names redundantly carry the SERVER_ prefix even though
    ``_prefix`` is already 'SERVER_'; they are kept as-is because callers
    reference them by these exact names.
    """
    _prefix = 'SERVER_'
    STATUS_IN_TRANS = 1 << 0
    STATUS_AUTOCOMMIT = 1 << 1
    MORE_RESULTS_EXISTS = 1 << 3
    QUERY_NO_GOOD_INDEX_USED = 1 << 4
    QUERY_NO_INDEX_USED = 1 << 5
    STATUS_CURSOR_EXISTS = 1 << 6
    STATUS_LAST_ROW_SENT = 1 << 7
    STATUS_DB_DROPPED = 1 << 8
    STATUS_NO_BACKSLASH_ESCAPES = 1 << 9
    SERVER_STATUS_METADATA_CHANGED = 1 << 10
    SERVER_QUERY_WAS_SLOW = 1 << 11
    SERVER_PS_OUT_PARAMS = 1 << 12
    SERVER_STATUS_IN_TRANS_READONLY = 1 << 13
    SERVER_SESSION_STATE_CHANGED = 1 << 14

    desc = {
        'SERVER_STATUS_IN_TRANS': (1 << 0,
                                   'Transaction has started'),
        'SERVER_STATUS_AUTOCOMMIT': (1 << 1,
                                     'Server in auto_commit mode'),
        'SERVER_MORE_RESULTS_EXISTS': (1 << 3,
                                       'Multi query - '
                                       'next query exists'),
        'SERVER_QUERY_NO_GOOD_INDEX_USED': (1 << 4, ''),
        'SERVER_QUERY_NO_INDEX_USED': (1 << 5, ''),
        'SERVER_STATUS_CURSOR_EXISTS': (1 << 6,
                                        'Set when server opened a read-only '
                                        'non-scrollable cursor for a query.'),
        'SERVER_STATUS_LAST_ROW_SENT': (1 << 7,
                                        'Set when a read-only cursor is '
                                        'exhausted'),
        'SERVER_STATUS_DB_DROPPED': (1 << 8, 'A database was dropped'),
        'SERVER_STATUS_NO_BACKSLASH_ESCAPES': (1 << 9, ''),
        # Consistency fix: the following values used raw literals
        # (1024, 2048, 4096, 8192); rewritten as 1 << n like the rest.
        'SERVER_STATUS_METADATA_CHANGED': (1 << 10,
                                           'Set if after a prepared statement '
                                           'reprepare we discovered that the '
                                           'new statement returns a different '
                                           'number of result set columns.'),
        'SERVER_QUERY_WAS_SLOW': (1 << 11, ''),
        'SERVER_PS_OUT_PARAMS': (1 << 12,
                                 'To mark ResultSet containing output '
                                 'parameter values.'),
        'SERVER_STATUS_IN_TRANS_READONLY': (1 << 13,
                                            'Set if multi-statement '
                                            'transaction is a read-only '
                                            'transaction.'),
        'SERVER_SESSION_STATE_CHANGED': (1 << 14,
                                         'Session state has changed on the '
                                         'server because of the execution of '
                                         'the last statement'),
    }
+
+
class RefreshOption_meta(type):
    """Metaclass for RefreshOption exposing the deprecated ``SLAVE`` alias."""
    @property
    def SLAVE(self):
        # Deprecated spelling; delegates to REPLICA via _obsolete_option
        # (defined elsewhere in this module — presumably emits a
        # deprecation warning before returning the replacement value).
        return _obsolete_option("RefreshOption.SLAVE", "RefreshOption.REPLICA",
                                RefreshOption.REPLICA)
+
@make_abc(RefreshOption_meta)
class RefreshOption(_Constants):
    """MySQL Refresh command options

    Options used when sending the COM_REFRESH server command.
    Each option is a single bit; options can be OR-ed together.
    """
    _prefix = 'REFRESH_'
    GRANT = 1 << 0
    LOG = 1 << 1
    TABLES = 1 << 2
    HOST = 1 << 3
    STATUS = 1 << 4
    THREADS = 1 << 5
    REPLICA = 1 << 6

    desc = {
        'GRANT': (1 << 0, 'Refresh grant tables'),
        'LOG': (1 << 1, 'Start on new log file'),
        'TABLES': (1 << 2, 'close all tables'),
        'HOST': (1 << 3, 'Flush host cache'),
        'STATUS': (1 << 4, 'Flush status variables'),
        'THREADS': (1 << 5, 'Flush thread cache'),
        'REPLICA': (1 << 6, 'Reset source info and restart replica thread'),
        # SLAVE deliberately shares REPLICA's bit; the attribute itself is
        # provided by the RefreshOption_meta metaclass property above.
        'SLAVE': (1 << 6, 'Deprecated option; use REPLICA instead.'),
    }
+
+
class ShutdownType(_Constants):
    """MySQL Shutdown types

    Shutdown types used by the COM_SHUTDOWN server command.
    Unlike the flag classes these are discrete levels, not bit flags
    (note KILL_QUERY/KILL_CONNECTION use the high byte values 254/255).
    """
    _prefix = ''
    SHUTDOWN_DEFAULT = 0
    SHUTDOWN_WAIT_CONNECTIONS = 1
    SHUTDOWN_WAIT_TRANSACTIONS = 2
    SHUTDOWN_WAIT_UPDATES = 8
    SHUTDOWN_WAIT_ALL_BUFFERS = 16
    SHUTDOWN_WAIT_CRITICAL_BUFFERS = 17
    KILL_QUERY = 254
    KILL_CONNECTION = 255

    desc = {
        'SHUTDOWN_DEFAULT': (
            SHUTDOWN_DEFAULT,
            "defaults to SHUTDOWN_WAIT_ALL_BUFFERS"),
        'SHUTDOWN_WAIT_CONNECTIONS': (
            SHUTDOWN_WAIT_CONNECTIONS,
            "wait for existing connections to finish"),
        'SHUTDOWN_WAIT_TRANSACTIONS': (
            SHUTDOWN_WAIT_TRANSACTIONS,
            "wait for existing trans to finish"),
        'SHUTDOWN_WAIT_UPDATES': (
            SHUTDOWN_WAIT_UPDATES,
            "wait for existing updates to finish"),
        'SHUTDOWN_WAIT_ALL_BUFFERS': (
            SHUTDOWN_WAIT_ALL_BUFFERS,
            "flush InnoDB and other storage engine buffers"),
        'SHUTDOWN_WAIT_CRITICAL_BUFFERS': (
            SHUTDOWN_WAIT_CRITICAL_BUFFERS,
            "don't flush InnoDB buffers, "
            "flush other storage engines' buffers"),
        'KILL_QUERY': (
            KILL_QUERY,
            "(no description)"),
        'KILL_CONNECTION': (
            KILL_CONNECTION,
            "(no description)"),
    }
+
+
class CharacterSet(_Constants):
    """MySQL supported character sets and collations

    List of character sets with their collations supported by MySQL. This
    maps to the character set we get from the server within the handshake
    packet.

    The list is hardcoded so we avoid a database query when getting the
    name of the used character set or collation.
    """
    # Indexed by MySQL character set ID; entries may be None (gaps).
    desc = MYSQL_CHARACTER_SETS

    # Multi-byte character sets which use 5c (backslash) in characters
    slash_charsets = (1, 13, 28, 84, 87, 88)

    @classmethod
    def get_info(cls, setid):
        """Retrieves character set information as tuple using an ID

        Retrieves character set and collation information based on the
        given MySQL ID.

        Raises ProgrammingError when character set is not supported.

        Returns a tuple (character set name, collation name).
        """
        try:
            return cls.desc[setid][0:2]
        except (IndexError, TypeError) as err:
            # TypeError covers None gaps in the desc list, which the
            # original IndexError-only handler let escape uncaught.
            raise ProgrammingError(
                "Character set '{0}' unsupported".format(setid)) from err

    @classmethod
    def get_desc(cls, name):
        """Retrieves character set information as string using an ID

        Returns a 'charset/collation' string for the given MySQL ID.
        Raises ProgrammingError when the character set is not supported.
        """
        # The original wrapped this in a no-op try/except that only
        # re-raised; calling straight through is equivalent.
        return "%s/%s" % cls.get_info(name)

    @classmethod
    def get_default_collation(cls, charset):
        """Retrieves the default collation for given character set

        Accepts either a numeric MySQL character set ID or a character
        set name.

        Raises ProgrammingError when character set is not supported.

        Returns a tuple (collation, charset, index).
        """
        if isinstance(charset, int):
            try:
                info = cls.desc[charset]
                return info[1], info[0], charset
            except (IndexError, TypeError) as err:
                # Bug fix: the ProgrammingError was constructed but never
                # raised, so invalid IDs silently fell through to the
                # name-based lookup below (and a misleading message).
                raise ProgrammingError(
                    "Character set ID '%s' unsupported." % (charset,)) from err

        for cid, info in enumerate(cls.desc):
            if info is None:
                continue
            # info[2] marks the default collation for the character set.
            if info[0] == charset and info[2] is True:
                return info[1], info[0], cid

        raise ProgrammingError("Character set '%s' unsupported." % (charset,))

    @classmethod
    def get_charset_info(cls, charset=None, collation=None):
        """Get character set information using charset name and/or collation

        Retrieves character set and collation information given character
        set name and/or a collation name.
        If charset is an integer, it will look up the character set based
        on the MySQL's ID.
        For example:
            get_charset_info('utf8',None)
            get_charset_info(collation='utf8_general_ci')
            get_charset_info(47)

        Raises ProgrammingError when character set is not supported.

        Returns a tuple with (id, characterset name, collation)
        """
        if isinstance(charset, int):
            try:
                info = cls.desc[charset]
                return (charset, info[0], info[1])
            except (IndexError, TypeError) as err:
                # Bug fix: the ProgrammingError was constructed but never
                # raised; TypeError additionally covers None gaps in desc.
                raise ProgrammingError(
                    "Character set ID {0} unknown.".format(charset)) from err

        if charset is not None and collation is None:
            info = cls.get_default_collation(charset)
            return (info[2], info[1], info[0])

        if charset is None and collation is not None:
            for cid, info in enumerate(cls.desc):
                if info is None:
                    continue
                if collation == info[1]:
                    return (cid, info[0], info[1])
            raise ProgrammingError("Collation '{0}' unknown.".format(collation))

        # Both (or neither) given: look for the exact charset/collation pair.
        for cid, info in enumerate(cls.desc):
            if info is None:
                continue
            if info[0] == charset and info[1] == collation:
                return (cid, info[0], info[1])
        # Raises with a charset-specific message if the charset is unknown.
        _ = cls.get_default_collation(charset)
        raise ProgrammingError("Collation '{0}' unknown.".format(collation))

    @classmethod
    def get_supported(cls):
        """Retrieves a list with names of all supported character sets

        Returns a tuple.
        """
        res = []
        for info in cls.desc:
            if info and info[0] not in res:
                res.append(info[0])
        return tuple(res)
+
+
class SQLMode(_Constants):
    """MySQL SQL Modes

    The numeric values of SQL Modes are not interesting, only the names
    are used when setting the SQL_MODE system variable using the MySQL
    SET command.

    See http://dev.mysql.com/doc/refman/5.6/en/server-sql-mode.html
    """
    _prefix = 'MODE_'
    REAL_AS_FLOAT = 'REAL_AS_FLOAT'
    PIPES_AS_CONCAT = 'PIPES_AS_CONCAT'
    ANSI_QUOTES = 'ANSI_QUOTES'
    IGNORE_SPACE = 'IGNORE_SPACE'
    NOT_USED = 'NOT_USED'
    ONLY_FULL_GROUP_BY = 'ONLY_FULL_GROUP_BY'
    NO_UNSIGNED_SUBTRACTION = 'NO_UNSIGNED_SUBTRACTION'
    NO_DIR_IN_CREATE = 'NO_DIR_IN_CREATE'
    POSTGRESQL = 'POSTGRESQL'
    ORACLE = 'ORACLE'
    MSSQL = 'MSSQL'
    DB2 = 'DB2'
    MAXDB = 'MAXDB'
    NO_KEY_OPTIONS = 'NO_KEY_OPTIONS'
    NO_TABLE_OPTIONS = 'NO_TABLE_OPTIONS'
    NO_FIELD_OPTIONS = 'NO_FIELD_OPTIONS'
    MYSQL323 = 'MYSQL323'
    MYSQL40 = 'MYSQL40'
    ANSI = 'ANSI'
    NO_AUTO_VALUE_ON_ZERO = 'NO_AUTO_VALUE_ON_ZERO'
    NO_BACKSLASH_ESCAPES = 'NO_BACKSLASH_ESCAPES'
    STRICT_TRANS_TABLES = 'STRICT_TRANS_TABLES'
    STRICT_ALL_TABLES = 'STRICT_ALL_TABLES'
    NO_ZERO_IN_DATE = 'NO_ZERO_IN_DATE'
    NO_ZERO_DATE = 'NO_ZERO_DATE'
    INVALID_DATES = 'INVALID_DATES'
    ERROR_FOR_DIVISION_BY_ZERO = 'ERROR_FOR_DIVISION_BY_ZERO'
    TRADITIONAL = 'TRADITIONAL'
    NO_AUTO_CREATE_USER = 'NO_AUTO_CREATE_USER'
    HIGH_NOT_PRECEDENCE = 'HIGH_NOT_PRECEDENCE'
    NO_ENGINE_SUBSTITUTION = 'NO_ENGINE_SUBSTITUTION'
    PAD_CHAR_TO_FULL_LENGTH = 'PAD_CHAR_TO_FULL_LENGTH'

    @classmethod
    def get_desc(cls, name):
        # SQL modes carry no per-name descriptions.
        raise NotImplementedError

    @classmethod
    def get_info(cls, setid):
        # SQL modes are not addressed by numeric IDs.
        raise NotImplementedError

    @classmethod
    def get_full_info(cls):
        """Returns a sequence of all available SQL Modes

        This class method returns a tuple containing all SQL Mode names.
        The names will be alphabetically sorted.

        Returns a tuple.
        """
        # Keep every non-private, non-callable class attribute; getattr()
        # is used (not the raw vars() value) so classmethods are seen as
        # callable and excluded, matching the original behavior.
        mode_names = [
            attr for attr in vars(cls)
            if not attr.startswith('_')
            and not callable(getattr(cls, attr))
        ]
        return tuple(sorted(mode_names))
+
# Default connection-attribute names; presumably the keys sent as the
# handshake's connect-attributes — verify against the caller.
CONN_ATTRS_DN = ["_pid", "_platform", "_source_host", "_client_name",
                 "_client_license", "_client_version", "_os", "_connector_name",
                 "_connector_license", "_connector_version"]
+
# TLS v1.0 cipher suites IANA to OpenSSL name translation
+TLSV1_CIPHER_SUITES = {
+ "TLS_RSA_WITH_NULL_MD5": "NULL-MD5",
+ "TLS_RSA_WITH_NULL_SHA": "NULL-SHA",
+ "TLS_RSA_WITH_RC4_128_MD5": "RC4-MD5",
+ "TLS_RSA_WITH_RC4_128_SHA": "RC4-SHA",
+ "TLS_RSA_WITH_IDEA_CBC_SHA": "IDEA-CBC-SHA",
+ "TLS_RSA_WITH_3DES_EDE_CBC_SHA": "DES-CBC3-SHA",
+
+ "TLS_DH_DSS_WITH_3DES_EDE_CBC_SHA": "Not implemented.",
+ "TLS_DH_RSA_WITH_3DES_EDE_CBC_SHA": "Not implemented.",
+ "TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA": "DHE-DSS-DES-CBC3-SHA",
+ "TLS_DHE_RSA_WITH_3DES_EDE_CBC_SHA": "DHE-RSA-DES-CBC3-SHA",
+
+ "TLS_DH_anon_WITH_RC4_128_MD5": "ADH-RC4-MD5",
+ "TLS_DH_anon_WITH_3DES_EDE_CBC_SHA": "ADH-DES-CBC3-SHA",
+
+ # AES cipher suites from RFC3268, extending TLS v1.0
+ "TLS_RSA_WITH_AES_128_CBC_SHA": "AES128-SHA",
+ "TLS_RSA_WITH_AES_256_CBC_SHA": "AES256-SHA",
+
+ "TLS_DH_DSS_WITH_AES_128_CBC_SHA": "DH-DSS-AES128-SHA",
+ "TLS_DH_DSS_WITH_AES_256_CBC_SHA": "DH-DSS-AES256-SHA",
+ "TLS_DH_RSA_WITH_AES_128_CBC_SHA": "DH-RSA-AES128-SHA",
+ "TLS_DH_RSA_WITH_AES_256_CBC_SHA": "DH-RSA-AES256-SHA",
+
+ "TLS_DHE_DSS_WITH_AES_128_CBC_SHA": "DHE-DSS-AES128-SHA",
+ "TLS_DHE_DSS_WITH_AES_256_CBC_SHA": "DHE-DSS-AES256-SHA",
+ "TLS_DHE_RSA_WITH_AES_128_CBC_SHA": "DHE-RSA-AES128-SHA",
+ "TLS_DHE_RSA_WITH_AES_256_CBC_SHA": "DHE-RSA-AES256-SHA",
+
+ "TLS_DH_anon_WITH_AES_128_CBC_SHA": "ADH-AES128-SHA",
+ "TLS_DH_anon_WITH_AES_256_CBC_SHA": "ADH-AES256-SHA",
+
+ # Camellia cipher suites from RFC4132, extending TLS v1.0
+ "TLS_RSA_WITH_CAMELLIA_128_CBC_SHA": "CAMELLIA128-SHA",
+ "TLS_RSA_WITH_CAMELLIA_256_CBC_SHA": "CAMELLIA256-SHA",
+
+ "TLS_DH_DSS_WITH_CAMELLIA_128_CBC_SHA": "DH-DSS-CAMELLIA128-SHA",
+ "TLS_DH_DSS_WITH_CAMELLIA_256_CBC_SHA": "DH-DSS-CAMELLIA256-SHA",
+ "TLS_DH_RSA_WITH_CAMELLIA_128_CBC_SHA": "DH-RSA-CAMELLIA128-SHA",
+ "TLS_DH_RSA_WITH_CAMELLIA_256_CBC_SHA": "DH-RSA-CAMELLIA256-SHA",
+
+ "TLS_DHE_DSS_WITH_CAMELLIA_128_CBC_SHA": "DHE-DSS-CAMELLIA128-SHA",
+ "TLS_DHE_DSS_WITH_CAMELLIA_256_CBC_SHA": "DHE-DSS-CAMELLIA256-SHA",
+ "TLS_DHE_RSA_WITH_CAMELLIA_128_CBC_SHA": "DHE-RSA-CAMELLIA128-SHA",
+ "TLS_DHE_RSA_WITH_CAMELLIA_256_CBC_SHA": "DHE-RSA-CAMELLIA256-SHA",
+
+ "TLS_DH_anon_WITH_CAMELLIA_128_CBC_SHA": "ADH-CAMELLIA128-SHA",
+ "TLS_DH_anon_WITH_CAMELLIA_256_CBC_SHA": "ADH-CAMELLIA256-SHA",
+
+ # SEED cipher suites from RFC4162, extending TLS v1.0
+ "TLS_RSA_WITH_SEED_CBC_SHA": "SEED-SHA",
+
+ "TLS_DH_DSS_WITH_SEED_CBC_SHA": "DH-DSS-SEED-SHA",
+ "TLS_DH_RSA_WITH_SEED_CBC_SHA": "DH-RSA-SEED-SHA",
+
+ "TLS_DHE_DSS_WITH_SEED_CBC_SHA": "DHE-DSS-SEED-SHA",
+ "TLS_DHE_RSA_WITH_SEED_CBC_SHA": "DHE-RSA-SEED-SHA",
+
+ "TLS_DH_anon_WITH_SEED_CBC_SHA": "ADH-SEED-SHA",
+
+ # GOST cipher suites from draft-chudov-cryptopro-cptls, extending TLS v1.0
+ "TLS_GOSTR341094_WITH_28147_CNT_IMIT": "GOST94-GOST89-GOST89",
+ "TLS_GOSTR341001_WITH_28147_CNT_IMIT": "GOST2001-GOST89-GOST89",
+ "TLS_GOSTR341094_WITH_NULL_GOSTR3411": "GOST94-NULL-GOST94",
+ "TLS_GOSTR341001_WITH_NULL_GOSTR3411": "GOST2001-NULL-GOST94"}
+
# TLS v1.1 cipher suites IANA to OpenSSL name translation.
# NOTE: this is an alias of (not a copy of) the TLS v1.0 table — TLS v1.1
# introduced no new cipher suites; mutating one mutates the other.
TLSV1_1_CIPHER_SUITES = TLSV1_CIPHER_SUITES
+
# TLS v1.2 cipher suites IANA to OpenSSL name translation
+TLSV1_2_CIPHER_SUITES = {
+ "TLS_RSA_WITH_NULL_SHA256": "NULL-SHA256",
+
+ "TLS_RSA_WITH_AES_128_CBC_SHA256": "AES128-SHA256",
+ "TLS_RSA_WITH_AES_256_CBC_SHA256": "AES256-SHA256",
+ "TLS_RSA_WITH_AES_128_GCM_SHA256": "AES128-GCM-SHA256",
+ "TLS_RSA_WITH_AES_256_GCM_SHA384": "AES256-GCM-SHA384",
+
+ "TLS_DH_RSA_WITH_AES_128_CBC_SHA256": "DH-RSA-AES128-SHA256",
+ "TLS_DH_RSA_WITH_AES_256_CBC_SHA256": "DH-RSA-AES256-SHA256",
+ "TLS_DH_RSA_WITH_AES_128_GCM_SHA256": "DH-RSA-AES128-GCM-SHA256",
+ "TLS_DH_RSA_WITH_AES_256_GCM_SHA384": "DH-RSA-AES256-GCM-SHA384",
+
+ "TLS_DH_DSS_WITH_AES_128_CBC_SHA256": "DH-DSS-AES128-SHA256",
+ "TLS_DH_DSS_WITH_AES_256_CBC_SHA256": "DH-DSS-AES256-SHA256",
+ "TLS_DH_DSS_WITH_AES_128_GCM_SHA256": "DH-DSS-AES128-GCM-SHA256",
+ "TLS_DH_DSS_WITH_AES_256_GCM_SHA384": "DH-DSS-AES256-GCM-SHA384",
+
+ "TLS_DHE_RSA_WITH_AES_128_CBC_SHA256": "DHE-RSA-AES128-SHA256",
+ "TLS_DHE_RSA_WITH_AES_256_CBC_SHA256": "DHE-RSA-AES256-SHA256",
+ "TLS_DHE_RSA_WITH_AES_128_GCM_SHA256": "DHE-RSA-AES128-GCM-SHA256",
+ "TLS_DHE_RSA_WITH_AES_256_GCM_SHA384": "DHE-RSA-AES256-GCM-SHA384",
+
+ "TLS_DHE_DSS_WITH_AES_128_CBC_SHA256": "DHE-DSS-AES128-SHA256",
+ "TLS_DHE_DSS_WITH_AES_256_CBC_SHA256": "DHE-DSS-AES256-SHA256",
+ "TLS_DHE_DSS_WITH_AES_128_GCM_SHA256": "DHE-DSS-AES128-GCM-SHA256",
+ "TLS_DHE_DSS_WITH_AES_256_GCM_SHA384": "DHE-DSS-AES256-GCM-SHA384",
+
+ "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256": "ECDHE-RSA-AES128-SHA256",
+ "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384": "ECDHE-RSA-AES256-SHA384",
+ "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256": "ECDHE-RSA-AES128-GCM-SHA256",
+ "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384": "ECDHE-RSA-AES256-GCM-SHA384",
+
+ "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256": "ECDHE-ECDSA-AES128-SHA256",
+ "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384": "ECDHE-ECDSA-AES256-SHA384",
+ "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256": "ECDHE-ECDSA-AES128-GCM-SHA256",
+ "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384": "ECDHE-ECDSA-AES256-GCM-SHA384",
+
+ "TLS_DH_anon_WITH_AES_128_CBC_SHA256": "ADH-AES128-SHA256",
+ "TLS_DH_anon_WITH_AES_256_CBC_SHA256": "ADH-AES256-SHA256",
+ "TLS_DH_anon_WITH_AES_128_GCM_SHA256": "ADH-AES128-GCM-SHA256",
+ "TLS_DH_anon_WITH_AES_256_GCM_SHA384": "ADH-AES256-GCM-SHA384",
+
+ "RSA_WITH_AES_128_CCM": "AES128-CCM",
+ "RSA_WITH_AES_256_CCM": "AES256-CCM",
+ "DHE_RSA_WITH_AES_128_CCM": "DHE-RSA-AES128-CCM",
+ "DHE_RSA_WITH_AES_256_CCM": "DHE-RSA-AES256-CCM",
+ "RSA_WITH_AES_128_CCM_8": "AES128-CCM8",
+ "RSA_WITH_AES_256_CCM_8": "AES256-CCM8",
+ "DHE_RSA_WITH_AES_128_CCM_8": "DHE-RSA-AES128-CCM8",
+ "DHE_RSA_WITH_AES_256_CCM_8": "DHE-RSA-AES256-CCM8",
+ "ECDHE_ECDSA_WITH_AES_128_CCM": "ECDHE-ECDSA-AES128-CCM",
+ "ECDHE_ECDSA_WITH_AES_256_CCM": "ECDHE-ECDSA-AES256-CCM",
+ "ECDHE_ECDSA_WITH_AES_128_CCM_8": "ECDHE-ECDSA-AES128-CCM8",
+ "ECDHE_ECDSA_WITH_AES_256_CCM_8": "ECDHE-ECDSA-AES256-CCM8",
+
+ # ARIA cipher suites from RFC6209, extending TLS v1.2
+ "TLS_RSA_WITH_ARIA_128_GCM_SHA256": "ARIA128-GCM-SHA256",
+ "TLS_RSA_WITH_ARIA_256_GCM_SHA384": "ARIA256-GCM-SHA384",
+ "TLS_DHE_RSA_WITH_ARIA_128_GCM_SHA256": "DHE-RSA-ARIA128-GCM-SHA256",
+ "TLS_DHE_RSA_WITH_ARIA_256_GCM_SHA384": "DHE-RSA-ARIA256-GCM-SHA384",
+ "TLS_DHE_DSS_WITH_ARIA_128_GCM_SHA256": "DHE-DSS-ARIA128-GCM-SHA256",
+ "TLS_DHE_DSS_WITH_ARIA_256_GCM_SHA384": "DHE-DSS-ARIA256-GCM-SHA384",
+ "TLS_ECDHE_ECDSA_WITH_ARIA_128_GCM_SHA256": "ECDHE-ECDSA-ARIA128-GCM-SHA256",
+ "TLS_ECDHE_ECDSA_WITH_ARIA_256_GCM_SHA384": "ECDHE-ECDSA-ARIA256-GCM-SHA384",
+ "TLS_ECDHE_RSA_WITH_ARIA_128_GCM_SHA256": "ECDHE-ARIA128-GCM-SHA256",
+ "TLS_ECDHE_RSA_WITH_ARIA_256_GCM_SHA384": "ECDHE-ARIA256-GCM-SHA384",
+ "TLS_PSK_WITH_ARIA_128_GCM_SHA256": "PSK-ARIA128-GCM-SHA256",
+ "TLS_PSK_WITH_ARIA_256_GCM_SHA384": "PSK-ARIA256-GCM-SHA384",
+ "TLS_DHE_PSK_WITH_ARIA_128_GCM_SHA256": "DHE-PSK-ARIA128-GCM-SHA256",
+ "TLS_DHE_PSK_WITH_ARIA_256_GCM_SHA384": "DHE-PSK-ARIA256-GCM-SHA384",
+ "TLS_RSA_PSK_WITH_ARIA_128_GCM_SHA256": "RSA-PSK-ARIA128-GCM-SHA256",
+ "TLS_RSA_PSK_WITH_ARIA_256_GCM_SHA384": "RSA-PSK-ARIA256-GCM-SHA384",
+
+ # Camellia HMAC-Based cipher suites from RFC6367, extending TLS v1.2
+ "TLS_ECDHE_ECDSA_WITH_CAMELLIA_128_CBC_SHA256": "ECDHE-ECDSA-CAMELLIA128-SHA256",
+ "TLS_ECDHE_ECDSA_WITH_CAMELLIA_256_CBC_SHA384": "ECDHE-ECDSA-CAMELLIA256-SHA384",
+ "TLS_ECDHE_RSA_WITH_CAMELLIA_128_CBC_SHA256": "ECDHE-RSA-CAMELLIA128-SHA256",
+ "TLS_ECDHE_RSA_WITH_CAMELLIA_256_CBC_SHA384": "ECDHE-RSA-CAMELLIA256-SHA384",
+
    # Pre-shared keying (PSK) cipher suites
+ "PSK_WITH_NULL_SHA": "PSK-NULL-SHA",
+ "DHE_PSK_WITH_NULL_SHA": "DHE-PSK-NULL-SHA",
+ "RSA_PSK_WITH_NULL_SHA": "RSA-PSK-NULL-SHA",
+
+ "PSK_WITH_RC4_128_SHA": "PSK-RC4-SHA",
+ "PSK_WITH_3DES_EDE_CBC_SHA": "PSK-3DES-EDE-CBC-SHA",
+ "PSK_WITH_AES_128_CBC_SHA": "PSK-AES128-CBC-SHA",
+ "PSK_WITH_AES_256_CBC_SHA": "PSK-AES256-CBC-SHA",
+
+ "DHE_PSK_WITH_RC4_128_SHA": "DHE-PSK-RC4-SHA",
+ "DHE_PSK_WITH_3DES_EDE_CBC_SHA": "DHE-PSK-3DES-EDE-CBC-SHA",
+ "DHE_PSK_WITH_AES_128_CBC_SHA": "DHE-PSK-AES128-CBC-SHA",
+ "DHE_PSK_WITH_AES_256_CBC_SHA": "DHE-PSK-AES256-CBC-SHA",
+
+ "RSA_PSK_WITH_RC4_128_SHA": "RSA-PSK-RC4-SHA",
+ "RSA_PSK_WITH_3DES_EDE_CBC_SHA": "RSA-PSK-3DES-EDE-CBC-SHA",
+ "RSA_PSK_WITH_AES_128_CBC_SHA": "RSA-PSK-AES128-CBC-SHA",
+ "RSA_PSK_WITH_AES_256_CBC_SHA": "RSA-PSK-AES256-CBC-SHA",
+
+ "PSK_WITH_AES_128_GCM_SHA256": "PSK-AES128-GCM-SHA256",
+ "PSK_WITH_AES_256_GCM_SHA384": "PSK-AES256-GCM-SHA384",
+ "DHE_PSK_WITH_AES_128_GCM_SHA256": "DHE-PSK-AES128-GCM-SHA256",
+ "DHE_PSK_WITH_AES_256_GCM_SHA384": "DHE-PSK-AES256-GCM-SHA384",
+ "RSA_PSK_WITH_AES_128_GCM_SHA256": "RSA-PSK-AES128-GCM-SHA256",
+ "RSA_PSK_WITH_AES_256_GCM_SHA384": "RSA-PSK-AES256-GCM-SHA384",
+
+ "PSK_WITH_AES_128_CBC_SHA256": "PSK-AES128-CBC-SHA256",
+ "PSK_WITH_AES_256_CBC_SHA384": "PSK-AES256-CBC-SHA384",
+ "PSK_WITH_NULL_SHA256": "PSK-NULL-SHA256",
+ "PSK_WITH_NULL_SHA384": "PSK-NULL-SHA384",
+ "DHE_PSK_WITH_AES_128_CBC_SHA256": "DHE-PSK-AES128-CBC-SHA256",
+ "DHE_PSK_WITH_AES_256_CBC_SHA384": "DHE-PSK-AES256-CBC-SHA384",
+ "DHE_PSK_WITH_NULL_SHA256": "DHE-PSK-NULL-SHA256",
+ "DHE_PSK_WITH_NULL_SHA384": "DHE-PSK-NULL-SHA384",
+ "RSA_PSK_WITH_AES_128_CBC_SHA256": "RSA-PSK-AES128-CBC-SHA256",
+ "RSA_PSK_WITH_AES_256_CBC_SHA384": "RSA-PSK-AES256-CBC-SHA384",
+ "RSA_PSK_WITH_NULL_SHA256": "RSA-PSK-NULL-SHA256",
+ "RSA_PSK_WITH_NULL_SHA384": "RSA-PSK-NULL-SHA384",
+
+ "ECDHE_PSK_WITH_RC4_128_SHA": "ECDHE-PSK-RC4-SHA",
+ "ECDHE_PSK_WITH_3DES_EDE_CBC_SHA": "ECDHE-PSK-3DES-EDE-CBC-SHA",
+ "ECDHE_PSK_WITH_AES_128_CBC_SHA": "ECDHE-PSK-AES128-CBC-SHA",
+ "ECDHE_PSK_WITH_AES_256_CBC_SHA": "ECDHE-PSK-AES256-CBC-SHA",
+ "ECDHE_PSK_WITH_AES_128_CBC_SHA256": "ECDHE-PSK-AES128-CBC-SHA256",
+ "ECDHE_PSK_WITH_AES_256_CBC_SHA384": "ECDHE-PSK-AES256-CBC-SHA384",
+ "ECDHE_PSK_WITH_NULL_SHA": "ECDHE-PSK-NULL-SHA",
+ "ECDHE_PSK_WITH_NULL_SHA256": "ECDHE-PSK-NULL-SHA256",
+ "ECDHE_PSK_WITH_NULL_SHA384": "ECDHE-PSK-NULL-SHA384",
+
+ "PSK_WITH_CAMELLIA_128_CBC_SHA256": "PSK-CAMELLIA128-SHA256",
+ "PSK_WITH_CAMELLIA_256_CBC_SHA384": "PSK-CAMELLIA256-SHA384",
+
+ "DHE_PSK_WITH_CAMELLIA_128_CBC_SHA256": "DHE-PSK-CAMELLIA128-SHA256",
+ "DHE_PSK_WITH_CAMELLIA_256_CBC_SHA384": "DHE-PSK-CAMELLIA256-SHA384",
+
+ "RSA_PSK_WITH_CAMELLIA_128_CBC_SHA256": "RSA-PSK-CAMELLIA128-SHA256",
+ "RSA_PSK_WITH_CAMELLIA_256_CBC_SHA384": "RSA-PSK-CAMELLIA256-SHA384",
+
+ "ECDHE_PSK_WITH_CAMELLIA_128_CBC_SHA256": "ECDHE-PSK-CAMELLIA128-SHA256",
+ "ECDHE_PSK_WITH_CAMELLIA_256_CBC_SHA384": "ECDHE-PSK-CAMELLIA256-SHA384",
+
+ "PSK_WITH_AES_128_CCM": "PSK-AES128-CCM",
+ "PSK_WITH_AES_256_CCM": "PSK-AES256-CCM",
+ "DHE_PSK_WITH_AES_128_CCM": "DHE-PSK-AES128-CCM",
+ "DHE_PSK_WITH_AES_256_CCM": "DHE-PSK-AES256-CCM",
+ "PSK_WITH_AES_128_CCM_8": "PSK-AES128-CCM8",
+ "PSK_WITH_AES_256_CCM_8": "PSK-AES256-CCM8",
+ "DHE_PSK_WITH_AES_128_CCM_8": "DHE-PSK-AES128-CCM8",
+ "DHE_PSK_WITH_AES_256_CCM_8": "DHE-PSK-AES256-CCM8",
+
+ # ChaCha20-Poly1305 cipher suites, extending TLS v1.2
+ "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256": "ECDHE-RSA-CHACHA20-POLY1305",
+ "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256": "ECDHE-ECDSA-CHACHA20-POLY1305",
+ "TLS_DHE_RSA_WITH_CHACHA20_POLY1305_SHA256": "DHE-RSA-CHACHA20-POLY1305",
+ "TLS_PSK_WITH_CHACHA20_POLY1305_SHA256": "PSK-CHACHA20-POLY1305",
+ "TLS_ECDHE_PSK_WITH_CHACHA20_POLY1305_SHA256": "ECDHE-PSK-CHACHA20-POLY1305",
+ "TLS_DHE_PSK_WITH_CHACHA20_POLY1305_SHA256": "DHE-PSK-CHACHA20-POLY1305",
+ "TLS_RSA_PSK_WITH_CHACHA20_POLY1305_SHA256": "RSA-PSK-CHACHA20-POLY1305"}
+
# TLS v1.3 cipher suites IANA to OpenSSL name translation.
# The mapping is the identity: for TLS v1.3 the OpenSSL names match IANA's.
TLSV1_3_CIPHER_SUITES = {
    "TLS_AES_128_GCM_SHA256": "TLS_AES_128_GCM_SHA256",
    "TLS_AES_256_GCM_SHA384": "TLS_AES_256_GCM_SHA384",
    "TLS_CHACHA20_POLY1305_SHA256": "TLS_CHACHA20_POLY1305_SHA256",
    "TLS_AES_128_CCM_SHA256": "TLS_AES_128_CCM_SHA256",
    "TLS_AES_128_CCM_8_SHA256": "TLS_AES_128_CCM_8_SHA256"}
+
# Protocol version name -> IANA-to-OpenSSL cipher suite translation table.
TLS_CIPHER_SUITES = {
    "TLSv1": TLSV1_CIPHER_SUITES,
    "TLSv1.1": TLSV1_1_CIPHER_SUITES,
    "TLSv1.2": TLSV1_2_CIPHER_SUITES,
    "TLSv1.3": TLSV1_3_CIPHER_SUITES}
+
# Protocol version name -> the OpenSSL cipher suite names for that version.
# NOTE(review): the values are live dict views (not lists); they reflect
# any later mutation of the *_CIPHER_SUITES dicts.
OPENSSL_CS_NAMES = {
    "TLSv1": TLSV1_CIPHER_SUITES.values(),
    "TLSv1.1": TLSV1_1_CIPHER_SUITES.values(),
    "TLSv1.2": TLSV1_2_CIPHER_SUITES.values(),
    "TLSv1.3": TLSV1_3_CIPHER_SUITES.values()}
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/conversion.py b/monEnvTP/lib/python3.8/site-packages/mysql/connector/conversion.py
new file mode 100644
index 0000000000000000000000000000000000000000..c131c511f8f5d48ad2dd587a664ee096d922cc22
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql/connector/conversion.py
@@ -0,0 +1,614 @@
+# Copyright (c) 2009, 2021, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""Converting MySQL and Python types
+"""
+
+import datetime
+import struct
+import time
+from decimal import Decimal
+
+from .constants import FieldType, FieldFlag, CharacterSet
+from .utils import NUMERIC_TYPES
+from .custom_types import HexLiteral
+
+CONVERT_ERROR = "Could not convert '{value}' to python {pytype}"
+
+
+class MySQLConverterBase(object):
+ """Base class for conversion classes
+
+ All class dealing with converting to and from MySQL data types must
+ be a subclass of this class.
+ """
+
+ def __init__(self, charset='utf8', use_unicode=True, str_fallback=False):
+ self.python_types = None
+ self.mysql_types = None
+ self.charset = None
+ self.charset_id = 0
+ self.use_unicode = None
+ self.set_charset(charset)
+ self.use_unicode = use_unicode
+ self.str_fallback = str_fallback
+ self._cache_field_types = {}
+
+ def set_charset(self, charset):
+ """Set character set"""
+ if charset == 'utf8mb4':
+ charset = 'utf8'
+ if charset is not None:
+ self.charset = charset
+ else:
+ # default to utf8
+ self.charset = 'utf8'
+ self.charset_id = CharacterSet.get_charset_info(self.charset)[0]
+
+ def set_unicode(self, value=True):
+ """Set whether to use Unicode"""
+ self.use_unicode = value
+
+ def to_mysql(self, value):
+ """Convert Python data type to MySQL"""
+ type_name = value.__class__.__name__.lower()
+ try:
+ return getattr(self, "_{0}_to_mysql".format(type_name))(value)
+ except AttributeError:
+ return value
+
+ def to_python(self, vtype, value):
+ """Convert MySQL data type to Python"""
+
+ if (value == b'\x00' or value is None) and vtype[1] != FieldType.BIT:
+ # Don't go further when we hit a NULL value
+ return None
+
+ if not self._cache_field_types:
+ self._cache_field_types = {}
+ for name, info in FieldType.desc.items():
+ try:
+ self._cache_field_types[info[0]] = getattr(
+ self, '_{0}_to_python'.format(name))
+ except AttributeError:
+                    # We ignore field types that have no method
+ pass
+
+ try:
+ return self._cache_field_types[vtype[1]](value, vtype)
+ except KeyError:
+ return value
+
+ def escape(self, value):
+ """Escape buffer for sending to MySQL"""
+ return value
+
+ def quote(self, buf):
+ """Quote buffer for sending to MySQL"""
+ return str(buf)
+
+
+class MySQLConverter(MySQLConverterBase):
+ """Default conversion class for MySQL Connector/Python.
+
+ o escape method: for escaping values send to MySQL
+ o quoting method: for quoting values send to MySQL in statements
+ o conversion mapping: maps Python and MySQL data types to
+ function for converting them.
+
+ Whenever one needs to convert values differently, a converter_class
+ argument can be given while instantiating a new connection like
+ cnx.connect(converter_class=CustomMySQLConverterClass).
+
+ """
+
+ def __init__(self, charset=None, use_unicode=True, str_fallback=False):
+ MySQLConverterBase.__init__(self, charset, use_unicode, str_fallback)
+ self._cache_field_types = {}
+
+ def escape(self, value):
+ """
+        Escapes special characters as they are expected to be when MySQL
+ receives them.
+ As found in MySQL source mysys/charset.c
+
+ Returns the value if not a string, or the escaped string.
+ """
+ if value is None:
+ return value
+ elif isinstance(value, NUMERIC_TYPES):
+ return value
+ if isinstance(value, (bytes, bytearray)):
+ value = value.replace(b'\\', b'\\\\')
+ value = value.replace(b'\n', b'\\n')
+ value = value.replace(b'\r', b'\\r')
+ value = value.replace(b'\047', b'\134\047') # single quotes
+ value = value.replace(b'\042', b'\134\042') # double quotes
+ value = value.replace(b'\032', b'\134\032') # for Win32
+ else:
+ value = value.replace('\\', '\\\\')
+ value = value.replace('\n', '\\n')
+ value = value.replace('\r', '\\r')
+ value = value.replace('\047', '\134\047') # single quotes
+ value = value.replace('\042', '\134\042') # double quotes
+ value = value.replace('\032', '\134\032') # for Win32
+ return value
+
+ def quote(self, buf):
+ """
+ Quote the parameters for commands. General rules:
+        o numbers are returned as bytes using ascii codec
+ o None is returned as bytearray(b'NULL')
+ o Everything else is single quoted '<buf>'
+
+ Returns a bytearray object.
+ """
+ if isinstance(buf, NUMERIC_TYPES):
+ return str(buf).encode('ascii')
+ elif isinstance(buf, type(None)):
+ return bytearray(b"NULL")
+ return bytearray(b"'" + buf + b"'")
+
+ def to_mysql(self, value):
+ """Convert Python data type to MySQL"""
+ type_name = value.__class__.__name__.lower()
+ try:
+ return getattr(self, "_{0}_to_mysql".format(type_name))(value)
+ except AttributeError:
+ if self.str_fallback:
+ return str(value).encode()
+ raise TypeError("Python '{0}' cannot be converted to a "
+ "MySQL type".format(type_name))
+
+ def to_python(self, vtype, value):
+ """Convert MySQL data type to Python"""
+ if value == 0 and vtype[1] != FieldType.BIT: # \x00
+ # Don't go further when we hit a NULL value
+ return None
+ if value is None:
+ return None
+
+ if not self._cache_field_types:
+ self._cache_field_types = {}
+ for name, info in FieldType.desc.items():
+ try:
+ self._cache_field_types[info[0]] = getattr(
+ self, '_{0}_to_python'.format(name))
+ except AttributeError:
+                    # We ignore field types that have no method
+ pass
+
+ try:
+ return self._cache_field_types[vtype[1]](value, vtype)
+ except KeyError:
+ # If one type is not defined, we just return the value as str
+ try:
+ return value.decode('utf-8')
+ except UnicodeDecodeError:
+ return value
+ except ValueError as err:
+ raise ValueError("%s (field %s)" % (err, vtype[0]))
+ except TypeError as err:
+ raise TypeError("%s (field %s)" % (err, vtype[0]))
+ except:
+ raise
+
+ def _int_to_mysql(self, value):
+ """Convert value to int"""
+ return int(value)
+
+ def _long_to_mysql(self, value):
+ """Convert value to int"""
+ return int(value)
+
+ def _float_to_mysql(self, value):
+ """Convert value to float"""
+ return float(value)
+
+ def _str_to_mysql(self, value):
+ """Convert value to string"""
+ return self._unicode_to_mysql(value)
+
+ def _unicode_to_mysql(self, value):
+ """Convert unicode"""
+ charset = self.charset
+ charset_id = self.charset_id
+ if charset == 'binary':
+ charset = 'utf8'
+ charset_id = CharacterSet.get_charset_info(charset)[0]
+ encoded = value.encode(charset)
+ if charset_id in CharacterSet.slash_charsets:
+ if b'\x5c' in encoded:
+ return HexLiteral(value, charset)
+ return encoded
+
+ def _bytes_to_mysql(self, value):
+ """Convert value to bytes"""
+ return value
+
+ def _bytearray_to_mysql(self, value):
+ """Convert value to bytes"""
+ return bytes(value)
+
+ def _bool_to_mysql(self, value):
+ """Convert value to boolean"""
+ if value:
+ return 1
+ return 0
+
+ def _nonetype_to_mysql(self, value):
+ """
+ This would return what None would be in MySQL, but instead we
+ leave it None and return it right away. The actual conversion
+ from None to NULL happens in the quoting functionality.
+
+ Return None.
+ """
+ return None
+
+ def _datetime_to_mysql(self, value):
+ """
+ Converts a datetime instance to a string suitable for MySQL.
+ The returned string has format: %Y-%m-%d %H:%M:%S[.%f]
+
+        If the instance isn't a datetime.datetime type, it returns None.
+
+ Returns a bytes.
+ """
+ if value.microsecond:
+ fmt = '{0:04d}-{1:02d}-{2:02d} {3:02d}:{4:02d}:{5:02d}.{6:06d}'
+ return fmt.format(
+ value.year, value.month, value.day,
+ value.hour, value.minute, value.second,
+ value.microsecond).encode('ascii')
+
+ fmt = '{0:04d}-{1:02d}-{2:02d} {3:02d}:{4:02d}:{5:02d}'
+ return fmt.format(
+ value.year, value.month, value.day,
+ value.hour, value.minute, value.second).encode('ascii')
+
+ def _date_to_mysql(self, value):
+ """
+ Converts a date instance to a string suitable for MySQL.
+ The returned string has format: %Y-%m-%d
+
+        If the instance isn't a datetime.date type, it returns None.
+
+ Returns a bytes.
+ """
+ return '{0:04d}-{1:02d}-{2:02d}'.format(value.year, value.month,
+ value.day).encode('ascii')
+
+ def _time_to_mysql(self, value):
+ """
+ Converts a time instance to a string suitable for MySQL.
+ The returned string has format: %H:%M:%S[.%f]
+
+        If the instance isn't a datetime.time type, it returns None.
+
+ Returns a bytes.
+ """
+ if value.microsecond:
+ return value.strftime('%H:%M:%S.%f').encode('ascii')
+ return value.strftime('%H:%M:%S').encode('ascii')
+
+ def _struct_time_to_mysql(self, value):
+ """
+ Converts a time.struct_time sequence to a string suitable
+ for MySQL.
+ The returned string has format: %Y-%m-%d %H:%M:%S
+
+ Returns a bytes or None when not valid.
+ """
+ return time.strftime('%Y-%m-%d %H:%M:%S', value).encode('ascii')
+
+ def _timedelta_to_mysql(self, value):
+ """
+ Converts a timedelta instance to a string suitable for MySQL.
+ The returned string has format: %H:%M:%S
+
+ Returns a bytes.
+ """
+ seconds = abs(value.days * 86400 + value.seconds)
+
+ if value.microseconds:
+ fmt = '{0:02d}:{1:02d}:{2:02d}.{3:06d}'
+ if value.days < 0:
+ mcs = 1000000 - value.microseconds
+ seconds -= 1
+ else:
+ mcs = value.microseconds
+ else:
+ fmt = '{0:02d}:{1:02d}:{2:02d}'
+
+ if value.days < 0:
+ fmt = '-' + fmt
+
+ (hours, remainder) = divmod(seconds, 3600)
+ (mins, secs) = divmod(remainder, 60)
+
+ if value.microseconds:
+ result = fmt.format(hours, mins, secs, mcs)
+ else:
+ result = fmt.format(hours, mins, secs)
+
+ return result.encode('ascii')
+
+ def _decimal_to_mysql(self, value):
+ """
+ Converts a decimal.Decimal instance to a string suitable for
+ MySQL.
+
+ Returns a bytes or None when not valid.
+ """
+ if isinstance(value, Decimal):
+ return str(value).encode('ascii')
+
+ return None
+
+ def row_to_python(self, row, fields):
+ """Convert a MySQL text result row to Python types
+
+ The row argument is a sequence containing text result returned
+        by a MySQL server. Each value of the row is converted
+        using the field type information in the fields argument.
+
+ Returns a tuple.
+ """
+ i = 0
+ result = [None]*len(fields)
+
+ if not self._cache_field_types:
+ self._cache_field_types = {}
+ for name, info in FieldType.desc.items():
+ try:
+ self._cache_field_types[info[0]] = getattr(
+ self, '_{0}_to_python'.format(name))
+ except AttributeError:
+                    # We ignore field types that have no method
+ pass
+
+ for field in fields:
+ field_type = field[1]
+
+ if (row[i] == 0 and field_type != FieldType.BIT) or row[i] is None:
+ # Don't convert NULL value
+ i += 1
+ continue
+
+ try:
+ result[i] = self._cache_field_types[field_type](row[i], field)
+ except KeyError:
+ # If one type is not defined, we just return the value as str
+ try:
+ result[i] = row[i].decode('utf-8')
+ except UnicodeDecodeError:
+ result[i] = row[i]
+ except (ValueError, TypeError) as err:
+ err.message = "{0} (field {1})".format(str(err), field[0])
+ raise
+
+ i += 1
+
+ return tuple(result)
+
+ def _FLOAT_to_python(self, value, desc=None): # pylint: disable=C0103
+ """
+ Returns value as float type.
+ """
+ return float(value)
+
+ _DOUBLE_to_python = _FLOAT_to_python
+
+ def _INT_to_python(self, value, desc=None): # pylint: disable=C0103
+ """
+ Returns value as int type.
+ """
+ return int(value)
+
+ _TINY_to_python = _INT_to_python
+ _SHORT_to_python = _INT_to_python
+ _INT24_to_python = _INT_to_python
+ _LONG_to_python = _INT_to_python
+ _LONGLONG_to_python = _INT_to_python
+
+ def _DECIMAL_to_python(self, value, desc=None): # pylint: disable=C0103
+ """
+ Returns value as a decimal.Decimal.
+ """
+ val = value.decode(self.charset)
+ return Decimal(val)
+
+ _NEWDECIMAL_to_python = _DECIMAL_to_python
+
+ def _str(self, value, desc=None):
+ """
+ Returns value as str type.
+ """
+ return str(value)
+
+ def _BIT_to_python(self, value, dsc=None): # pylint: disable=C0103
+ """Returns BIT columntype as integer"""
+ int_val = value
+ if len(int_val) < 8:
+ int_val = b'\x00' * (8 - len(int_val)) + int_val
+ return struct.unpack('>Q', int_val)[0]
+
+ def _DATE_to_python(self, value, dsc=None): # pylint: disable=C0103
+        """Converts DATE column MySQL type to a python datetime.date type.
+
+ Raises ValueError if the value can not be converted.
+
+ Returns DATE column type as datetime.date type.
+ """
+ if isinstance(value, datetime.date):
+ return value
+ try:
+ parts = value.split(b'-')
+ if len(parts) != 3:
+ raise ValueError("invalid datetime format: {} len: {}"
+ "".format(parts, len(parts)))
+ try:
+ return datetime.date(int(parts[0]), int(parts[1]), int(parts[2]))
+ except ValueError:
+ return None
+ except (IndexError, ValueError):
+ raise ValueError(
+ "Could not convert {0} to python datetime.timedelta".format(
+ value))
+
+ _NEWDATE_to_python = _DATE_to_python
+
+ def _TIME_to_python(self, value, dsc=None): # pylint: disable=C0103
+        """Converts TIME column value to python datetime.timedelta value type.
+
+        Converts the TIME column MySQL type passed as bytes to a python
+        datetime.timedelta type.
+
+        Raises ValueError if the value can not be converted.
+
+        Returns datetime.timedelta type.
+ """
+ try:
+ (hms, mcs) = value.split(b'.')
+ mcs = int(mcs.ljust(6, b'0'))
+ except (TypeError, ValueError):
+ hms = value
+ mcs = 0
+ try:
+ (hours, mins, secs) = [int(d) for d in hms.split(b':')]
+ if value[0] == 45 or value[0] == '-':
+ mins, secs, mcs = -mins, -secs, -mcs
+ return datetime.timedelta(hours=hours, minutes=mins,
+ seconds=secs, microseconds=mcs)
+ except (IndexError, TypeError, ValueError):
+ raise ValueError(CONVERT_ERROR.format(value=value,
+ pytype="datetime.timedelta"))
+
+ def _DATETIME_to_python(self, value, dsc=None): # pylint: disable=C0103
+        """Converts DATETIME column value to python datetime.datetime type.
+
+ Converts the DATETIME column MySQL type passed as bytes to a python
+ datetime.datetime type.
+
+ Returns: datetime.datetime type.
+ """
+ if isinstance(value, datetime.datetime):
+ return value
+ datetime_val = None
+ try:
+ (date_, time_) = value.split(b' ')
+ if len(time_) > 8:
+ (hms, mcs) = time_.split(b'.')
+ mcs = int(mcs.ljust(6, b'0'))
+ else:
+ hms = time_
+ mcs = 0
+ dtval = [int(i) for i in date_.split(b'-')] + \
+ [int(i) for i in hms.split(b':')] + [mcs, ]
+ if len(dtval) < 6:
+ raise ValueError("invalid datetime format: {} len: {}"
+ "".format(dtval, len(dtval)))
+ else:
+ # Note that by default MySQL accepts invalid timestamps
+ # (this is also backward compatibility).
+            # Traditionally C/py returns None for this well formed but
+ # invalid datetime for python like '0000-00-00 HH:MM:SS'.
+ try:
+ datetime_val = datetime.datetime(*dtval)
+ except ValueError:
+ return None
+ except (IndexError, TypeError):
+ raise ValueError(CONVERT_ERROR.format(value=value,
+ pytype="datetime.timedelta"))
+
+ return datetime_val
+
+ _TIMESTAMP_to_python = _DATETIME_to_python
+
+ def _YEAR_to_python(self, value, desc=None): # pylint: disable=C0103
+ """Returns YEAR column type as integer"""
+ try:
+ year = int(value)
+ except ValueError:
+ raise ValueError("Failed converting YEAR to int (%s)" % value)
+
+ return year
+
+ def _SET_to_python(self, value, dsc=None): # pylint: disable=C0103
+ """Returns SET column type as set
+
+ Actually, MySQL protocol sees a SET as a string type field. So this
+ code isn't called directly, but used by STRING_to_python() method.
+
+ Returns SET column type as a set.
+ """
+ set_type = None
+ val = value.decode(self.charset)
+ if not val:
+ return set()
+ try:
+ set_type = set(val.split(','))
+ except ValueError:
+ raise ValueError("Could not convert set %s to a sequence." % value)
+ return set_type
+
+ def _STRING_to_python(self, value, dsc=None): # pylint: disable=C0103
+ """
+ Note that a SET is a string too, but using the FieldFlag we can see
+ whether we have to split it.
+
+ Returns string typed columns as string type.
+ """
+ if self.charset == "binary":
+ return value
+ if dsc is not None:
+ if dsc[1] == FieldType.JSON and self.use_unicode:
+ return value.decode(self.charset)
+ if dsc[7] & FieldFlag.SET:
+ return self._SET_to_python(value, dsc)
+ if dsc[8] == 63: # 'binary' charset
+ return value
+ if isinstance(value, (bytes, bytearray)) and self.use_unicode:
+ return value.decode(self.charset)
+
+ return value
+
+ _VAR_STRING_to_python = _STRING_to_python
+ _JSON_to_python = _STRING_to_python
+
+ def _BLOB_to_python(self, value, dsc=None): # pylint: disable=C0103
+ """Convert BLOB data type to Python."""
+ if dsc is not None:
+ if dsc[7] & FieldFlag.BLOB and dsc[7] & FieldFlag.BINARY:
+ return bytes(value)
+ return self._STRING_to_python(value, dsc)
+
+ _LONG_BLOB_to_python = _BLOB_to_python
+ _MEDIUM_BLOB_to_python = _BLOB_to_python
+ _TINY_BLOB_to_python = _BLOB_to_python
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/cursor.py b/monEnvTP/lib/python3.8/site-packages/mysql/connector/cursor.py
new file mode 100644
index 0000000000000000000000000000000000000000..7aa126708f78a5dea2bcc7acdcd8799625297a4f
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql/connector/cursor.py
@@ -0,0 +1,1464 @@
+# Copyright (c) 2009, 2021, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""Cursor classes
+"""
+
+from collections import namedtuple
+import re
+import weakref
+
+from . import errors
+from .abstracts import MySQLCursorAbstract, NAMED_TUPLE_CACHE
+from .constants import ServerFlag
+
+SQL_COMMENT = r"\/\*.*?\*\/"
+RE_SQL_COMMENT = re.compile(
+ r'''({0})|(["'`][^"'`]*?({0})[^"'`]*?["'`])'''.format(SQL_COMMENT),
+ re.I | re.M | re.S)
+RE_SQL_ON_DUPLICATE = re.compile(
+ r'''\s*ON\s+DUPLICATE\s+KEY(?:[^"'`]*["'`][^"'`]*["'`])*[^"'`]*$''',
+ re.I | re.M | re.S)
+RE_SQL_INSERT_STMT = re.compile(
+ r"({0}|\s)*INSERT({0}|\s)*INTO\s+[`'\"]?.+[`'\"]?(?:\.[`'\"]?.+[`'\"]?)"
+ r"{{0,2}}\s+VALUES\s*\(.+(?:\s*,.+)*\)".format(SQL_COMMENT),
+ re.I | re.M | re.S)
+RE_SQL_INSERT_VALUES = re.compile(r'.*VALUES\s*(\(.*\)).*', re.I | re.M | re.S)
+RE_PY_PARAM = re.compile(b'(%s)')
+RE_PY_MAPPING_PARAM = re.compile(
+ br'''
+ %
+ \((?P<mapping_key>[^)]+)\)
+ (?P<conversion_type>[diouxXeEfFgGcrs%])
+ ''',
+ re.X
+)
+RE_SQL_SPLIT_STMTS = re.compile(
+ b''';(?=(?:[^"'`]*["'`][^"'`]*["'`])*[^"'`]*$)''')
+RE_SQL_FIND_PARAM = re.compile(
+ b'''%s(?=(?:[^"'`]*["'`][^"'`]*["'`])*[^"'`]*$)''')
+
+ERR_NO_RESULT_TO_FETCH = "No result set to fetch from"
+
+MAX_RESULTS = 4294967295
+
+class _ParamSubstitutor(object):
+ """
+ Substitutes parameters into SQL statement.
+ """
+ def __init__(self, params):
+ self.params = params
+ self.index = 0
+
+ def __call__(self, matchobj):
+ index = self.index
+ self.index += 1
+ try:
+ return bytes(self.params[index])
+ except IndexError:
+ raise errors.ProgrammingError(
+ "Not enough parameters for the SQL statement")
+
+ @property
+ def remaining(self):
+ """Returns number of parameters remaining to be substituted"""
+ return len(self.params) - self.index
+
+
+def _bytestr_format_dict(bytestr, value_dict):
+ """
+ >>> _bytestr_format_dict(b'%(a)s', {b'a': b'foobar'})
+    b'foobar'
+ >>> _bytestr_format_dict(b'%%(a)s', {b'a': b'foobar'})
+ b'%%(a)s'
+ >>> _bytestr_format_dict(b'%%%(a)s', {b'a': b'foobar'})
+ b'%%foobar'
+ >>> _bytestr_format_dict(b'%(x)s %(y)s',
+ ... {b'x': b'x=%(y)s', b'y': b'y=%(x)s'})
+ b'x=%(y)s y=%(x)s'
+ """
+ def replace(matchobj):
+ """Replace pattern."""
+ value = None
+ groups = matchobj.groupdict()
+ if groups["conversion_type"] == b"%":
+ value = b"%"
+ if groups["conversion_type"] == b"s":
+ key = groups["mapping_key"]
+ value = value_dict[key]
+ if value is None:
+ raise ValueError("Unsupported conversion_type: {0}"
+ "".format(groups["conversion_type"]))
+ return value
+
+ stmt = RE_PY_MAPPING_PARAM.sub(replace, bytestr)
+ return stmt
+
+
+class CursorBase(MySQLCursorAbstract):
+ """
+ Base for defining MySQLCursor. This class is a skeleton and defines
+ methods and members as required for the Python Database API
+ Specification v2.0.
+
+    It's better to inherit from MySQLCursor.
+ """
+
+ _raw = False
+
+ def __init__(self):
+ self._description = None
+ self._rowcount = -1
+ self._last_insert_id = None
+ self.arraysize = 1
+ super(CursorBase, self).__init__()
+
+ def callproc(self, procname, args=()):
+        """Calls a stored procedure with the given arguments
+
+ The arguments will be set during this session, meaning
+ they will be called like _<procname>__arg<nr> where
+ <nr> is an enumeration (+1) of the arguments.
+
+ Coding Example:
+        1) Defining the Stored Routine in MySQL:
+ CREATE PROCEDURE multiply(IN pFac1 INT, IN pFac2 INT, OUT pProd INT)
+ BEGIN
+ SET pProd := pFac1 * pFac2;
+ END
+
+ 2) Executing in Python:
+ args = (5,5,0) # 0 is to hold pprod
+ cursor.callproc('multiply', args)
+ print(cursor.fetchone())
+
+ Does not return a value, but a result set will be
+        available when the CALL-statement executes successfully.
+ Raises exceptions when something is wrong.
+ """
+ pass
+
+ def close(self):
+ """Close the cursor."""
+ pass
+
+ def execute(self, operation, params=(), multi=False):
+ """Executes the given operation
+
+ Executes the given operation substituting any markers with
+ the given parameters.
+
+ For example, getting all rows where id is 5:
+ cursor.execute("SELECT * FROM t1 WHERE id = %s", (5,))
+
+ The multi argument should be set to True when executing multiple
+ statements in one operation. If not set and multiple results are
+ found, an InterfaceError will be raised.
+
+        If warnings were generated, and connection.get_warnings is True, then
+ self._warnings will be a list containing these warnings.
+
+ Returns an iterator when multi is True, otherwise None.
+ """
+ pass
+
+ def executemany(self, operation, seq_params):
+ """Execute the given operation multiple times
+
+ The executemany() method will execute the operation iterating
+ over the list of parameters in seq_params.
+
+ Example: Inserting 3 new employees and their phone number
+
+ data = [
+ ('Jane','555-001'),
+ ('Joe', '555-001'),
+ ('John', '555-003')
+ ]
+ stmt = "INSERT INTO employees (name, phone) VALUES ('%s','%s')"
+ cursor.executemany(stmt, data)
+
+ INSERT statements are optimized by batching the data, that is
+ using the MySQL multiple rows syntax.
+
+ Results are discarded. If they are needed, consider looping over
+ data using the execute() method.
+ """
+ pass
+
+ def fetchone(self):
+ """Returns next row of a query result set
+
+ Returns a tuple or None.
+ """
+ pass
+
+ def fetchmany(self, size=1):
+ """Returns the next set of rows of a query result, returning a
+ list of tuples. When no more rows are available, it returns an
+ empty list.
+
+ The number of rows returned can be specified using the size argument,
+ which defaults to one
+ """
+ pass
+
+ def fetchall(self):
+ """Returns all rows of a query result set
+
+ Returns a list of tuples.
+ """
+ pass
+
+ def nextset(self):
+ """Not Implemented."""
+ pass
+
+ def setinputsizes(self, sizes):
+ """Not Implemented."""
+ pass
+
+ def setoutputsize(self, size, column=None):
+ """Not Implemented."""
+ pass
+
+ def reset(self, free=True):
+ """Reset the cursor to default"""
+ pass
+
+ @property
+ def description(self):
+ """Returns description of columns in a result
+
+ This property returns a list of tuples describing the columns in
+ in a result set. A tuple is described as follows::
+
+ (column_name,
+ type,
+ None,
+ None,
+ None,
+ None,
+ null_ok,
+ column_flags) # Addition to PEP-249 specs
+
+ Returns a list of tuples.
+ """
+ return self._description
+
+ @property
+ def rowcount(self):
+ """Returns the number of rows produced or affected
+
+ This property returns the number of rows produced by queries
+ such as a SELECT, or affected rows when executing DML statements
+ like INSERT or UPDATE.
+
+ Note that for non-buffered cursors it is impossible to know the
+ number of rows produced before having fetched them all. For those,
+ the number of rows will be -1 right after execution, and
+ incremented when fetching rows.
+
+ Returns an integer.
+ """
+ return self._rowcount
+
+ @property
+ def lastrowid(self):
+ """Returns the value generated for an AUTO_INCREMENT column
+
+ Returns the value generated for an AUTO_INCREMENT column by
+ the previous INSERT or UPDATE statement or None when there is
+ no such value available.
+
+ Returns a long value or None.
+ """
+ return self._last_insert_id
+
+
+class MySQLCursor(CursorBase):
+ """Default cursor for interacting with MySQL
+
+ This cursor will execute statements and handle the result. It will
+ not automatically fetch all rows.
+
+    MySQLCursor should be inherited whenever other functionality is
+    required. An example would be to change the fetch* member functions
+ to return dictionaries instead of lists of values.
+
+ Implements the Python Database API Specification v2.0 (PEP-249)
+ """
+ def __init__(self, connection=None):
+ CursorBase.__init__(self)
+ self._connection = None
+ self._stored_results = []
+ self._nextrow = (None, None)
+ self._warnings = None
+ self._warning_count = 0
+ self._executed = None
+ self._executed_list = []
+ self._binary = False
+
+ if connection is not None:
+ self._set_connection(connection)
+
+ def __iter__(self):
+ """
+ Iteration over the result set which calls self.fetchone()
+ and returns the next row.
+ """
+ return iter(self.fetchone, None)
+
+ def _set_connection(self, connection):
+ """Set the connection"""
+ try:
+ self._connection = weakref.proxy(connection)
+ self._connection.is_connected()
+ except (AttributeError, TypeError):
+ raise errors.InterfaceError(errno=2048)
+
+ def _reset_result(self):
+ """Reset the cursor to default"""
+ self._rowcount = -1
+ self._nextrow = (None, None)
+ self._stored_results = []
+ self._warnings = None
+ self._warning_count = 0
+ self._description = None
+ self._executed = None
+ self._executed_list = []
+ self.reset()
+
+ def _have_unread_result(self):
+ """Check whether there is an unread result"""
+ try:
+ return self._connection.unread_result
+ except AttributeError:
+ return False
+
+ def _check_executed(self):
+ """Check if the statement has been executed.
+
+ Raises an error if the statement has not been executed.
+ """
+ if self._executed is None:
+ raise errors.InterfaceError(ERR_NO_RESULT_TO_FETCH)
+
+ def next(self):
+ """Used for iterating over the result set."""
+ return self.__next__()
+
+ def __next__(self):
+ """
+        Used for iterating over the result set. Calls self.fetchone()
+ to get the next row.
+ """
+ try:
+ row = self.fetchone()
+ except errors.InterfaceError:
+ raise StopIteration
+ if not row:
+ raise StopIteration
+ return row
+
+ def close(self):
+ """Close the cursor
+
+ Returns True when successful, otherwise False.
+ """
+ if self._connection is None:
+ return False
+
+ self._connection.handle_unread_result()
+ self._reset_result()
+ self._connection = None
+
+ return True
+
+ def _process_params_dict(self, params):
+ """Process query parameters given as dictionary"""
+ try:
+ to_mysql = self._connection.converter.to_mysql
+ escape = self._connection.converter.escape
+ quote = self._connection.converter.quote
+ res = {}
+ for key, value in list(params.items()):
+ conv = value
+ conv = to_mysql(conv)
+ conv = escape(conv)
+ conv = quote(conv)
+ res[key.encode()] = conv
+ except Exception as err:
+ raise errors.ProgrammingError(
+ "Failed processing pyformat-parameters; %s" % err)
+ else:
+ return res
+
+ def _process_params(self, params):
+ """Process query parameters."""
+ try:
+ res = params
+
+ to_mysql = self._connection.converter.to_mysql
+ escape = self._connection.converter.escape
+ quote = self._connection.converter.quote
+
+ res = [to_mysql(i) for i in res]
+ res = [escape(i) for i in res]
+ res = [quote(i) for i in res]
+ except Exception as err:
+ raise errors.ProgrammingError(
+ "Failed processing format-parameters; %s" % err)
+ else:
+ return tuple(res)
+
+ def _handle_noresultset(self, res):
+ """Handles result of execute() when there is no result set
+ """
+ try:
+ self._rowcount = res['affected_rows']
+ self._last_insert_id = res['insert_id']
+ self._warning_count = res['warning_count']
+ except (KeyError, TypeError) as err:
+ raise errors.ProgrammingError(
+ "Failed handling non-resultset; {0}".format(err))
+
+ self._handle_warnings()
+ if self._connection.raise_on_warnings is True and self._warnings:
+ raise errors.get_mysql_exception(
+ self._warnings[0][1], self._warnings[0][2])
+
+ def _handle_resultset(self):
+ """Handles result set
+
+ This method handles the result set and is called after reading
+ and storing column information in _handle_result(). For non-buffering
+ cursors, this method is usually doing nothing.
+ """
+ pass
+
+ def _handle_result(self, result):
+ """
+        Handle the result after a command was sent. The result can be either
+ an OK-packet or a dictionary containing column/eof information.
+
+ Raises InterfaceError when result is not a dict() or result is
+ invalid.
+ """
+ if not isinstance(result, dict):
+ raise errors.InterfaceError('Result was not a dict()')
+
+ if 'columns' in result:
+ # Weak test, must be column/eof information
+ self._description = result['columns']
+ self._connection.unread_result = True
+ self._handle_resultset()
+ elif 'affected_rows' in result:
+ # Weak test, must be an OK-packet
+ self._connection.unread_result = False
+ self._handle_noresultset(result)
+ else:
+ raise errors.InterfaceError('Invalid result')
+
    def _execute_iter(self, query_iter):
        """Generator returns MySQLCursor objects for multiple statements

        This method is only used when multiple statements are executed
        by the execute() method. It consumes the given query_iter (result
        of MySQLConnection.cmd_query_iter()) and yields this cursor once
        per result, with self._executed set to the matching statement.
        """
        # Split the combined statement so each yielded result can report
        # which individual statement produced it.
        executed_list = RE_SQL_SPLIT_STMTS.split(self._executed)

        i = 0
        while True:
            try:
                result = next(query_iter)
                self._reset_result()
                self._handle_result(result)
                try:
                    self._executed = executed_list[i].strip()
                    i += 1
                except IndexError:
                    # More results than statements (e.g. stored procedures);
                    # fall back to the first statement.
                    self._executed = executed_list[0]

                yield self
            except StopIteration:
                return
+
    def execute(self, operation, params=None, multi=False):
        """Executes the given operation

        Executes the given operation substituting any markers with
        the given parameters.

        For example, getting all rows where id is 5:
            cursor.execute("SELECT * FROM t1 WHERE id = %s", (5,))

        The multi argument should be set to True when executing multiple
        statements in one operation. If not set and multiple results are
        found, an InterfaceError will be raised.

        If warnings where generated, and connection.get_warnings is True, then
        self._warnings will be a list containing these warnings.

        Returns an iterator when multi is True, otherwise None.
        """
        if not operation:
            return None

        if not self._connection:
            raise errors.ProgrammingError("Cursor is not connected")

        # Any pending result set must be consumed before a new statement.
        self._connection.handle_unread_result()

        self._reset_result()
        stmt = ''

        try:
            if not isinstance(operation, (bytes, bytearray)):
                # Encode str statements using the connection's charset.
                stmt = operation.encode(self._connection.python_charset)
            else:
                stmt = operation
        except (UnicodeDecodeError, UnicodeEncodeError) as err:
            raise errors.ProgrammingError(str(err))

        if params:
            if isinstance(params, dict):
                # Named (%(name)s style) parameters.
                stmt = _bytestr_format_dict(
                    stmt, self._process_params_dict(params))
            elif isinstance(params, (list, tuple)):
                # Positional (%s style) parameters.
                psub = _ParamSubstitutor(self._process_params(params))
                stmt = RE_PY_PARAM.sub(psub, stmt)
                if psub.remaining != 0:
                    raise errors.ProgrammingError(
                        "Not all parameters were used in the SQL statement")
            else:
                raise errors.ProgrammingError(
                    f"Could not process parameters: {type(params).__name__}({params}),"
                    " it must be of type list, tuple or dict")

        self._executed = stmt
        if multi:
            self._executed_list = []
            return self._execute_iter(self._connection.cmd_query_iter(stmt))

        try:
            self._handle_result(self._connection.cmd_query(stmt))
        except errors.InterfaceError:
            if self._connection._have_next_result:  # pylint: disable=W0212
                raise errors.InterfaceError(
                    "Use multi=True when executing multiple statements")
            raise
        return None
+
    def _batch_insert(self, operation, seq_params):
        """Implements multi row insert

        Rewrites a single-row INSERT into the MySQL multi-row syntax by
        repeating the VALUES(...) group once per parameter set.

        Returns the rewritten statement as bytes, or None when the
        VALUES(...) group could not be located in the statement.
        """
        def remove_comments(match):
            """Remove comments from INSERT statements.

            This function is used while removing comments from INSERT
            statements. If the matched string is a comment not enclosed
            by quotes, it returns an empty string, else the string itself.
            """
            if match.group(1):
                return ""
            return match.group(2)

        # Strip comments and any ON DUPLICATE KEY clause before locating
        # the VALUES(...) part of the statement.
        tmp = re.sub(RE_SQL_ON_DUPLICATE, '',
                     re.sub(RE_SQL_COMMENT, remove_comments, operation))

        matches = re.search(RE_SQL_INSERT_VALUES, tmp)
        if not matches:
            raise errors.InterfaceError(
                "Failed rewriting statement for multi-row INSERT. "
                "Check SQL syntax."
            )
        fmt = matches.group(1).encode(self._connection.python_charset)
        values = []

        try:
            stmt = operation.encode(self._connection.python_charset)
            # Build one substituted copy of the VALUES(...) group per
            # parameter set.
            for params in seq_params:
                tmp = fmt
                if isinstance(params, dict):
                    tmp = _bytestr_format_dict(
                        tmp, self._process_params_dict(params))
                else:
                    psub = _ParamSubstitutor(self._process_params(params))
                    tmp = RE_PY_PARAM.sub(psub, tmp)
                    if psub.remaining != 0:
                        raise errors.ProgrammingError(
                            "Not all parameters were used in the SQL statement")
                    #for p in self._process_params(params):
                    #    tmp = tmp.replace(b'%s',p,1)
                values.append(tmp)
            if fmt in stmt:
                stmt = stmt.replace(fmt, b','.join(values), 1)
                self._executed = stmt
                return stmt
            return None
        except (UnicodeDecodeError, UnicodeEncodeError) as err:
            raise errors.ProgrammingError(str(err))
        except errors.Error:
            raise
        except Exception as err:
            raise errors.InterfaceError(
                "Failed executing the operation; %s" % err)
+
+ def executemany(self, operation, seq_params):
+ """Execute the given operation multiple times
+
+ The executemany() method will execute the operation iterating
+ over the list of parameters in seq_params.
+
+ Example: Inserting 3 new employees and their phone number
+
+ data = [
+ ('Jane','555-001'),
+ ('Joe', '555-001'),
+ ('John', '555-003')
+ ]
+ stmt = "INSERT INTO employees (name, phone) VALUES ('%s','%s)"
+ cursor.executemany(stmt, data)
+
+ INSERT statements are optimized by batching the data, that is
+ using the MySQL multiple rows syntax.
+
+ Results are discarded. If they are needed, consider looping over
+ data using the execute() method.
+ """
+ if not operation or not seq_params:
+ return None
+ self._connection.handle_unread_result()
+
+ try:
+ _ = iter(seq_params)
+ except TypeError:
+ raise errors.ProgrammingError(
+ "Parameters for query must be an Iterable.")
+
+ # Optimize INSERTs by batching them
+ if re.match(RE_SQL_INSERT_STMT, operation):
+ if not seq_params:
+ self._rowcount = 0
+ return None
+ stmt = self._batch_insert(operation, seq_params)
+ if stmt is not None:
+ self._executed = stmt
+ return self.execute(stmt)
+
+ rowcnt = 0
+ try:
+ for params in seq_params:
+ self.execute(operation, params)
+ if self.with_rows and self._have_unread_result():
+ self.fetchall()
+ rowcnt += self._rowcount
+ except (ValueError, TypeError) as err:
+ raise errors.InterfaceError(
+ "Failed executing the operation; {0}".format(err))
+ except:
+ # Raise whatever execute() raises
+ raise
+ self._rowcount = rowcnt
+ return None
+
+ def stored_results(self):
+ """Returns an iterator for stored results
+
+ This method returns an iterator over results which are stored when
+ callproc() is called. The iterator will provide MySQLCursorBuffered
+ instances.
+
+ Returns a iterator.
+ """
+ return iter(self._stored_results)
+
    def callproc(self, procname, args=()):
        """Calls a stored procedure with the given arguments

        The arguments will be set during this session, meaning
        they will be called like _<procname>__arg<nr> where
        <nr> is an enumeration (+1) of the arguments.

        Coding Example:
          1) Defining the Stored Routine in MySQL:
          CREATE PROCEDURE multiply(IN pFac1 INT, IN pFac2 INT, OUT pProd INT)
          BEGIN
            SET pProd := pFac1 * pFac2;
          END

          2) Executing in Python:
          args = (5, 5, 0)  # 0 is to hold pprod
          cursor.callproc('multiply', args)
          print(cursor.fetchone())

        For OUT and INOUT parameters the user should provide the
        type of the parameter as well. The argument should be a
        tuple with first item as the value of the parameter to pass
        and second argument the type of the argument.

        In the above example, one can call callproc method like:
          args = (5, 5, (0, 'INT'))
          cursor.callproc('multiply', args)

        The type of the argument given in the tuple will be used by
        the MySQL CAST function to convert the values in the corresponding
        MySQL type (See CAST in MySQL Reference for more information)

        Does not return a value, but a result set will be
        available when the CALL-statement execute successfully.
        Raises exceptions when something is wrong.
        """
        if not procname or not isinstance(procname, str):
            raise ValueError("procname must be a string")

        if not isinstance(args, (tuple, list)):
            raise ValueError("args must be a sequence")

        argfmt = "@_{name}_arg{index}"
        self._stored_results = []

        results = []
        try:
            argnames = []
            argtypes = []
            if args:
                # Assign each argument to a session variable so OUT/INOUT
                # values can be selected back after the CALL.
                for idx, arg in enumerate(args):
                    argname = argfmt.format(name=procname, index=idx + 1)
                    argnames.append(argname)
                    if isinstance(arg, tuple):
                        # (value, type) pair: wrap in CAST for the SELECT.
                        argtypes.append(" CAST({0} AS {1})".format(argname,
                                                                   arg[1]))
                        self.execute("SET {0}=%s".format(argname), (arg[0],))
                    else:
                        argtypes.append(argname)
                        self.execute("SET {0}=%s".format(argname), (arg,))

            call = "CALL {0}({1})".format(procname, ','.join(argnames))

            # pylint: disable=W0212
            # We disable consuming results temporary to make sure we
            # getting all results
            can_consume_results = self._connection._consume_results
            for result in self._connection.cmd_query_iter(call):
                self._connection._consume_results = False
                # Pick a buffered cursor class matching this cursor's row
                # format (dict / namedtuple / raw / plain).
                if isinstance(self, (MySQLCursorDict,
                                     MySQLCursorBufferedDict)):
                    cursor_class = MySQLCursorBufferedDict
                elif isinstance(self, (MySQLCursorNamedTuple,
                                       MySQLCursorBufferedNamedTuple)):
                    cursor_class = MySQLCursorBufferedNamedTuple
                elif self._raw:
                    cursor_class = MySQLCursorBufferedRaw
                else:
                    cursor_class = MySQLCursorBuffered
                tmp = cursor_class(self._connection._get_self())
                tmp._executed = "(a result of {0})".format(call)
                tmp._handle_result(result)
                if tmp._warnings is not None:
                    self._warnings = tmp._warnings
                if 'columns' in result:
                    results.append(tmp)
            self._connection._consume_results = can_consume_results
            # pylint: enable=W0212

            if argnames:
                # Create names aliases to be compatible with namedtuples
                args = [
                    "{} AS {}".format(name, alias) for name, alias in
                    zip(argtypes, [arg.lstrip("@_") for arg in argnames])
                ]
                select = "SELECT {}".format(",".join(args))
                self.execute(select)
                self._stored_results = results
                return self.fetchone()

            self._stored_results = results
            return ()

        except errors.Error:
            raise
        except Exception as err:
            raise errors.InterfaceError(
                "Failed calling stored routine; {0}".format(err))
+
+ def getlastrowid(self):
+ """Returns the value generated for an AUTO_INCREMENT column
+
+ Returns the value generated for an AUTO_INCREMENT column by
+ the previous INSERT or UPDATE statement.
+
+ Returns a long value or None.
+ """
+ return self._last_insert_id
+
+ def _fetch_warnings(self):
+ """
+ Fetch warnings doing a SHOW WARNINGS. Can be called after getting
+ the result.
+
+ Returns a result set or None when there were no warnings.
+ """
+ res = []
+ try:
+ cur = self._connection.cursor(raw=False)
+ cur.execute("SHOW WARNINGS")
+ res = cur.fetchall()
+ cur.close()
+ except Exception as err:
+ raise errors.InterfaceError(
+ "Failed getting warnings; %s" % err)
+
+ if res:
+ return res
+
+ return None
+
+ def _handle_warnings(self):
+ """Handle possible warnings after all results are consumed"""
+ if self._connection.get_warnings is True and self._warning_count:
+ self._warnings = self._fetch_warnings()
+
    def _handle_eof(self, eof):
        """Handle EOF packet

        Marks the result set as fully read, clears the lookahead row,
        stores the warning count and fetches warnings if configured.
        """
        self._connection.unread_result = False
        self._nextrow = (None, None)
        self._warning_count = eof['warning_count']
        self._handle_warnings()
        # Promote the first warning to an exception when requested.
        if self._connection.raise_on_warnings is True and self._warnings:
            raise errors.get_mysql_exception(
                self._warnings[0][1], self._warnings[0][2])
+
    def _fetch_row(self, raw=False):
        """Returns the next row in the result set

        Keeps one row of lookahead in self._nextrow so the EOF packet is
        detected as soon as the last row has been handed out.

        Returns a tuple or None.
        """
        if not self._have_unread_result():
            return None
        row = None

        if self._nextrow == (None, None):
            # No lookahead yet: read the first row from the connection.
            (row, eof) = self._connection.get_row(
                binary=self._binary, columns=self.description, raw=raw)
        else:
            (row, eof) = self._nextrow

        if row:
            # Read one row ahead; EOF may arrive together with it.
            self._nextrow = self._connection.get_row(
                binary=self._binary, columns=self.description, raw=raw)
            eof = self._nextrow[1]
            if eof is not None:
                self._handle_eof(eof)
            if self._rowcount == -1:
                self._rowcount = 1
            else:
                self._rowcount += 1
        if eof:
            self._handle_eof(eof)

        return row
+
+ def fetchone(self):
+ """Returns next row of a query result set
+
+ Returns a tuple or None.
+ """
+ self._check_executed()
+ return self._fetch_row()
+
+ def fetchmany(self, size=None):
+ self._check_executed()
+ res = []
+ cnt = (size or self.arraysize)
+ while cnt > 0 and self._have_unread_result():
+ cnt -= 1
+ row = self.fetchone()
+ if row:
+ res.append(row)
+ return res
+
    def fetchall(self):
        """Return all remaining rows of the current result set as a list."""
        self._check_executed()
        if not self._have_unread_result():
            return []

        (rows, eof) = self._connection.get_rows()
        if self._nextrow[0]:
            # Include the lookahead row kept by _fetch_row().
            rows.insert(0, self._nextrow[0])

        self._handle_eof(eof)
        rowcount = len(rows)
        if rowcount >= 0 and self._rowcount == -1:
            self._rowcount = 0
        self._rowcount += rowcount
        return rows
+
+ @property
+ def column_names(self):
+ """Returns column names
+
+ This property returns the columns names as a tuple.
+
+ Returns a tuple.
+ """
+ if not self.description:
+ return ()
+ return tuple([d[0] for d in self.description])
+
+ @property
+ def statement(self):
+ """Returns the executed statement
+
+ This property returns the executed statement. When multiple
+ statements were executed, the current statement in the iterator
+ will be returned.
+ """
+ if self._executed is None:
+ return None
+ try:
+ return self._executed.strip().decode('utf-8')
+ except (AttributeError, UnicodeDecodeError):
+ return self._executed.strip()
+
+ @property
+ def with_rows(self):
+ """Returns whether the cursor could have rows returned
+
+ This property returns True when column descriptions are available
+ and possibly also rows, which will need to be fetched.
+
+ Returns True or False.
+ """
+ if not self.description:
+ return False
+ return True
+
+ def __str__(self):
+ fmt = "{class_name}: {stmt}"
+ if self._executed:
+ try:
+ executed = self._executed.decode('utf-8')
+ except AttributeError:
+ executed = self._executed
+ if len(executed) > 40:
+ executed = executed[:40] + '..'
+ else:
+ executed = '(Nothing executed yet)'
+ return fmt.format(class_name=self.__class__.__name__, stmt=executed)
+
+
class MySQLCursorBuffered(MySQLCursor):
    """Cursor which fetches rows within execute()

    The complete result set is read and stored right after execution;
    the fetch methods then serve rows from memory.
    """

    def __init__(self, connection=None):
        MySQLCursor.__init__(self, connection)
        self._rows = None       # all rows of the current result set
        self._next_row = 0      # index of the next row to hand out

    def _handle_resultset(self):
        """Read and store the complete result set immediately."""
        (self._rows, eof) = self._connection.get_rows()
        self._rowcount = len(self._rows)
        self._handle_eof(eof)
        self._next_row = 0
        try:
            self._connection.unread_result = False
        except Exception:
            # Best effort; some connection objects may reject the
            # assignment and that must not fail the fetch.
            pass

    def reset(self, free=True):
        """Discard the buffered rows."""
        self._rows = None

    def _fetch_row(self, raw=False):
        """Return the next buffered row, or None when exhausted."""
        try:
            row = self._rows[self._next_row]
        except (IndexError, TypeError):
            # IndexError: past the last row; TypeError: no rows stored.
            return None
        self._next_row += 1
        return row

    def fetchone(self):
        """Returns next row of a query result set

        Returns a tuple or None.
        """
        self._check_executed()
        return self._fetch_row()

    def fetchall(self):
        """Return all remaining buffered rows as a list."""
        if self._executed is None or self._rows is None:
            raise errors.InterfaceError(ERR_NO_RESULT_TO_FETCH)
        res = self._rows[self._next_row:]
        self._next_row = len(self._rows)
        return res

    def fetchmany(self, size=None):
        """Return up to `size` rows (default: arraysize) as a list."""
        self._check_executed()
        res = []
        cnt = (size or self.arraysize)
        while cnt > 0:
            cnt -= 1
            row = self.fetchone()
            if row:
                res.append(row)

        return res

    @property
    def with_rows(self):
        return self._rows is not None
+
+
class MySQLCursorRaw(MySQLCursor):
    """
    Skips conversion from MySQL datatypes to Python types when fetching rows.
    """

    # Passed through to get_row()/get_rows() to skip datatype conversion.
    _raw = True

    def fetchone(self):
        """Return the next raw row, or None."""
        self._check_executed()
        return self._fetch_row(raw=True)

    def fetchall(self):
        """Return all remaining raw rows as a list."""
        self._check_executed()
        if not self._have_unread_result():
            return []
        (rows, eof) = self._connection.get_rows(raw=True)
        if self._nextrow[0]:
            # Include the lookahead row kept by _fetch_row().
            rows.insert(0, self._nextrow[0])
        self._handle_eof(eof)
        rowcount = len(rows)
        if rowcount >= 0 and self._rowcount == -1:
            self._rowcount = 0
        self._rowcount += rowcount
        return rows
+
+
class MySQLCursorBufferedRaw(MySQLCursorBuffered):
    """
    Cursor which skips conversion from MySQL datatypes to Python types when
    fetching rows and fetches rows within execute().
    """

    _raw = True

    def _handle_resultset(self):
        """Read and store the complete result set without conversion."""
        (self._rows, eof) = self._connection.get_rows(raw=self._raw)
        self._rowcount = len(self._rows)
        self._handle_eof(eof)
        self._next_row = 0
        try:
            self._connection.unread_result = False
        except Exception:
            # Best effort; must not fail the fetch.
            pass

    def fetchone(self):
        """Return the next raw row, or None."""
        self._check_executed()
        return self._fetch_row()

    def fetchall(self):
        """Return all buffered raw rows from the current position."""
        self._check_executed()
        return list(self._rows[self._next_row:])

    @property
    def with_rows(self):
        return self._rows is not None
+
+
class MySQLCursorPrepared(MySQLCursor):
    """Cursor using MySQL Prepared Statements
    """
    def __init__(self, connection=None):
        super(MySQLCursorPrepared, self).__init__(connection)
        self._rows = None
        self._next_row = 0
        self._prepared = None       # dict with 'statement_id' and 'parameters'
        self._binary = True         # prepared statements use the binary protocol
        self._have_result = None
        self._last_row_sent = False
        self._cursor_exists = False

    def reset(self, free=True):
        # Deallocate the server-side prepared statement, if any.
        if self._prepared:
            try:
                self._connection.cmd_stmt_close(self._prepared['statement_id'])
            except errors.Error:
                # We tried to deallocate, but it's OK when we fail.
                pass
            self._prepared = None
        self._last_row_sent = False
        self._cursor_exists = False

    def _handle_noresultset(self, res):
        """Store server status flags, then handle the OK-packet."""
        self._handle_server_status(res.get('status_flag',
                                           res.get('server_status', 0)))
        super(MySQLCursorPrepared, self)._handle_noresultset(res)

    def _handle_server_status(self, flags):
        """Check for SERVER_STATUS_CURSOR_EXISTS and
        SERVER_STATUS_LAST_ROW_SENT flags set by the server.
        """
        self._cursor_exists = flags & ServerFlag.STATUS_CURSOR_EXISTS != 0
        self._last_row_sent = flags & ServerFlag.STATUS_LAST_ROW_SENT != 0

    def _handle_eof(self, eof):
        """Store server status flags, then handle the EOF packet."""
        self._handle_server_status(eof.get('status_flag',
                                           eof.get('server_status', 0)))
        super(MySQLCursorPrepared, self)._handle_eof(eof)

    def callproc(self, procname, args=()):
        """Calls a stored procedure

        Not supported with MySQLCursorPrepared.
        """
        raise errors.NotSupportedError()

    def close(self):
        """Close the cursor

        This method will try to deallocate the prepared statement and close
        the cursor.
        """
        self.reset()
        super(MySQLCursorPrepared, self).close()

    def _row_to_python(self, rowdata, desc=None):
        """Convert row data from MySQL to Python types

        The conversion is done while reading binary data in the
        protocol module.
        """
        pass

    def _handle_result(self, result):
        """Handle result after execution"""
        if isinstance(result, dict):
            # OK-packet: no result set.
            self._connection.unread_result = False
            self._have_result = False
            self._handle_noresultset(result)
        else:
            # Sequence: (columns info, column descriptions, eof/status dict).
            self._description = result[1]
            self._connection.unread_result = True
            self._have_result = True

            if 'status_flag' in result[2]:
                self._handle_server_status(result[2]['status_flag'])
            elif 'server_status' in result[2]:
                self._handle_server_status(result[2]['server_status'])

    def execute(self, operation, params=None, multi=False):  # multi is unused
        """Prepare and execute a MySQL Prepared Statement

        This method will prepare the given operation and execute it using
        the optionally given parameters.

        If the cursor instance already had a prepared statement, it is
        first closed.

        NOTE(review): the re-prepare check uses `is not` (object identity),
        so passing an equal-but-distinct statement string re-prepares it.
        """
        if operation is not self._executed:
            if self._prepared:
                self._connection.cmd_stmt_close(self._prepared['statement_id'])

            self._executed = operation
            try:
                if not isinstance(operation, bytes):
                    charset = self._connection.charset
                    # MySQL's prepare protocol uses 'utf8' for 'utf8mb4'.
                    if charset == 'utf8mb4':
                        charset = 'utf8'
                    operation = operation.encode(charset)
            except (UnicodeDecodeError, UnicodeEncodeError) as err:
                raise errors.ProgrammingError(str(err))

            # need to convert %s to ? before sending it to MySQL
            if b'%s' in operation:
                operation = re.sub(RE_SQL_FIND_PARAM, b'?', operation)

            try:
                self._prepared = self._connection.cmd_stmt_prepare(operation)
            except errors.Error:
                self._executed = None
                raise

        self._connection.cmd_stmt_reset(self._prepared['statement_id'])

        # Statement requires parameters but none given: nothing to execute.
        if self._prepared['parameters'] and not params:
            return
        elif params:
            if not isinstance(params, (tuple, list)):
                raise errors.ProgrammingError(
                    errno=1210,
                    msg=f"Incorrect type of argument: {type(params).__name__}({params})"
                    ", it must be of type tuple or list the argument given to "
                    "the prepared statement")
            if len(self._prepared['parameters']) != len(params):
                raise errors.ProgrammingError(
                    errno=1210,
                    msg="Incorrect number of arguments " \
                        "executing prepared statement")

        if params is None:
            params = ()
        res = self._connection.cmd_stmt_execute(
            self._prepared['statement_id'],
            data=params,
            parameters=self._prepared['parameters'])
        self._handle_result(res)

    def executemany(self, operation, seq_params):
        """Prepare and execute a MySQL Prepared Statement many times

        This method will prepare the given operation and execute with each
        tuple found the list seq_params.

        If the cursor instance already had a prepared statement, it is
        first closed.

        executemany() simply calls execute().
        """
        rowcnt = 0
        try:
            for params in seq_params:
                self.execute(operation, params)
                if self.with_rows and self._have_unread_result():
                    self.fetchall()
                rowcnt += self._rowcount
        except (ValueError, TypeError) as err:
            raise errors.InterfaceError(
                "Failed executing the operation; {error}".format(error=err))
        except:
            # Raise whatever execute() raises
            raise
        self._rowcount = rowcnt

    def fetchone(self):
        """Returns next row of a query result set

        Returns a tuple or None.
        """
        self._check_executed()
        if self._cursor_exists:
            # Server-side cursor: request the next row(s) explicitly.
            self._connection.cmd_stmt_fetch(self._prepared['statement_id'])
        return self._fetch_row() or None

    def fetchmany(self, size=None):
        """Return up to `size` rows (default: arraysize) as a list."""
        self._check_executed()
        res = []
        cnt = (size or self.arraysize)
        while cnt > 0 and self._have_unread_result():
            cnt -= 1
            row = self._fetch_row()
            if row:
                res.append(row)
        return res

    def fetchall(self):
        """Return all remaining rows of the result set as a list."""
        self._check_executed()
        rows = []
        if self._nextrow[0]:
            rows.append(self._nextrow[0])
        while self._have_unread_result():
            if self._cursor_exists:
                self._connection.cmd_stmt_fetch(
                    self._prepared['statement_id'], MAX_RESULTS)
            (tmp, eof) = self._connection.get_rows(
                binary=self._binary, columns=self.description)
            rows.extend(tmp)
            self._handle_eof(eof)
        self._rowcount = len(rows)
        return rows
+
+
class MySQLCursorDict(MySQLCursor):
    """
    Cursor fetching rows as dictionaries.

    The fetch methods of this class will return dictionaries instead of tuples.
    Each row is a dictionary that looks like:
        row = {
            "col1": value1,
            "col2": value2
        }
    """
    def _row_to_python(self, rowdata, desc=None):
        """Convert a MySQL text result row into a dictionary.

        Maps column names onto the row values. Returns None for an
        empty row.
        """
        if not rowdata:
            return None
        return dict(zip(self.column_names, rowdata))

    def fetchone(self):
        """Return the next row as a dictionary, or None."""
        self._check_executed()
        row = self._fetch_row()
        return self._row_to_python(row, self.description) if row else None

    def fetchall(self):
        """Return all remaining rows as a list of dictionaries."""
        self._check_executed()
        if not self._have_unread_result():
            return []

        (rows, eof) = self._connection.get_rows()
        if self._nextrow[0]:
            # Include the lookahead row kept by _fetch_row().
            rows.insert(0, self._nextrow[0])
        converted = [self._row_to_python(row, self.description)
                     for row in rows]
        self._handle_eof(eof)
        total = len(rows)
        if total >= 0 and self._rowcount == -1:
            self._rowcount = 0
        self._rowcount += total
        return converted
+
+
class MySQLCursorNamedTuple(MySQLCursor):
    """
    Cursor fetching rows as named tuple.

    The fetch methods of this class will return namedtuples instead of tuples.
    Each row is returned as a namedtuple and the values can be accessed as:
    row.col1, row.col2
    """
    def _row_to_python(self, rowdata, desc=None):
        """Convert a MySQL text result row to Python types

        Returns a named tuple.
        """
        row = rowdata

        if row:
            # pylint: disable=W0201
            columns = tuple(self.column_names)
            try:
                named_tuple = NAMED_TUPLE_CACHE[columns]
            except KeyError:
                # Cache the namedtuple class per column set; creating the
                # class repeatedly would be wasteful.
                named_tuple = namedtuple('Row', columns)
                NAMED_TUPLE_CACHE[columns] = named_tuple
            # pylint: enable=W0201
            return named_tuple(*row)
        return None

    def fetchone(self):
        """Returns next row of a query result set
        """
        self._check_executed()
        row = self._fetch_row()
        if row:
            # Only convert when the connection exposes a converter —
            # presumably rows are pre-converted otherwise; verify against
            # the C extension connection.
            if hasattr(self._connection, 'converter'):
                return self._row_to_python(row, self.description)
            return row
        return None

    def fetchall(self):
        """Returns all rows of a query result set
        """
        self._check_executed()
        if not self._have_unread_result():
            return []

        (rows, eof) = self._connection.get_rows()
        if self._nextrow[0]:
            # Include the lookahead row kept by _fetch_row().
            rows.insert(0, self._nextrow[0])
        res = [self._row_to_python(row, self.description)
               for row in rows]

        self._handle_eof(eof)
        rowcount = len(rows)
        if rowcount >= 0 and self._rowcount == -1:
            self._rowcount = 0
        self._rowcount += rowcount
        return res
+
+
class MySQLCursorBufferedDict(MySQLCursorDict, MySQLCursorBuffered):
    """
    Buffered Cursor fetching rows as dictionaries.
    """
    def fetchone(self):
        """Return the next buffered row as a dictionary, or None."""
        self._check_executed()
        row = self._fetch_row()
        return self._row_to_python(row, self.description) if row else None

    def fetchall(self):
        """Return all remaining buffered rows as dictionaries."""
        if self._executed is None or self._rows is None:
            raise errors.InterfaceError(ERR_NO_RESULT_TO_FETCH)
        remaining = self._rows[self._next_row:]
        self._next_row = len(self._rows)
        return [self._row_to_python(row, self.description)
                for row in remaining]
+
+
class MySQLCursorBufferedNamedTuple(MySQLCursorNamedTuple, MySQLCursorBuffered):
    """
    Buffered Cursor fetching rows as named tuple.
    """
    def fetchone(self):
        """Return the next buffered row as a namedtuple, or None."""
        self._check_executed()
        row = self._fetch_row()
        return self._row_to_python(row, self.description) if row else None

    def fetchall(self):
        """Return all remaining buffered rows as namedtuples."""
        if self._executed is None or self._rows is None:
            raise errors.InterfaceError(ERR_NO_RESULT_TO_FETCH)
        remaining = self._rows[self._next_row:]
        self._next_row = len(self._rows)
        return [self._row_to_python(row, self.description)
                for row in remaining]
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/cursor_cext.py b/monEnvTP/lib/python3.8/site-packages/mysql/connector/cursor_cext.py
new file mode 100644
index 0000000000000000000000000000000000000000..faab1fe2cdc7f651b0aa16e5d9b11d5c4991baac
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql/connector/cursor_cext.py
@@ -0,0 +1,1046 @@
+# Copyright (c) 2014, 2021, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""Cursor classes using the C Extension
+"""
+
+from collections import namedtuple
+import re
+import weakref
+
+from _mysql_connector import MySQLInterfaceError # pylint: disable=F0401,E0611
+
+from .abstracts import (MySQLConnectionAbstract, MySQLCursorAbstract,
+ NAMED_TUPLE_CACHE)
+from . import errors
+from .errorcode import CR_NO_RESULT_SET
+
+from .cursor import (
+ RE_PY_PARAM, RE_SQL_INSERT_STMT,
+ RE_SQL_ON_DUPLICATE, RE_SQL_COMMENT, RE_SQL_INSERT_VALUES,
+ RE_SQL_SPLIT_STMTS, RE_SQL_FIND_PARAM
+)
+
+ERR_NO_RESULT_TO_FETCH = "No result set to fetch from"
+
+
+class _ParamSubstitutor(object):
+
+ """
+ Substitutes parameters into SQL statement.
+ """
+
+ def __init__(self, params):
+ self.params = params
+ self.index = 0
+
+ def __call__(self, matchobj):
+ index = self.index
+ self.index += 1
+ try:
+ return self.params[index]
+ except IndexError:
+ raise errors.ProgrammingError(
+ "Not enough parameters for the SQL statement")
+
+ @property
+ def remaining(self):
+ """Returns number of parameters remaining to be substituted"""
+ return len(self.params) - self.index
+
+
+class CMySQLCursor(MySQLCursorAbstract):
+
+ """Default cursor for interacting with MySQL using C Extension"""
+
+ _raw = False
+ _buffered = False
+ _raw_as_string = False
+
    def __init__(self, connection):
        """Initialize

        Raises InterfaceError (errno 2048) when connection is not a
        MySQLConnectionAbstract instance.
        """
        MySQLCursorAbstract.__init__(self)

        self._insert_id = 0
        self._warning_count = 0
        self._warnings = None
        self._affected_rows = -1
        self._rowcount = -1
        self._nextrow = (None, None)
        self._executed = None
        self._executed_list = []
        self._stored_results = []

        if not isinstance(connection, MySQLConnectionAbstract):
            raise errors.InterfaceError(errno=2048)
        # Weak proxy avoids a reference cycle between cursor and connection.
        self._cnx = weakref.proxy(connection)
+
+ def reset(self, free=True):
+ """Reset the cursor
+
+ When free is True (default) the result will be freed.
+ """
+ self._rowcount = -1
+ self._nextrow = None
+ self._affected_rows = -1
+ self._insert_id = 0
+ self._warning_count = 0
+ self._warnings = None
+ self._warnings = None
+ self._warning_count = 0
+ self._description = None
+ self._executed_list = []
+ if free and self._cnx:
+ self._cnx.free_result()
+ super(CMySQLCursor, self).reset()
+
+
+ def _check_executed(self):
+ """Check if the statement has been executed.
+
+ Raises an error if the statement has not been executed.
+ """
+ if self._executed is None:
+ raise errors.InterfaceError(ERR_NO_RESULT_TO_FETCH)
+
    def _fetch_warnings(self):
        """Fetch warnings

        Fetch warnings doing a SHOW WARNINGS. Can be called after getting
        the result.

        Raises errors.Error (or subclass) on errors.

        Returns list of tuples or None when there were no warnings.
        """
        warnings = []
        try:
            # force freeing result
            self._cnx.consume_results()
            _ = self._cnx.cmd_query("SHOW WARNINGS")
            warnings = self._cnx.get_rows()[0]
            self._cnx.consume_results()
        except MySQLInterfaceError as exc:
            # Map C extension errors onto connector error classes.
            raise errors.get_mysql_exception(msg=exc.msg, errno=exc.errno,
                                             sqlstate=exc.sqlstate)
        except Exception as err:
            raise errors.InterfaceError(
                "Failed getting warnings; {0}".format(str(err)))

        if warnings:
            return warnings

        return None
+
+ def _handle_warnings(self):
+ """Handle possible warnings after all results are consumed"""
+ if self._cnx.get_warnings is True and self._warning_count:
+ self._warnings = self._fetch_warnings()
+
    def _handle_result(self, result):
        """Handles the result after statement execution

        A dict with 'columns' indicates a result set; otherwise it is
        treated as an OK-packet.
        """
        if 'columns' in result:
            self._description = result['columns']
            self._rowcount = 0
            self._handle_resultset()
        else:
            self._insert_id = result['insert_id']
            self._warning_count = result['warning_count']
            self._affected_rows = result['affected_rows']
            self._rowcount = -1
            self._handle_warnings()
            # Promote the first warning to an exception when requested.
            if self._cnx.raise_on_warnings is True and self._warnings:
                raise errors.get_mysql_exception(*self._warnings[0][1:3])
+
    def _handle_resultset(self):
        """Handle a result set

        The default implementation does nothing; subclasses may override
        it (e.g. to buffer all rows immediately).
        """
        pass
+
    def _handle_eof(self):
        """Handle end of reading the result

        Raises an errors.Error on errors.
        """
        self._warning_count = self._cnx.warning_count
        self._handle_warnings()
        # Promote the first warning to an exception when requested.
        if self._cnx.raise_on_warnings is True and self._warnings:
            raise errors.get_mysql_exception(*self._warnings[0][1:3])

        # Free the result only when no further result sets are pending.
        if not self._cnx.more_results:
            self._cnx.free_result()
+
+    def _execute_iter(self):
+        """Generator returns MySQLCursor objects for multiple statements
+
+        Deprecated: use nextset() method directly.
+
+        This method is only used when multiple statements are executed
+        by the execute() method. It uses zip() to make an iterator from the
+        given query_iter (result of MySQLConnection.cmd_query_iter()) and
+        the list of statements that were executed.
+        """
+        executed_list = RE_SQL_SPLIT_STMTS.split(self._executed)
+        i = 0
+        # Yield once per statement; _executed tracks the current one.
+        self._executed = executed_list[i]
+        yield self
+
+        while True:
+            try:
+                if not self.nextset():
+                    # Raised and caught inside this try, so PEP 479 does
+                    # not turn it into a RuntimeError.
+                    raise StopIteration
+            except errors.InterfaceError as exc:
+                # Result without result set
+                if exc.errno != CR_NO_RESULT_SET:
+                    raise
+            except StopIteration:
+                return
+            i += 1
+            try:
+                self._executed = executed_list[i].strip()
+            except IndexError:
+                # More results than split statements; fall back to the first.
+                self._executed = executed_list[0]
+            yield self
+        return
+
+    def execute(self, operation, params=(), multi=False):
+        """Execute given statement using given parameters
+
+        Deprecated: The multi argument is not needed and nextset() should
+        be used to handle multiple result sets.
+
+        Raises ProgrammingError when the cursor is not connected, when
+        parameter substitution fails, or when not all parameters are used.
+        """
+        if not operation:
+            return None
+
+        if not self._cnx or self._cnx.is_closed():
+            raise errors.ProgrammingError("Cursor is not connected", 2055)
+        # Consume any pending result of a previous statement first.
+        self._cnx.handle_unread_result()
+
+        stmt = ''
+        self.reset()
+
+        try:
+            if isinstance(operation, str):
+                stmt = operation.encode(self._cnx.python_charset)
+            else:
+                stmt = operation
+        except (UnicodeDecodeError, UnicodeEncodeError) as err:
+            raise errors.ProgrammingError(str(err))
+
+        if params:
+            prepared = self._cnx.prepare_for_mysql(params)
+            if isinstance(prepared, dict):
+                # Named style: substitute %(key)s placeholders.
+                for key, value in prepared.items():
+                    stmt = stmt.replace("%({0})s".format(key).encode(), value)
+            elif isinstance(prepared, (list, tuple)):
+                # Positional style: substitute %s placeholders in order.
+                psub = _ParamSubstitutor(prepared)
+                stmt = RE_PY_PARAM.sub(psub, stmt)
+                if psub.remaining != 0:
+                    raise errors.ProgrammingError(
+                        "Not all parameters were used in the SQL statement")
+
+        try:
+            result = self._cnx.cmd_query(stmt, raw=self._raw,
+                                         buffered=self._buffered,
+                                         raw_as_string=self._raw_as_string)
+        except MySQLInterfaceError as exc:
+            raise errors.get_mysql_exception(msg=exc.msg, errno=exc.errno,
+                                             sqlstate=exc.sqlstate)
+
+        self._executed = stmt
+        self._handle_result(result)
+
+        if multi:
+            return self._execute_iter()
+
+        return None
+
+    def _batch_insert(self, operation, seq_params):
+        """Implements multi row insert
+
+        Rewrites an INSERT so all rows in seq_params are sent in a single
+        statement. Returns the rewritten statement as bytes, or None when
+        the VALUES clause could not be located in the encoded statement.
+        """
+        def remove_comments(match):
+            """Remove comments from INSERT statements.
+
+            This function is used while removing comments from INSERT
+            statements. If the matched string is a comment not enclosed
+            by quotes, it returns an empty string, else the string itself.
+            """
+            if match.group(1):
+                return ""
+            return match.group(2)
+
+        # Strip comments and an ON DUPLICATE KEY clause before matching.
+        tmp = re.sub(RE_SQL_ON_DUPLICATE, '',
+                     re.sub(RE_SQL_COMMENT, remove_comments, operation))
+
+        matches = re.search(RE_SQL_INSERT_VALUES, tmp)
+        if not matches:
+            raise errors.InterfaceError(
+                "Failed rewriting statement for multi-row INSERT. "
+                "Check SQL syntax."
+            )
+        # fmt is the "(%s, %s, ...)" VALUES template to fill per row.
+        fmt = matches.group(1).encode(self._cnx.python_charset)
+        values = []
+
+        try:
+            stmt = operation.encode(self._cnx.python_charset)
+            for params in seq_params:
+                tmp = fmt
+                prepared = self._cnx.prepare_for_mysql(params)
+                if isinstance(prepared, dict):
+                    for key, value in prepared.items():
+                        tmp = tmp.replace("%({0})s".format(key).encode(), value)
+                elif isinstance(prepared, (list, tuple)):
+                    psub = _ParamSubstitutor(prepared)
+                    tmp = RE_PY_PARAM.sub(psub, tmp)
+                    if psub.remaining != 0:
+                        raise errors.ProgrammingError(
+                            "Not all parameters were used in the SQL statement")
+                values.append(tmp)
+
+            # Replace the first VALUES template with all joined row tuples.
+            if fmt in stmt:
+                stmt = stmt.replace(fmt, b','.join(values), 1)
+                self._executed = stmt
+                return stmt
+            return None
+        except (UnicodeDecodeError, UnicodeEncodeError) as err:
+            raise errors.ProgrammingError(str(err))
+        except Exception as err:
+            raise errors.InterfaceError(
+                "Failed executing the operation; %s" % err)
+
+
+    def executemany(self, operation, seq_params):
+        """Execute the given operation multiple times
+
+        INSERT statements are rewritten into a single multi-row INSERT;
+        all other statements are executed once per parameter set.
+        Raises ProgrammingError when seq_params is not a list or tuple.
+        """
+        if not operation or not seq_params:
+            return None
+
+        if not self._cnx:
+            raise errors.ProgrammingError("Cursor is not connected")
+        self._cnx.handle_unread_result()
+
+        if not isinstance(seq_params, (list, tuple)):
+            raise errors.ProgrammingError(
+                "Parameters for query must be list or tuple.")
+
+        # Optimize INSERTs by batching them
+        if re.match(RE_SQL_INSERT_STMT, operation):
+            if not seq_params:
+                self._rowcount = 0
+                return None
+            stmt = self._batch_insert(operation, seq_params)
+            if stmt is not None:
+                self._executed = stmt
+                return self.execute(stmt)
+
+        rowcnt = 0
+        try:
+            for params in seq_params:
+                self.execute(operation, params)
+                try:
+                    # Accumulate the row count over every result set.
+                    while True:
+                        if self._description:
+                            rowcnt += len(self._cnx.get_rows()[0])
+                        else:
+                            rowcnt += self._affected_rows
+                        if not self.nextset():
+                            break
+                except StopIteration:
+                    # No more results
+                    pass
+
+        except (ValueError, TypeError) as err:
+            raise errors.ProgrammingError(
+                "Failed executing the operation; {0}".format(err))
+
+        self._rowcount = rowcnt
+        return None
+
+    @property
+    def description(self):
+        """Returns description of columns in a result
+
+        None when the last statement produced no result set.
+        """
+        return self._description
+
+    @property
+    def rowcount(self):
+        """Returns the number of rows produced or affected"""
+        # -1 means no result set was produced; report affected rows instead.
+        if self._rowcount == -1:
+            return self._affected_rows
+        return self._rowcount
+
+    @property
+    def lastrowid(self):
+        """Returns the value generated for an AUTO_INCREMENT column"""
+        return self._insert_id
+
+    def close(self):
+        """Close the cursor
+
+        The result will be freed.
+
+        Returns False when the cursor was already closed, True otherwise.
+        """
+        if not self._cnx:
+            return False
+
+        # Drain any unread result before detaching from the connection.
+        self._cnx.handle_unread_result()
+        self._warnings = None
+        self._cnx = None
+        return True
+
+    def callproc(self, procname, args=()):
+        """Calls a stored procedure with the given arguments
+
+        IN/OUT arguments are passed through MySQL session variables named
+        @_<procname>_arg<N>. Result sets produced by the procedure are
+        stored as buffered cursors, retrievable via stored_results().
+
+        Returns the OUT/INOUT values as one row, or an empty tuple.
+        """
+        if not procname or not isinstance(procname, str):
+            raise ValueError("procname must be a string")
+
+        if not isinstance(args, (tuple, list)):
+            raise ValueError("args must be a sequence")
+
+        argfmt = "@_{name}_arg{index}"
+        self._stored_results = []
+
+        try:
+            argnames = []
+            argtypes = []
+            if args:
+                for idx, arg in enumerate(args):
+                    argname = argfmt.format(name=procname, index=idx + 1)
+                    argnames.append(argname)
+                    if isinstance(arg, tuple):
+                        # (value, SQL type) tuple: cast when reading back.
+                        argtypes.append(" CAST({0} AS {1})".format(argname,
+                                                                   arg[1]))
+                        self.execute("SET {0}=%s".format(argname), (arg[0],))
+                    else:
+                        argtypes.append(argname)
+                        self.execute("SET {0}=%s".format(argname), (arg,))
+
+            call = "CALL {0}({1})".format(procname, ','.join(argnames))
+
+            result = self._cnx.cmd_query(call, raw=self._raw,
+                                         raw_as_string=self._raw_as_string)
+
+            results = []
+            while self._cnx.result_set_available:
+                result = self._cnx.fetch_eof_columns()
+                # pylint: disable=W0212
+                # Pick a buffered cursor class matching this cursor's flavor.
+                if isinstance(self, (CMySQLCursorDict,
+                                     CMySQLCursorBufferedDict)):
+                    cursor_class = CMySQLCursorBufferedDict
+                elif isinstance(self, (CMySQLCursorNamedTuple,
+                                       CMySQLCursorBufferedNamedTuple)):
+                    cursor_class = CMySQLCursorBufferedNamedTuple
+                elif self._raw:
+                    cursor_class = CMySQLCursorBufferedRaw
+                else:
+                    cursor_class = CMySQLCursorBuffered
+                cur = cursor_class(self._cnx._get_self())
+                cur._executed = "(a result of {0})".format(call)
+                cur._handle_result(result)
+                # pylint: enable=W0212
+                results.append(cur)
+                self._cnx.next_result()
+            self._stored_results = results
+            self._handle_eof()
+
+            if argnames:
+                self.reset()
+                # Create names aliases to be compatible with namedtuples
+                args = [
+                    "{} AS {}".format(name, alias) for name, alias in
+                    zip(argtypes, [arg.lstrip("@_") for arg in argnames])
+                ]
+                select = "SELECT {}".format(",".join(args))
+                self.execute(select)
+
+                return self.fetchone()
+            return tuple()
+
+        except errors.Error:
+            raise
+        except Exception as err:
+            raise errors.InterfaceError(
+                "Failed calling stored routine; {0}".format(err))
+
+    def nextset(self):
+        """Skip to the next available result set
+
+        Returns True when another result set is available, None when
+        there are no more results. Raises InterfaceError with
+        CR_NO_RESULT_SET when the next result has no result set.
+        """
+        if not self._cnx.next_result():
+            self.reset(free=True)
+            return None
+        self.reset(free=False)
+
+        if not self._cnx.result_set_available:
+            # Statement produced no rows (e.g. an UPDATE inside multi).
+            eof = self._cnx.fetch_eof_status()
+            self._handle_result(eof)
+            raise errors.InterfaceError(errno=CR_NO_RESULT_SET)
+
+        self._handle_result(self._cnx.fetch_eof_columns())
+        return True
+
+    def fetchall(self):
+        """Returns all rows of a query result set
+
+        Returns a list of tuples.
+        """
+        self._check_executed()
+        if not self._cnx.unread_result:
+            return []
+
+        rows = self._cnx.get_rows()
+        # A row read ahead by fetchone()/fetchmany() is re-inserted first.
+        if self._nextrow and self._nextrow[0]:
+            rows[0].insert(0, self._nextrow[0])
+
+        if not rows[0]:
+            self._handle_eof()
+            return []
+
+        self._rowcount += len(rows[0])
+        self._handle_eof()
+        #self._cnx.handle_unread_result()
+        return rows[0]
+
+    def fetchmany(self, size=1):
+        """Returns the next set of rows of a result set
+
+        Returns at most `size` rows as a list of tuples; an empty list
+        when the result set is exhausted.
+        """
+        self._check_executed()
+        # Serve the read-ahead row first, if one is cached.
+        if self._nextrow and self._nextrow[0]:
+            rows = [self._nextrow[0]]
+            size -= 1
+        else:
+            rows = []
+
+        if size and self._cnx.unread_result:
+            rows.extend(self._cnx.get_rows(size)[0])
+
+        if size:
+            if self._cnx.unread_result:
+                # Read one row ahead to detect the end of the result set.
+                self._nextrow = self._cnx.get_row()
+                if self._nextrow and not self._nextrow[0] and \
+                        not self._cnx.more_results:
+                    self._cnx.free_result()
+        else:
+            self._nextrow = (None, None)
+
+        if not rows:
+            self._handle_eof()
+            return []
+
+        self._rowcount += len(rows)
+        return rows
+
+    def fetchone(self):
+        """Returns next row of a query result set
+
+        Returns a tuple or None when no more rows are available.
+        """
+        self._check_executed()
+        row = self._nextrow
+        if not row and self._cnx.unread_result:
+            row = self._cnx.get_row()
+
+        if row and row[0]:
+            # Read ahead so the end of the result set is detected early.
+            self._nextrow = self._cnx.get_row()
+            if not self._nextrow[0] and not self._cnx.more_results:
+                self._cnx.free_result()
+        else:
+            self._handle_eof()
+            return None
+        self._rowcount += 1
+        return row[0]
+
+    def __iter__(self):
+        """Iteration over the result set
+
+        Iteration over the result set which calls self.fetchone()
+        and returns the next row.
+        """
+        # iter(callable, sentinel): stops when fetchone() returns None.
+        return iter(self.fetchone, None)
+
+    def stored_results(self):
+        """Returns an iterator for stored results
+
+        This method returns an iterator over results which are stored when
+        callproc() is called. The iterator will provide MySQLCursorBuffered
+        instances.
+
+        Returns a iterator.
+        """
+        for i in range(len(self._stored_results)):
+            yield self._stored_results[i]
+
+        # Stored results are consumed once; clear after full iteration.
+        self._stored_results = []
+
+    def __next__(self):
+        """Iteration over the result set
+        Used for iterating over the result set. Calls self.fetchone()
+        to get the next row.
+
+        Raises StopIteration when no more rows are available.
+        """
+        try:
+            row = self.fetchone()
+        except errors.InterfaceError:
+            # No result to fetch: treat as end of iteration.
+            raise StopIteration
+        if not row:
+            raise StopIteration
+        return row
+
+    @property
+    def column_names(self):
+        """Returns column names
+
+        This property returns the columns names as a tuple.
+
+        Returns a tuple.
+        """
+        if not self.description:
+            return ()
+        # description entries are sequences whose first item is the name.
+        return tuple([d[0] for d in self.description])
+
+    @property
+    def statement(self):
+        """Returns the executed statement
+
+        This property returns the executed statement. When multiple
+        statements were executed, the current statement in the iterator
+        will be returned.
+        """
+        try:
+            return self._executed.strip().decode('utf8')
+        except AttributeError:
+            # _executed is already a str (no decode method).
+            return self._executed.strip()
+
+    @property
+    def with_rows(self):
+        """Returns whether the cursor could have rows returned
+
+        This property returns True when column descriptions are available
+        and possibly also rows, which will need to be fetched.
+
+        Returns True or False.
+        """
+        if self.description:
+            return True
+        return False
+
+    def __str__(self):
+        """Readable representation: class name plus (truncated) statement."""
+        fmt = "{class_name}: {stmt}"
+        if self._executed:
+            try:
+                executed = self._executed.decode('utf-8')
+            except AttributeError:
+                # Already a str.
+                executed = self._executed
+            if len(executed) > 40:
+                executed = executed[:40] + '..'
+        else:
+            executed = '(Nothing executed yet)'
+
+        return fmt.format(class_name=self.__class__.__name__, stmt=executed)
+
+
+class CMySQLCursorBuffered(CMySQLCursor):
+
+    """Cursor using C Extension buffering results
+
+    All rows of a result set are fetched into memory immediately after
+    execution; fetch methods then serve rows from the local buffer.
+    """
+
+    def __init__(self, connection):
+        """Initialize"""
+        super(CMySQLCursorBuffered, self).__init__(connection)
+
+        # Local row buffer and read position within it.
+        self._rows = None
+        self._next_row = 0
+
+    def _handle_resultset(self):
+        """Handle a result set
+
+        Fetches all rows into the buffer and finalizes the result.
+        """
+        self._rows = self._cnx.get_rows()[0]
+        self._next_row = 0
+        self._rowcount = len(self._rows)
+        self._handle_eof()
+
+    def reset(self, free=True):
+        """Reset the cursor to default"""
+        self._rows = None
+        self._next_row = 0
+        super(CMySQLCursorBuffered, self).reset(free=free)
+
+    def _fetch_row(self):
+        """Returns the next row in the result set
+
+        Returns a tuple or None.
+        """
+        row = None
+        try:
+            row = self._rows[self._next_row]
+        except IndexError:
+            # Buffer exhausted.
+            return None
+        else:
+            self._next_row += 1
+
+        return row
+
+    def fetchall(self):
+        """Return all remaining buffered rows as a list."""
+        self._check_executed()
+        res = self._rows[self._next_row:]
+        self._next_row = len(self._rows)
+        return res
+
+    def fetchmany(self, size=1):
+        """Return at most `size` rows (or self.arraysize when size is 0)."""
+        self._check_executed()
+        res = []
+        cnt = size or self.arraysize
+        while cnt > 0:
+            cnt -= 1
+            row = self._fetch_row()
+            if row:
+                res.append(row)
+            else:
+                break
+        return res
+
+    def fetchone(self):
+        """Return the next buffered row or None."""
+        self._check_executed()
+        return self._fetch_row()
+
+
+class CMySQLCursorRaw(CMySQLCursor):
+
+    """Cursor using C Extension return raw results
+
+    Rows are returned without conversion to Python types.
+    """
+
+    _raw = True
+
+
+class CMySQLCursorBufferedRaw(CMySQLCursorBuffered):
+
+    """Cursor using C Extension buffering raw results
+
+    Combines buffering with unconverted (raw) row values.
+    """
+
+    _raw = True
+
+
+class CMySQLCursorDict(CMySQLCursor):
+
+    """Cursor using C Extension returning rows as dictionaries
+
+    Each row is a dict mapping column names to values.
+    """
+
+    _raw = False
+
+    def fetchone(self):
+        """Returns the next row as a dictionary, or None.
+        """
+        row = super(CMySQLCursorDict, self).fetchone()
+        if row:
+            return dict(zip(self.column_names, row))
+        return None
+
+    def fetchmany(self, size=1):
+        """Returns next set of rows as list of dictionaries"""
+        res = super(CMySQLCursorDict, self).fetchmany(size=size)
+        return [dict(zip(self.column_names, row)) for row in res]
+
+    def fetchall(self):
+        """Returns all rows of a query result set as list of dictionaries"""
+        res = super(CMySQLCursorDict, self).fetchall()
+        return [dict(zip(self.column_names, row)) for row in res]
+
+
+class CMySQLCursorBufferedDict(CMySQLCursorBuffered):
+
+    """Cursor using C Extension buffering and returning rows as dictionaries"""
+
+    _raw = False
+
+    def _fetch_row(self):
+        """Return the next buffered row as a dict, or None."""
+        row = super(CMySQLCursorBufferedDict, self)._fetch_row()
+        if row:
+            return dict(zip(self.column_names, row))
+        return None
+
+    def fetchall(self):
+        """Return all remaining buffered rows as a list of dicts."""
+        res = super(CMySQLCursorBufferedDict, self).fetchall()
+        return [dict(zip(self.column_names, row)) for row in res]
+
+
+class CMySQLCursorNamedTuple(CMySQLCursor):
+
+    """Cursor using C Extension returning rows as named tuples"""
+
+    def _handle_resultset(self):
+        """Handle a result set
+
+        Builds (or reuses a cached) namedtuple class for the result's
+        column names.
+        """
+        super(CMySQLCursorNamedTuple, self)._handle_resultset()
+        # pylint: disable=W0201
+        columns = tuple(self.column_names)
+        try:
+            self.named_tuple = NAMED_TUPLE_CACHE[columns]
+        except KeyError:
+            self.named_tuple = namedtuple('Row', columns)
+            NAMED_TUPLE_CACHE[columns] = self.named_tuple
+        # pylint: enable=W0201
+
+    def fetchone(self):
+        """Returns the next row as a named tuple, or None.
+        """
+        row = super(CMySQLCursorNamedTuple, self).fetchone()
+        if row:
+            return self.named_tuple(*row)
+        return None
+
+    def fetchmany(self, size=1):
+        """Returns next set of rows as list of named tuples"""
+        res = super(CMySQLCursorNamedTuple, self).fetchmany(size=size)
+        if not res:
+            return []
+        # NOTE(review): only the first fetched row is converted/returned
+        # here, regardless of size — looks inconsistent with fetchall();
+        # verify against upstream before relying on it.
+        return [self.named_tuple(*res[0])]
+
+    def fetchall(self):
+        """Returns all rows of a query result set as list of named tuples"""
+        res = super(CMySQLCursorNamedTuple, self).fetchall()
+        return [self.named_tuple(*row) for row in res]
+
+
+class CMySQLCursorBufferedNamedTuple(CMySQLCursorBuffered):
+
+    """Cursor using C Extension buffering and returning rows as named tuples"""
+
+    def _handle_resultset(self):
+        """Buffer the result set and build the row namedtuple class."""
+        super(CMySQLCursorBufferedNamedTuple, self)._handle_resultset()
+        # pylint: disable=W0201
+        self.named_tuple = namedtuple('Row', self.column_names)
+        # pylint: enable=W0201
+
+    def _fetch_row(self):
+        """Return the next buffered row as a named tuple, or None."""
+        row = super(CMySQLCursorBufferedNamedTuple, self)._fetch_row()
+        if row:
+            return self.named_tuple(*row)
+        return None
+
+    def fetchall(self):
+        """Return all remaining buffered rows as named tuples."""
+        res = super(CMySQLCursorBufferedNamedTuple, self).fetchall()
+        return [self.named_tuple(*row) for row in res]
+
+
+class CMySQLCursorPrepared(CMySQLCursor):
+
+    """Cursor using MySQL Prepared Statements"""
+
+    def __init__(self, connection):
+        super(CMySQLCursorPrepared, self).__init__(connection)
+        self._rows = None
+        self._rowcount = 0
+        self._next_row = 0
+        # Prepared statements use the binary protocol.
+        self._binary = True
+        # Handle of the currently prepared statement, if any.
+        self._stmt = None
+
+    def _handle_eof(self):
+        """Handle EOF packet"""
+        self._nextrow = (None, None)
+        self._handle_warnings()
+        if self._cnx.raise_on_warnings is True and self._warnings:
+            raise errors.get_mysql_exception(
+                self._warnings[0][1], self._warnings[0][2])
+
+    def _fetch_row(self, raw=False):
+        """Returns the next row in the result set
+
+        Returns a tuple or None.
+        """
+        if not self._stmt or not self._stmt.have_result_set:
+            return None
+        row = None
+
+        if self._nextrow == (None, None):
+            (row, eof) = self._cnx.get_row(
+                binary=self._binary, columns=self.description, raw=raw,
+                prep_stmt=self._stmt)
+        else:
+            # Serve the read-ahead row cached by a previous call.
+            (row, eof) = self._nextrow
+
+        if row:
+            # Read one row ahead to detect EOF early.
+            self._nextrow = self._cnx.get_row(
+                binary=self._binary, columns=self.description, raw=raw,
+                prep_stmt=self._stmt)
+            eof = self._nextrow[1]
+            if eof is not None:
+                self._warning_count = eof["warning_count"]
+                self._handle_eof()
+            if self._rowcount == -1:
+                self._rowcount = 1
+            else:
+                self._rowcount += 1
+        if eof:
+            self._warning_count = eof["warning_count"]
+            self._handle_eof()
+
+        return row
+
+    def callproc(self, procname, args=None):
+        """Calls a stored procedure
+
+        Not supported with CMySQLCursorPrepared.
+        """
+        raise errors.NotSupportedError()
+
+    def close(self):
+        """Close the cursor
+
+        This method will try to deallocate the prepared statement and close
+        the cursor.
+        """
+        if self._stmt:
+            self.reset()
+            self._cnx.cmd_stmt_close(self._stmt)
+            self._stmt = None
+        super(CMySQLCursorPrepared, self).close()
+
+    def reset(self, free=True):
+        """Resets the prepared statement."""
+        if self._stmt:
+            self._cnx.cmd_stmt_reset(self._stmt)
+        super(CMySQLCursorPrepared, self).reset(free=free)
+
+    def execute(self, operation, params=None, multi=False):  # multi is unused
+        """Prepare and execute a MySQL Prepared Statement
+
+        This method will prepare the given operation and execute it using
+        the given parameters.
+
+        If the cursor instance already had a prepared statement, it is
+        first closed.
+        """
+        if not operation:
+            return
+
+        if not self._cnx or self._cnx.is_closed():
+            raise errors.ProgrammingError("Cursor is not connected", 2055)
+
+        self._cnx.handle_unread_result(prepared=True)
+
+        # Only (re-)prepare when the statement changed since the last call;
+        # identity comparison keeps re-execution of the same object cheap.
+        if operation is not self._executed:
+            if self._stmt:
+                self._cnx.cmd_stmt_close(self._stmt)
+
+            self._executed = operation
+
+            try:
+                if not isinstance(operation, bytes):
+                    charset = self._cnx.charset
+                    if charset == "utf8mb4":
+                        charset = "utf8"
+                    operation = operation.encode(charset)
+            except (UnicodeDecodeError, UnicodeEncodeError) as err:
+                raise errors.ProgrammingError(str(err))
+
+            # need to convert %s to ? before sending it to MySQL
+            if b"%s" in operation:
+                operation = re.sub(RE_SQL_FIND_PARAM, b"?", operation)
+
+            try:
+                self._stmt = self._cnx.cmd_stmt_prepare(operation)
+            except errors.Error:
+                self._executed = None
+                self._stmt = None
+                raise
+
+        self._cnx.cmd_stmt_reset(self._stmt)
+
+        # Statement expects parameters but none given: silently return.
+        if self._stmt.param_count > 0 and not params:
+            return
+        elif params:
+            if not isinstance(params, (tuple, list)):
+                raise errors.ProgrammingError(
+                    errno=1210,
+                    msg=f"Incorrect type of argument: {type(params).__name__}({params})"
+                    ", it must be of type tuple or list the argument given to "
+                    "the prepared statement")
+            if self._stmt.param_count != len(params):
+                raise errors.ProgrammingError(
+                    errno=1210,
+                    msg="Incorrect number of arguments executing prepared "
+                    "statement")
+
+        if params is None:
+            params = ()
+        res = self._cnx.cmd_stmt_execute(self._stmt, *params)
+        if res:
+            self._handle_result(res)
+
+    def executemany(self, operation, seq_params):
+        """Prepare and execute a MySQL Prepared Statement many times
+
+        This method will prepare the given operation and execute with each
+        tuple found the list seq_params.
+
+        If the cursor instance already had a prepared statement, it is
+        first closed.
+        """
+        rowcnt = 0
+        try:
+            for params in seq_params:
+                self.execute(operation, params)
+                if self.with_rows:
+                    # Consume rows so the next execute() finds a clean state.
+                    self.fetchall()
+                rowcnt += self._rowcount
+        except (ValueError, TypeError) as err:
+            raise errors.InterfaceError(
+                "Failed executing the operation; {error}".format(error=err))
+        except:
+            # Raise whatever execute() raises
+            raise
+        self._rowcount = rowcnt
+
+    def fetchone(self):
+        """Returns next row of a query result set
+
+        Returns a tuple or None.
+        """
+        self._check_executed()
+        return self._fetch_row() or None
+
+    def fetchmany(self, size=None):
+        """Returns the next set of rows of a result set
+
+        Returns a list of tuples.
+        """
+        self._check_executed()
+        res = []
+        cnt = size or self.arraysize
+        while cnt > 0 and self._stmt.have_result_set:
+            cnt -= 1
+            row = self._fetch_row()
+            if row:
+                res.append(row)
+        return res
+
+    def fetchall(self):
+        """Returns all rows of a query result set
+
+        Returns a list of tuples.
+        """
+        self._check_executed()
+        if not self._stmt.have_result_set:
+            return []
+
+        rows = self._cnx.get_rows(prep_stmt=self._stmt)
+        # Re-insert the read-ahead row cached by _fetch_row(), if any.
+        if self._nextrow and self._nextrow[0]:
+            rows[0].insert(0, self._nextrow[0])
+
+        if not rows[0]:
+            self._handle_eof()
+            return []
+
+        self._rowcount += len(rows[0])
+        self._handle_eof()
+        return rows[0]
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/custom_types.py b/monEnvTP/lib/python3.8/site-packages/mysql/connector/custom_types.py
new file mode 100644
index 0000000000000000000000000000000000000000..3613af6935ec1b93b24a9d70b420a2cdcd826261
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql/connector/custom_types.py
@@ -0,0 +1,50 @@
+# Copyright (c) 2014, Oracle and/or its affiliates. All rights reserved.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""Custom Python types used by MySQL Connector/Python"""
+
+
+import sys
+
+
+class HexLiteral(str):
+
+    """Class holding MySQL hex literals
+
+    The str value is the hex digits only; __str__ prepends the 0x prefix.
+    The original string and charset are kept as attributes.
+    """
+
+    def __new__(cls, str_, charset='utf8'):
+        # Python 2 bytes iterate as 1-char strings, hence ord(); Python 3
+        # bytes iterate as ints.
+        if sys.version_info[0] == 2:
+            hexed = ["%02x" % ord(i) for i in str_.encode(charset)]
+        else:
+            hexed = ["%02x" % i for i in str_.encode(charset)]
+        obj = str.__new__(cls, ''.join(hexed))
+        obj.charset = charset
+        obj.original = str_
+        return obj
+
+    def __str__(self):
+        # Render as a MySQL hex literal, e.g. '0x6162'.
+        return '0x' + self
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/dbapi.py b/monEnvTP/lib/python3.8/site-packages/mysql/connector/dbapi.py
new file mode 100644
index 0000000000000000000000000000000000000000..873cfbb8c009290124d202cd1f3ff6cfcf888283
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql/connector/dbapi.py
@@ -0,0 +1,80 @@
+# Copyright (c) 2009, 2017, Oracle and/or its affiliates. All rights reserved.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""
+This module implements some constructors and singletons as required by the
+DB API v2.0 (PEP-249).
+"""
+
+# Python Db API v2
+# Module globals required by PEP 249: API level, thread-safety level
+# (1: threads may share the module, but not connections) and the
+# parameter marker style used by this connector.
+apilevel = '2.0'
+threadsafety = 1
+paramstyle = 'pyformat'
+
+import time
+import datetime
+
+from . import constants
+
+class _DBAPITypeObject(object):
+    """Helper comparing equal to any of a set of type codes (PEP 249)."""
+
+    def __init__(self, *values):
+        self.values = values
+
+    def __eq__(self, other):
+        # Equal when `other` is one of the wrapped type codes.
+        if other in self.values:
+            return True
+        else:
+            return False
+
+    def __ne__(self, other):
+        if other in self.values:
+            return False
+        else:
+            return True
+
+# PEP 249 type constructors: aliases onto the datetime module.
+Date = datetime.date
+Time = datetime.time
+Timestamp = datetime.datetime
+
+def DateFromTicks(ticks):
+    # Build a Date from a Unix timestamp (local time).
+    return Date(*time.localtime(ticks)[:3])
+
+def TimeFromTicks(ticks):
+    # Build a Time from a Unix timestamp (local time).
+    return Time(*time.localtime(ticks)[3:6])
+
+def TimestampFromTicks(ticks):
+    # Build a Timestamp from a Unix timestamp (local time).
+    return Timestamp(*time.localtime(ticks)[:6])
+
+Binary = bytes
+
+# PEP 249 type singletons grouping the MySQL field-type codes.
+STRING = _DBAPITypeObject(*constants.FieldType.get_string_types())
+BINARY = _DBAPITypeObject(*constants.FieldType.get_binary_types())
+NUMBER = _DBAPITypeObject(*constants.FieldType.get_number_types())
+DATETIME = _DBAPITypeObject(*constants.FieldType.get_timestamp_types())
+ROWID = _DBAPITypeObject()
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/__init__.py b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2c9dc25c8364c9644c0f60d6354c92d68c36a593
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/__pycache__/base.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/__pycache__/base.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..395a68b0ec2130f7cd2582a7b6344c5607f1e62f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/__pycache__/base.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/__pycache__/client.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/__pycache__/client.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..dfe39bf3bdf4b4b116e175e5a68b81316ed20b55
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/__pycache__/client.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/__pycache__/compiler.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/__pycache__/compiler.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..df2244df2c6c97577040a8fdefb5ce2502fc40ad
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/__pycache__/compiler.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/__pycache__/creation.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/__pycache__/creation.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2c776892b5186995beeae1dfce5dca24fb5642e5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/__pycache__/creation.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/__pycache__/features.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/__pycache__/features.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..cfa81dc729ba84a953f41c7b5e05121e2741b7de
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/__pycache__/features.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/__pycache__/introspection.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/__pycache__/introspection.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f0eb97d4cfcce1e608f55c7cd16494de374a36dc
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/__pycache__/introspection.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/__pycache__/operations.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/__pycache__/operations.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5d9425831b18756349a9ecbdce1d9de15f2f1519
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/__pycache__/operations.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/__pycache__/schema.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/__pycache__/schema.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f8f3d534c0a7d134e3205fb5d55e11dd50aeb969
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/__pycache__/schema.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/__pycache__/validation.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/__pycache__/validation.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ab5369d949fbe64d13371b11f93cabc31bfc1dba
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/__pycache__/validation.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/base.py b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/base.py
new file mode 100644
index 0000000000000000000000000000000000000000..e0fe11ffd9ae09427c6e06299d517371975cd4c8
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/base.py
@@ -0,0 +1,538 @@
+# Copyright (c) 2020, 2021, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""Django database Backend using MySQL Connector/Python.
+
+This Django database backend is heavily based on the MySQL backend from Django.
+
+Changes include:
+* Support for microseconds (MySQL 5.6.3 and later)
+* Using INFORMATION_SCHEMA where possible
+* Using new defaults for, for example SQL_AUTO_IS_NULL
+
+Requires and comes with MySQL Connector/Python v8.0.22 and later:
+ http://dev.mysql.com/downloads/connector/python/
+"""
+
+import warnings
+import sys
+
+from datetime import datetime
+
+from django.conf import settings
+from django.core.exceptions import ImproperlyConfigured
+from django.db import IntegrityError
+from django.db.backends.base.base import BaseDatabaseWrapper
+from django.db import utils
+from django.utils.functional import cached_property
+from django.utils import dateparse, timezone
+
+try:
+ import mysql.connector
+ from mysql.connector.conversion import MySQLConverter
+except ImportError as err:
+ raise ImproperlyConfigured(
+ "Error loading mysql.connector module: {0}".format(err))
+
+try:
+ from _mysql_connector import datetime_to_mysql, time_to_mysql
+except ImportError:
+ HAVE_CEXT = False
+else:
+ HAVE_CEXT = True
+
+from .client import DatabaseClient
+from .creation import DatabaseCreation
+from .introspection import DatabaseIntrospection
+from .validation import DatabaseValidation
+from .features import DatabaseFeatures
+from .operations import DatabaseOperations
+from .schema import DatabaseSchemaEditor
+
+
+Error = mysql.connector.Error
+DatabaseError = mysql.connector.DatabaseError
+NotSupportedError = mysql.connector.NotSupportedError
+OperationalError = mysql.connector.OperationalError
+ProgrammingError = mysql.connector.ProgrammingError
+
+
+def adapt_datetime_with_timezone_support(value):
+ # Equivalent to DateTimeField.get_db_prep_value. Used only by raw SQL.
+ if settings.USE_TZ:
+ if timezone.is_naive(value):
+ warnings.warn("MySQL received a naive datetime (%s)"
+ " while time zone support is active." % value,
+ RuntimeWarning)
+ default_timezone = timezone.get_default_timezone()
+ value = timezone.make_aware(value, default_timezone)
+ value = value.astimezone(timezone.utc).replace(tzinfo=None)
+ if HAVE_CEXT:
+ return datetime_to_mysql(value)
+ else:
+ return value.strftime("%Y-%m-%d %H:%M:%S.%f")
+
+
+class CursorWrapper:
+ """Wrapper around MySQL Connector/Python's cursor class.
+
+ The cursor class is defined by the options passed to MySQL
+ Connector/Python. If buffered option is True in those options,
+ MySQLCursorBuffered will be used.
+ """
+ codes_for_integrityerror = (
+ 1048, # Column cannot be null
+ 1690, # BIGINT UNSIGNED value is out of range
+ 3819, # CHECK constraint is violated
+ 4025, # CHECK constraint failed
+ )
+
+ def __init__(self, cursor):
+ self.cursor = cursor
+
+ def _adapt_execute_args_dict(self, args):
+ if not args:
+ return args
+ new_args = dict(args)
+ for key, value in args.items():
+ if isinstance(value, datetime):
+ new_args[key] = adapt_datetime_with_timezone_support(value)
+
+ return new_args
+
+ def _adapt_execute_args(self, args):
+ if not args:
+ return args
+ new_args = list(args)
+ for i, arg in enumerate(args):
+ if isinstance(arg, datetime):
+ new_args[i] = adapt_datetime_with_timezone_support(arg)
+
+ return tuple(new_args)
+
+ def execute(self, query, args=None):
+ """Executes the given operation
+
+ This wrapper method around the execute()-method of the cursor is
+ mainly needed to re-raise using different exceptions.
+ """
+ if isinstance(args, dict):
+ new_args = self._adapt_execute_args_dict(args)
+ else:
+ new_args = self._adapt_execute_args(args)
+ try:
+ return self.cursor.execute(query, new_args)
+ except mysql.connector.OperationalError as e:
+ if e.args[0] in self.codes_for_integrityerror:
+ raise IntegrityError(*tuple(e.args))
+ raise
+
+ def executemany(self, query, args):
+ """Executes the given operation
+
+ This wrapper method around the executemany()-method of the cursor is
+ mainly needed to re-raise using different exceptions.
+ """
+ try:
+ return self.cursor.executemany(query, args)
+ except mysql.connector.OperationalError as e:
+ if e.args[0] in self.codes_for_integrityerror:
+ raise IntegrityError(*tuple(e.args))
+ raise
+
+ def __getattr__(self, attr):
+ """Return attribute of wrapped cursor"""
+ return getattr(self.cursor, attr)
+
+ def __iter__(self):
+ """Returns iterator over wrapped cursor"""
+ return iter(self.cursor)
+
+
+class DatabaseWrapper(BaseDatabaseWrapper):
+ vendor = 'mysql'
+ # This dictionary maps Field objects to their associated MySQL column
+ # types, as strings. Column-type strings can contain format strings; they'll
+ # be interpolated against the values of Field.__dict__ before being output.
+ # If a column type is set to None, it won't be included in the output.
+ data_types = {
+ 'AutoField': 'integer AUTO_INCREMENT',
+ 'BigAutoField': 'bigint AUTO_INCREMENT',
+ 'BinaryField': 'longblob',
+ 'BooleanField': 'bool',
+ 'CharField': 'varchar(%(max_length)s)',
+ 'DateField': 'date',
+ 'DateTimeField': 'datetime(6)',
+ 'DecimalField': 'numeric(%(max_digits)s, %(decimal_places)s)',
+ 'DurationField': 'bigint',
+ 'FileField': 'varchar(%(max_length)s)',
+ 'FilePathField': 'varchar(%(max_length)s)',
+ 'FloatField': 'double precision',
+ 'IntegerField': 'integer',
+ 'BigIntegerField': 'bigint',
+ 'IPAddressField': 'char(15)',
+ 'GenericIPAddressField': 'char(39)',
+ 'JSONField': 'json',
+ 'NullBooleanField': 'bool',
+ 'OneToOneField': 'integer',
+ 'PositiveBigIntegerField': 'bigint UNSIGNED',
+ 'PositiveIntegerField': 'integer UNSIGNED',
+ 'PositiveSmallIntegerField': 'smallint UNSIGNED',
+ 'SlugField': 'varchar(%(max_length)s)',
+ 'SmallAutoField': 'smallint AUTO_INCREMENT',
+ 'SmallIntegerField': 'smallint',
+ 'TextField': 'longtext',
+ 'TimeField': 'time(6)',
+ 'UUIDField': 'char(32)',
+ }
+
+ # For these data types:
+ # - MySQL < 8.0.13 doesn't accept default values and
+ # implicitly treat them as nullable
+ # - all versions of MySQL doesn't support full width database
+ # indexes
+ _limited_data_types = (
+ 'tinyblob', 'blob', 'mediumblob', 'longblob', 'tinytext', 'text',
+ 'mediumtext', 'longtext', 'json',
+ )
+
+ operators = {
+ 'exact': '= %s',
+ 'iexact': 'LIKE %s',
+ 'contains': 'LIKE BINARY %s',
+ 'icontains': 'LIKE %s',
+ 'regex': 'REGEXP BINARY %s',
+ 'iregex': 'REGEXP %s',
+ 'gt': '> %s',
+ 'gte': '>= %s',
+ 'lt': '< %s',
+ 'lte': '<= %s',
+ 'startswith': 'LIKE BINARY %s',
+ 'endswith': 'LIKE BINARY %s',
+ 'istartswith': 'LIKE %s',
+ 'iendswith': 'LIKE %s',
+ }
+
+ # The patterns below are used to generate SQL pattern lookup clauses when
+ # the right-hand side of the lookup isn't a raw string (it might be an expression
+ # or the result of a bilateral transformation).
+ # In those cases, special characters for LIKE operators (e.g. \, *, _) should be
+ # escaped on database side.
+ #
+ # Note: we use str.format() here for readability as '%' is used as a wildcard for
+ # the LIKE operator.
+ pattern_esc = r"REPLACE(REPLACE(REPLACE({}, '\\', '\\\\'), '%%', '\%%'), '_', '\_')"
+ pattern_ops = {
+ 'contains': "LIKE BINARY CONCAT('%%', {}, '%%')",
+ 'icontains': "LIKE CONCAT('%%', {}, '%%')",
+ 'startswith': "LIKE BINARY CONCAT({}, '%%')",
+ 'istartswith': "LIKE CONCAT({}, '%%')",
+ 'endswith': "LIKE BINARY CONCAT('%%', {})",
+ 'iendswith': "LIKE CONCAT('%%', {})",
+ }
+
+ isolation_levels = {
+ 'read uncommitted',
+ 'read committed',
+ 'repeatable read',
+ 'serializable',
+ }
+
+ Database = mysql.connector
+ SchemaEditorClass = DatabaseSchemaEditor
+ # Classes instantiated in __init__().
+ client_class = DatabaseClient
+ creation_class = DatabaseCreation
+ features_class = DatabaseFeatures
+ introspection_class = DatabaseIntrospection
+ ops_class = DatabaseOperations
+ validation_class = DatabaseValidation
+
+ def __init__(self, *args, **kwargs):
+ super(DatabaseWrapper, self).__init__(*args, **kwargs)
+
+ try:
+ self._use_pure = self.settings_dict['OPTIONS']['use_pure']
+ except KeyError:
+ self._use_pure = not HAVE_CEXT
+
+ self.converter = DjangoMySQLConverter()
+
+ def __getattr__(self, attr):
+ if attr.startswith("mysql_is"):
+ return False
+ raise AttributeError
+
+ def get_connection_params(self):
+ kwargs = {
+ 'charset': 'utf8',
+ 'use_unicode': True,
+ 'buffered': False,
+ 'consume_results': True,
+ }
+
+ settings_dict = self.settings_dict
+
+ if settings_dict['USER']:
+ kwargs['user'] = settings_dict['USER']
+ if settings_dict['NAME']:
+ kwargs['database'] = settings_dict['NAME']
+ if settings_dict['PASSWORD']:
+ kwargs['passwd'] = settings_dict['PASSWORD']
+ if settings_dict['HOST'].startswith('/'):
+ kwargs['unix_socket'] = settings_dict['HOST']
+ elif settings_dict['HOST']:
+ kwargs['host'] = settings_dict['HOST']
+ if settings_dict['PORT']:
+ kwargs['port'] = int(settings_dict['PORT'])
+
+ # Raise exceptions for database warnings if DEBUG is on
+ kwargs['raise_on_warnings'] = settings.DEBUG
+
+ kwargs['client_flags'] = [
+ # Need potentially affected rows on UPDATE
+ mysql.connector.constants.ClientFlag.FOUND_ROWS,
+ ]
+ try:
+ kwargs.update(settings_dict['OPTIONS'])
+ except KeyError:
+ # OPTIONS missing is OK
+ pass
+
+ return kwargs
+
+ def get_new_connection(self, conn_params):
+ if not 'converter_class' in conn_params:
+ conn_params['converter_class'] = DjangoMySQLConverter
+ cnx = mysql.connector.connect(**conn_params)
+
+ return cnx
+
+ def init_connection_state(self):
+ assignments = []
+ if self.features.is_sql_auto_is_null_enabled:
+ # SQL_AUTO_IS_NULL controls whether an AUTO_INCREMENT column on
+ # a recently inserted row will return when the field is tested
+ # for NULL. Disabling this brings this aspect of MySQL in line
+ # with SQL standards.
+ assignments.append('SET SQL_AUTO_IS_NULL = 0')
+
+ if assignments:
+ with self.cursor() as cursor:
+ cursor.execute('; '.join(assignments))
+
+ if 'AUTOCOMMIT' in self.settings_dict:
+ try:
+ self.set_autocommit(self.settings_dict['AUTOCOMMIT'])
+ except AttributeError:
+ self._set_autocommit(self.settings_dict['AUTOCOMMIT'])
+
+ def create_cursor(self, name=None):
+ cursor = self.connection.cursor()
+ return CursorWrapper(cursor)
+
+ def _rollback(self):
+ try:
+ BaseDatabaseWrapper._rollback(self)
+ except NotSupportedError:
+ pass
+
+ def _set_autocommit(self, autocommit):
+ with self.wrap_database_errors:
+ self.connection.autocommit = autocommit
+
+ def disable_constraint_checking(self):
+ """
+ Disable foreign key checks, primarily for use in adding rows with
+ forward references. Always return True to indicate constraint checks
+ need to be re-enabled.
+ """
+ with self.cursor() as cursor:
+ cursor.execute('SET foreign_key_checks=0')
+ return True
+
+ def enable_constraint_checking(self):
+ """
+ Re-enable foreign key checks after they have been disabled.
+ """
+ # Override needs_rollback in case constraint_checks_disabled is
+ # nested inside transaction.atomic.
+ self.needs_rollback, needs_rollback = False, self.needs_rollback
+ try:
+ with self.cursor() as cursor:
+ cursor.execute('SET foreign_key_checks=1')
+ finally:
+ self.needs_rollback = needs_rollback
+
+ def check_constraints(self, table_names=None):
+ """
+ Check each table name in `table_names` for rows with invalid foreign
+ key references. This method is intended to be used in conjunction with
+ `disable_constraint_checking()` and `enable_constraint_checking()`, to
+ determine if rows with invalid references were entered while constraint
+ checks were off.
+ """
+ with self.cursor() as cursor:
+ if table_names is None:
+ table_names = self.introspection.table_names(cursor)
+ for table_name in table_names:
+ primary_key_column_name = self.introspection.get_primary_key_column(cursor, table_name)
+ if not primary_key_column_name:
+ continue
+ key_columns = self.introspection.get_key_columns(cursor, table_name)
+ for column_name, referenced_table_name, referenced_column_name in key_columns:
+ cursor.execute(
+ """
+ SELECT REFERRING.`%s`, REFERRING.`%s` FROM `%s` as REFERRING
+ LEFT JOIN `%s` as REFERRED
+ ON (REFERRING.`%s` = REFERRED.`%s`)
+ WHERE REFERRING.`%s` IS NOT NULL AND REFERRED.`%s` IS NULL
+ """ % (
+ primary_key_column_name, column_name, table_name,
+ referenced_table_name, column_name, referenced_column_name,
+ column_name, referenced_column_name,
+ )
+ )
+ for bad_row in cursor.fetchall():
+ raise IntegrityError(
+ "The row in table '%s' with primary key '%s' has an invalid "
+ "foreign key: %s.%s contains a value '%s' that does not "
+ "have a corresponding value in %s.%s."
+ % (
+ table_name, bad_row[0], table_name, column_name,
+ bad_row[1], referenced_table_name, referenced_column_name,
+ )
+ )
+
+ def is_usable(self):
+ try:
+ self.connection.ping()
+ except Error:
+ return False
+ else:
+ return True
+
+ @cached_property
+ def display_name(self):
+ return 'MySQL'
+
+ @cached_property
+ def data_type_check_constraints(self):
+ if self.features.supports_column_check_constraints:
+ check_constraints = {
+ 'PositiveBigIntegerField': '`%(column)s` >= 0',
+ 'PositiveIntegerField': '`%(column)s` >= 0',
+ 'PositiveSmallIntegerField': '`%(column)s` >= 0',
+ }
+ return check_constraints
+ return {}
+
+ @cached_property
+ def mysql_server_data(self):
+ with self.temporary_connection() as cursor:
+ # Select some server variables and test if the time zone
+ # definitions are installed. CONVERT_TZ returns NULL if 'UTC'
+ # timezone isn't loaded into the mysql.time_zone table.
+ cursor.execute("""
+ SELECT VERSION(),
+ @@sql_mode,
+ @@default_storage_engine,
+ @@sql_auto_is_null,
+ @@lower_case_table_names,
+ CONVERT_TZ('2001-01-01 01:00:00', 'UTC', 'UTC') IS NOT NULL
+ """)
+ row = cursor.fetchone()
+ return {
+ 'version': row[0],
+ 'sql_mode': row[1],
+ 'default_storage_engine': row[2],
+ 'sql_auto_is_null': bool(row[3]),
+ 'lower_case_table_names': bool(row[4]),
+ 'has_zoneinfo_database': bool(row[5]),
+ }
+
+ @cached_property
+ def mysql_server_info(self):
+ with self.temporary_connection() as cursor:
+ cursor.execute('SELECT VERSION()')
+ return cursor.fetchone()[0]
+
+ @cached_property
+ def mysql_version(self):
+ config = self.get_connection_params()
+ with mysql.connector.connect(**config) as conn:
+ server_version = conn.get_server_version()
+ return server_version
+
+ @cached_property
+ def sql_mode(self):
+ with self.cursor() as cursor:
+ cursor.execute('SELECT @@sql_mode')
+ sql_mode = cursor.fetchone()
+ return set(sql_mode[0].split(',') if sql_mode else ())
+
+ @property
+ def use_pure(self):
+ return self._use_pure
+
+
+class DjangoMySQLConverter(MySQLConverter):
+ """Custom converter for Django."""
+ def _TIME_to_python(self, value, dsc=None):
+ """Return MySQL TIME data type as datetime.time()
+
+ Returns datetime.time()
+ """
+ return dateparse.parse_time(value.decode('utf-8'))
+
+ def __DATETIME_to_python(self, value, dsc=None):
+ """Connector/Python always returns naive datetime.datetime
+
+ Connector/Python always returns naive timestamps since MySQL has
+ no time zone support. Since Django needs non-naive, we need to add
+ the UTC time zone.
+
+ Returns datetime.datetime()
+ """
+ if not value:
+ return None
+ dt = MySQLConverter._DATETIME_to_python(self, value)
+ if dt is None:
+ return None
+ if settings.USE_TZ and timezone.is_naive(dt):
+ dt = dt.replace(tzinfo=timezone.utc)
+ return dt
+
+ def _safestring_to_mysql(self, value):
+ return self._str_to_mysql(value)
+
+ def _safetext_to_mysql(self, value):
+ return self._str_to_mysql(value)
+
+ def _safebytes_to_mysql(self, value):
+ return self._bytes_to_mysql(value)
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/client.py b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/client.py
new file mode 100644
index 0000000000000000000000000000000000000000..8bf8fa81eb6e48d82e65a31b334aec72c1b95767
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/client.py
@@ -0,0 +1,79 @@
+# Copyright (c) 2020, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+import subprocess
+
+from django.db.backends.base.client import BaseDatabaseClient
+
+
+class DatabaseClient(BaseDatabaseClient):
+ executable_name = 'mysql'
+
+ @classmethod
+ def settings_to_cmd_args(cls, settings_dict):
+ args = [cls.executable_name]
+
+ db = settings_dict['OPTIONS'].get('database', settings_dict['NAME'])
+ user = settings_dict['OPTIONS'].get('user',
+ settings_dict['USER'])
+ passwd = settings_dict['OPTIONS'].get('password',
+ settings_dict['PASSWORD'])
+ host = settings_dict['OPTIONS'].get('host', settings_dict['HOST'])
+ port = settings_dict['OPTIONS'].get('port', settings_dict['PORT'])
+ defaults_file = settings_dict['OPTIONS'].get('read_default_file')
+
+ # --defaults-file should always be the first option
+ if defaults_file:
+ args.append('--defaults-file={0}'.format(defaults_file))
+
+ # We force SQL_MODE to TRADITIONAL
+ args.append('--init-command=SET @@session.SQL_MODE=TRADITIONAL')
+
+ if user:
+ args.append('--user={0}'.format(user))
+ if passwd:
+ args.append('--password={0}'.format(passwd))
+
+ if host:
+ if '/' in host:
+ args.append('--socket={0}'.format(host))
+ else:
+ args.append('--host={0}'.format(host))
+
+ if port:
+ args.append('--port={0}'.format(port))
+
+ if db:
+ args.append('--database={0}'.format(db))
+
+ return args
+
+ def runshell(self):
+ args = DatabaseClient.settings_to_cmd_args(
+ self.connection.settings_dict)
+ subprocess.call(args)
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/compiler.py b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/compiler.py
new file mode 100644
index 0000000000000000000000000000000000000000..b5fc0cc0b843788071c0e8c7eaf3bbc8d376108d
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/compiler.py
@@ -0,0 +1,35 @@
+# Copyright (c) 2020, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+from django.db.backends.mysql.compiler import (
+ SQLCompiler,
+ SQLInsertCompiler,
+ SQLDeleteCompiler,
+ SQLUpdateCompiler,
+ SQLAggregateCompiler
+)
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/creation.py b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/creation.py
new file mode 100644
index 0000000000000000000000000000000000000000..8cc9d48e72b4b78ce779e136134b48e1b8a8200c
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/creation.py
@@ -0,0 +1,29 @@
+# Copyright (c) 2020, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+from django.db.backends.mysql.creation import DatabaseCreation
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/features.py b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/features.py
new file mode 100644
index 0000000000000000000000000000000000000000..6a8c84e1a6209716c4d30205a7945d5e997d4b92
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/features.py
@@ -0,0 +1,44 @@
+# Copyright (c) 2020, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+from django.db.backends.mysql.features import DatabaseFeatures as MySQLDatabaseFeatures
+from django.utils.functional import cached_property
+
+
+class DatabaseFeatures(MySQLDatabaseFeatures):
+ empty_fetchmany_value = []
+
+ @cached_property
+ def can_introspect_check_constraints(self):
+ return self.connection.mysql_version >= (8, 0, 16)
+
+ @cached_property
+ def supports_microsecond_precision(self):
+ if self.connection.mysql_version >= (5, 6, 3):
+ return True
+ return False
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/introspection.py b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/introspection.py
new file mode 100644
index 0000000000000000000000000000000000000000..450bb1dc8a11f34288ed1ee333100182c9903b81
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/introspection.py
@@ -0,0 +1,380 @@
+# Copyright (c) 2020, 2021, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+from collections import namedtuple
+
+import sqlparse
+
+from mysql.connector.constants import FieldType
+
+from django import VERSION as DJANGO_VERSION
+from django.db.backends.base.introspection import (
+ BaseDatabaseIntrospection, FieldInfo as BaseFieldInfo, TableInfo,
+)
+from django.db.models import Index
+from django.utils.datastructures import OrderedSet
+
+# Extend Django's FieldInfo with the MySQL-specific per-column metadata
+# consumed by DatabaseIntrospection.get_field_type().
+FieldInfo = namedtuple(
+    'FieldInfo',
+    BaseFieldInfo._fields + ('extra', 'is_unsigned', 'has_json_constraint')
+)
+# Row shape of the information_schema.columns query issued by
+# DatabaseIntrospection.get_table_description(). Django 3.2 added a
+# per-column collation field to the introspection contract, hence the
+# version-dependent tuple.
+if DJANGO_VERSION < (3, 2, 0):
+    InfoLine = namedtuple(
+        'InfoLine',
+        'col_name data_type max_len num_prec num_scale extra column_default '
+        'is_unsigned'
+    )
+else:
+    InfoLine = namedtuple(
+        'InfoLine',
+        'col_name data_type max_len num_prec num_scale extra column_default '
+        'collation is_unsigned'
+    )
+
+
+class DatabaseIntrospection(BaseDatabaseIntrospection):
+    """Database introspection for the MySQL Connector/Python backend.
+
+    Implements the BaseDatabaseIntrospection hooks with
+    information_schema queries and SHOW statements.
+    """
+
+    # Map Connector/Python FieldType codes to Django field class names.
+    # get_field_type() below refines this mapping per column.
+    data_types_reverse = {
+        FieldType.BLOB: 'TextField',
+        FieldType.DECIMAL: 'DecimalField',
+        FieldType.NEWDECIMAL: 'DecimalField',
+        FieldType.DATE: 'DateField',
+        FieldType.DATETIME: 'DateTimeField',
+        FieldType.DOUBLE: 'FloatField',
+        FieldType.FLOAT: 'FloatField',
+        FieldType.INT24: 'IntegerField',
+        FieldType.LONG: 'IntegerField',
+        FieldType.LONGLONG: 'BigIntegerField',
+        FieldType.SHORT: 'SmallIntegerField',
+        FieldType.STRING: 'CharField',
+        FieldType.TIME: 'TimeField',
+        FieldType.TIMESTAMP: 'DateTimeField',
+        FieldType.TINY: 'IntegerField',
+        FieldType.TINY_BLOB: 'TextField',
+        FieldType.MEDIUM_BLOB: 'TextField',
+        FieldType.LONG_BLOB: 'TextField',
+        FieldType.VAR_STRING: 'CharField',
+    }
+
+    def get_field_type(self, data_type, description):
+        """Refine the reverse type mapping using the extra per-column
+        metadata carried by the extended FieldInfo (auto_increment,
+        unsignedness, JSON check constraint)."""
+        field_type = super().get_field_type(data_type, description)
+        # auto_increment promotes integer types to the AutoField family.
+        if 'auto_increment' in description.extra:
+            if field_type == 'IntegerField':
+                return 'AutoField'
+            elif field_type == 'BigIntegerField':
+                return 'BigAutoField'
+            elif field_type == 'SmallIntegerField':
+                return 'SmallAutoField'
+        # Unsigned integer columns map to the Positive*Field variants.
+        if description.is_unsigned:
+            if field_type == 'BigIntegerField':
+                return 'PositiveBigIntegerField'
+            elif field_type == 'IntegerField':
+                return 'PositiveIntegerField'
+            elif field_type == 'SmallIntegerField':
+                return 'PositiveSmallIntegerField'
+        # JSON data type is an alias for LONGTEXT in MariaDB, use check
+        # constraints clauses to introspect JSONField.
+        if description.has_json_constraint:
+            return 'JSONField'
+        return field_type
+
+    def get_table_list(self, cursor):
+        """Return a list of table and view names in the current database."""
+        cursor.execute("SHOW FULL TABLES")
+        return [TableInfo(row[0], {'BASE TABLE': 't', 'VIEW': 'v'}.get(row[1]))
+                for row in cursor.fetchall()]
+
+    def get_table_description(self, cursor, table_name):
+        """
+        Return a description of the table with the DB-API cursor.description
+        interface.
+        """
+        # NOTE(review): json_constraints is never populated in this copy
+        # (upstream Django fills it for MariaDB JSON columns), so
+        # has_json_constraint is always False here — confirm intended.
+        json_constraints = {}
+        # A default collation for the given table.
+        cursor.execute("""
+            SELECT table_collation
+            FROM information_schema.tables
+            WHERE table_schema = DATABASE()
+            AND table_name = %s
+        """, [table_name])
+        row = cursor.fetchone()
+        default_column_collation = row[0] if row else ''
+        # information_schema database gives more accurate results for some figures:
+        # - varchar length returned by cursor.description is an internal length,
+        #   not visible length (#5725)
+        # - precision and scale (for decimal fields) (#5014)
+        # - auto_increment is not available in cursor.description
+        if DJANGO_VERSION < (3, 2, 0):
+            cursor.execute("""
+                SELECT
+                    column_name, data_type, character_maximum_length,
+                    numeric_precision, numeric_scale, extra, column_default,
+                    CASE
+                        WHEN column_type LIKE '%% unsigned' THEN 1
+                        ELSE 0
+                    END AS is_unsigned
+                FROM information_schema.columns
+                WHERE table_name = %s AND table_schema = DATABASE()
+            """, [table_name])
+        else:
+            # Django >= 3.2 also expects the per-column collation (reported
+            # as NULL when it matches the table default).
+            cursor.execute("""
+                SELECT
+                    column_name, data_type, character_maximum_length,
+                    numeric_precision, numeric_scale, extra, column_default,
+                    CASE
+                        WHEN collation_name = %s THEN NULL
+                        ELSE collation_name
+                    END AS collation_name,
+                    CASE
+                        WHEN column_type LIKE '%% unsigned' THEN 1
+                        ELSE 0
+                    END AS is_unsigned
+                FROM information_schema.columns
+                WHERE table_name = %s AND table_schema = DATABASE()
+            """, [default_column_collation, table_name])
+        field_info = {line[0]: InfoLine(*line) for line in cursor.fetchall()}
+
+        # A zero/one-row SELECT populates cursor.description with the
+        # DB-API metadata that information_schema cannot provide.
+        cursor.execute("SELECT * FROM %s LIMIT 1" % self.connection.ops.quote_name(table_name))
+
+        def to_int(i):
+            # Coerce information_schema numerics to int, preserving None.
+            return int(i) if i is not None else i
+
+        # Merge cursor.description with the information_schema data,
+        # preferring the latter for lengths/precision/scale when available.
+        fields = []
+        for line in cursor.description:
+            info = field_info[line[0]]
+            if DJANGO_VERSION < (3, 2, 0):
+                fields.append(FieldInfo(
+                    *line[:3],
+                    to_int(info.max_len) or line[3],
+                    to_int(info.num_prec) or line[4],
+                    to_int(info.num_scale) or line[5],
+                    line[6],
+                    info.column_default,
+                    info.extra,
+                    info.is_unsigned,
+                    line[0] in json_constraints
+                ))
+            else:
+                fields.append(FieldInfo(
+                    *line[:3],
+                    to_int(info.max_len) or line[3],
+                    to_int(info.num_prec) or line[4],
+                    to_int(info.num_scale) or line[5],
+                    line[6],
+                    info.column_default,
+                    info.collation,
+                    info.extra,
+                    info.is_unsigned,
+                    line[0] in json_constraints,
+                ))
+        return fields
+
+    def get_indexes(self, cursor, table_name):
+        """Return single-column index info keyed by column name.
+
+        Multi-column indexes are deliberately skipped.
+        """
+        cursor.execute("SHOW INDEX FROM {0}"
+                       "".format(self.connection.ops.quote_name(table_name)))
+        # Do a two-pass search for indexes: on first pass check which indexes
+        # are multicolumn, on second pass check which single-column indexes
+        # are present.
+        # Positional SHOW INDEX columns used below: row[1] Non_unique,
+        # row[2] Key_name, row[3] Seq_in_index, row[4] Column_name.
+        rows = list(cursor.fetchall())
+        multicol_indexes = set()
+        for row in rows:
+            if row[3] > 1:
+                multicol_indexes.add(row[2])
+        indexes = {}
+        for row in rows:
+            if row[2] in multicol_indexes:
+                continue
+            if row[4] not in indexes:
+                indexes[row[4]] = {'primary_key': False, 'unique': False}
+            # It's possible to have the unique and PK constraints in
+            # separate indexes.
+            if row[2] == 'PRIMARY':
+                indexes[row[4]]['primary_key'] = True
+            if not row[1]:
+                indexes[row[4]]['unique'] = True
+        return indexes
+
+    def get_primary_key_column(self, cursor, table_name):
+        """
+        Returns the name of the primary key column for the given table
+        """
+        # NOTE(review): get_indexes() skips multi-column indexes, so a
+        # composite primary key would not be detected here.
+        for column in self.get_indexes(cursor, table_name).items():
+            if column[1]['primary_key']:
+                return column[0]
+        return None
+
+    def get_sequences(self, cursor, table_name, table_fields=()):
+        """Return the auto-increment "sequence" of the table, if any."""
+        for field_info in self.get_table_description(cursor, table_name):
+            if 'auto_increment' in field_info.extra:
+                # MySQL allows only one auto-increment column per table.
+                return [{'table': table_name, 'column': field_info.name}]
+        return []
+
+    def get_relations(self, cursor, table_name):
+        """
+        Return a dictionary of {field_name: (field_name_other_table, other_table)}
+        representing all relationships to the given table.
+        """
+        constraints = self.get_key_columns(cursor, table_name)
+        relations = {}
+        for my_fieldname, other_table, other_field in constraints:
+            relations[my_fieldname] = (other_field, other_table)
+        return relations
+
+    def get_key_columns(self, cursor, table_name):
+        """
+        Return a list of (column_name, referenced_table_name, referenced_column_name)
+        for all key columns in the given table.
+        """
+        key_columns = []
+        cursor.execute("""
+            SELECT column_name, referenced_table_name, referenced_column_name
+            FROM information_schema.key_column_usage
+            WHERE table_name = %s
+                AND table_schema = DATABASE()
+                AND referenced_table_name IS NOT NULL
+                AND referenced_column_name IS NOT NULL""", [table_name])
+        key_columns.extend(cursor.fetchall())
+        return key_columns
+
+    def get_storage_engine(self, cursor, table_name):
+        """
+        Retrieve the storage engine for a given table. Return the default
+        storage engine if the table doesn't exist.
+        """
+        cursor.execute(
+            "SELECT engine "
+            "FROM information_schema.tables "
+            "WHERE table_name = %s", [table_name])
+        result = cursor.fetchone()
+        if not result:
+            # Fall back to the backend-wide default engine.
+            return self.connection.features._mysql_storage_engine
+        return result[0]
+
+    def get_constraints(self, cursor, table_name):
+        """
+        Retrieve any constraints or keys (unique, pk, fk, check, index) across
+        one or more columns.
+        """
+        constraints = {}
+        # Get the actual constraint names and columns
+        name_query = """
+            SELECT kc.`constraint_name`, kc.`column_name`,
+                kc.`referenced_table_name`, kc.`referenced_column_name`
+            FROM information_schema.key_column_usage AS kc
+            WHERE
+                kc.table_schema = DATABASE() AND
+                kc.table_name = %s
+            ORDER BY kc.`ordinal_position`
+        """
+        cursor.execute(name_query, [table_name])
+        for constraint, column, ref_table, ref_column in cursor.fetchall():
+            if constraint not in constraints:
+                constraints[constraint] = {
+                    'columns': OrderedSet(),
+                    'primary_key': False,
+                    'unique': False,
+                    'index': False,
+                    'check': False,
+                    'foreign_key': (ref_table, ref_column) if ref_column else None,
+                }
+                if self.connection.features.supports_index_column_ordering:
+                    constraints[constraint]['orders'] = []
+            constraints[constraint]['columns'].add(column)
+        # Now get the constraint types
+        type_query = """
+            SELECT c.constraint_name, c.constraint_type
+            FROM information_schema.table_constraints AS c
+            WHERE
+                c.table_schema = DATABASE() AND
+                c.table_name = %s
+        """
+        cursor.execute(type_query, [table_name])
+        for constraint, kind in cursor.fetchall():
+            if kind.lower() == "primary key":
+                constraints[constraint]['primary_key'] = True
+                constraints[constraint]['unique'] = True
+            elif kind.lower() == "unique":
+                constraints[constraint]['unique'] = True
+        # Add check constraints.
+        if self.connection.features.can_introspect_check_constraints:
+            unnamed_constraints_index = 0
+            columns = {info.name for info in self.get_table_description(cursor, table_name)}
+            type_query = """
+                SELECT cc.constraint_name, cc.check_clause
+                FROM
+                    information_schema.check_constraints AS cc,
+                    information_schema.table_constraints AS tc
+                WHERE
+                    cc.constraint_schema = DATABASE() AND
+                    tc.table_schema = cc.constraint_schema AND
+                    cc.constraint_name = tc.constraint_name AND
+                    tc.constraint_type = 'CHECK' AND
+                    tc.table_name = %s
+            """
+            cursor.execute(type_query, [table_name])
+            for constraint, check_clause in cursor.fetchall():
+                # NOTE(review): _parse_constraint_columns is not defined in
+                # the visible portion of this file — presumably provided
+                # elsewhere in the class/module; verify.
+                constraint_columns = self._parse_constraint_columns(check_clause, columns)
+                # Ensure uniqueness of unnamed constraints. Unnamed unique
+                # and check columns constraints have the same name as
+                # a column.
+                if set(constraint_columns) == {constraint}:
+                    unnamed_constraints_index += 1
+                    constraint = '__unnamed_constraint_%s__' % unnamed_constraints_index
+                constraints[constraint] = {
+                    'columns': constraint_columns,
+                    'primary_key': False,
+                    'unique': False,
+                    'index': False,
+                    'check': True,
+                    'foreign_key': None,
+                }
+        # Now add in the indexes
+        cursor.execute("SHOW INDEX FROM %s" % self.connection.ops.quote_name(table_name))
+        # x[:6] = (Table, Non_unique, Key_name, Seq_in_index, Column_name,
+        # Collation); x[10] = Index_type, per SHOW INDEX output order.
+        for table, non_unique, index, colseq, column, order, type_ in [
+            x[:6] + (x[10],) for x in cursor.fetchall()
+        ]:
+            if index not in constraints:
+                constraints[index] = {
+                    'columns': OrderedSet(),
+                    'primary_key': False,
+                    'unique': False,
+                    'check': False,
+                    'foreign_key': None,
+                }
+                if self.connection.features.supports_index_column_ordering:
+                    constraints[index]['orders'] = []
+            constraints[index]['index'] = True
+            constraints[index]['type'] = Index.suffix if type_ == 'BTREE' else type_.lower()
+            constraints[index]['columns'].add(column)
+            if self.connection.features.supports_index_column_ordering:
+                # SHOW INDEX reports collation 'D' for descending order.
+                constraints[index]['orders'].append('DESC' if order == 'D' else 'ASC')
+        # Convert the sorted sets to lists
+        for constraint in constraints.values():
+            constraint['columns'] = list(constraint['columns'])
+        return constraints
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/operations.py b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/operations.py
new file mode 100644
index 0000000000000000000000000000000000000000..027bb58949d3c8f6a256519e0b2b4c03ff79bbff
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/operations.py
@@ -0,0 +1,87 @@
+# Copyright (c) 2020, 2021, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+from django.db.backends.mysql.operations import DatabaseOperations as MySQLDatabaseOperations
+from django.conf import settings
+from django.utils import timezone
+
+try:
+ from _mysql_connector import datetime_to_mysql, time_to_mysql
+except ImportError:
+ HAVE_CEXT = False
+else:
+ HAVE_CEXT = True
+
+
+class DatabaseOperations(MySQLDatabaseOperations):
+ compiler_module = "mysql.connector.django.compiler"
+
+ def regex_lookup(self, lookup_type):
+ if self.connection.mysql_version < (8, 0, 0):
+ if lookup_type == 'regex':
+ return '%s REGEXP BINARY %s'
+ return '%s REGEXP %s'
+
+ match_option = 'c' if lookup_type == 'regex' else 'i'
+ return "REGEXP_LIKE(%s, %s, '%s')" % match_option
+
+ def adapt_datetimefield_value(self, value):
+ return self.value_to_db_datetime(value)
+
+ def value_to_db_datetime(self, value):
+ if value is None:
+ return None
+ # MySQL doesn't support tz-aware times
+ if timezone.is_aware(value):
+ if settings.USE_TZ:
+ value = value.astimezone(timezone.utc).replace(tzinfo=None)
+ else:
+ raise ValueError(
+ "MySQL backend does not support timezone-aware times."
+ )
+ if not self.connection.features.supports_microsecond_precision:
+ value = value.replace(microsecond=0)
+ if not self.connection.use_pure:
+ return datetime_to_mysql(value)
+ return self.connection.converter.to_mysql(value)
+
+ def adapt_timefield_value(self, value):
+ return self.value_to_db_time(value)
+
+ def value_to_db_time(self, value):
+ if value is None:
+ return None
+
+ # MySQL doesn't support tz-aware times
+ if timezone.is_aware(value):
+ raise ValueError("MySQL backend does not support timezone-aware "
+ "times.")
+
+ if not self.connection.use_pure:
+ return time_to_mysql(value)
+ return self.connection.converter.to_mysql(value)
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/schema.py b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/schema.py
new file mode 100644
index 0000000000000000000000000000000000000000..76967286ffc84d262a9a5bb573c50fac58f01f35
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/schema.py
@@ -0,0 +1,41 @@
+# Copyright (c) 2020, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+from django.db.backends.mysql.schema import DatabaseSchemaEditor as MySQLDatabaseSchemaEditor
+
+
+class DatabaseSchemaEditor(MySQLDatabaseSchemaEditor):
+
+ def quote_value(self, value):
+ self.connection.ensure_connection()
+ if isinstance(value, str):
+ value = value.replace('%', '%%')
+ quoted = self.connection.connection.converter.escape(value)
+ if isinstance(value, str) and isinstance(quoted, bytes):
+ quoted = quoted.decode()
+ return quoted
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/validation.py b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/validation.py
new file mode 100644
index 0000000000000000000000000000000000000000..20c285e391382c8b9382b604ceeba703eb1d56f5
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql/connector/django/validation.py
@@ -0,0 +1,29 @@
+# Copyright (c) 2020, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+from django.db.backends.mysql.validation import DatabaseValidation
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/errorcode.py b/monEnvTP/lib/python3.8/site-packages/mysql/connector/errorcode.py
new file mode 100644
index 0000000000000000000000000000000000000000..3c15f69319ae51d8f6c750da230d9cc7fd6153f4
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql/connector/errorcode.py
@@ -0,0 +1,1877 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2013, 2021, Oracle and/or its affiliates. All rights reserved.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+# This file was auto-generated.
+_GENERATED_ON = '2021-08-11'
+_MYSQL_VERSION = (8, 0, 27)
+
+# NOTE(review): the string below is not a real module docstring (it does
+# not precede all other statements), so it is a no-op expression at
+# import time — an artifact of the code generator.
+"""This module contains the MySQL Server and Client error codes"""
+
+# Start MySQL Errors
+OBSOLETE_ER_HASHCHK = 1000
+OBSOLETE_ER_NISAMCHK = 1001
+ER_NO = 1002
+ER_YES = 1003
+ER_CANT_CREATE_FILE = 1004
+ER_CANT_CREATE_TABLE = 1005
+ER_CANT_CREATE_DB = 1006
+ER_DB_CREATE_EXISTS = 1007
+ER_DB_DROP_EXISTS = 1008
+OBSOLETE_ER_DB_DROP_DELETE = 1009
+ER_DB_DROP_RMDIR = 1010
+OBSOLETE_ER_CANT_DELETE_FILE = 1011
+ER_CANT_FIND_SYSTEM_REC = 1012
+ER_CANT_GET_STAT = 1013
+OBSOLETE_ER_CANT_GET_WD = 1014
+ER_CANT_LOCK = 1015
+ER_CANT_OPEN_FILE = 1016
+ER_FILE_NOT_FOUND = 1017
+ER_CANT_READ_DIR = 1018
+OBSOLETE_ER_CANT_SET_WD = 1019
+ER_CHECKREAD = 1020
+OBSOLETE_ER_DISK_FULL = 1021
+ER_DUP_KEY = 1022
+OBSOLETE_ER_ERROR_ON_CLOSE = 1023
+ER_ERROR_ON_READ = 1024
+ER_ERROR_ON_RENAME = 1025
+ER_ERROR_ON_WRITE = 1026
+ER_FILE_USED = 1027
+OBSOLETE_ER_FILSORT_ABORT = 1028
+OBSOLETE_ER_FORM_NOT_FOUND = 1029
+ER_GET_ERRNO = 1030
+ER_ILLEGAL_HA = 1031
+ER_KEY_NOT_FOUND = 1032
+ER_NOT_FORM_FILE = 1033
+ER_NOT_KEYFILE = 1034
+ER_OLD_KEYFILE = 1035
+ER_OPEN_AS_READONLY = 1036
+ER_OUTOFMEMORY = 1037
+ER_OUT_OF_SORTMEMORY = 1038
+OBSOLETE_ER_UNEXPECTED_EOF = 1039
+ER_CON_COUNT_ERROR = 1040
+ER_OUT_OF_RESOURCES = 1041
+ER_BAD_HOST_ERROR = 1042
+ER_HANDSHAKE_ERROR = 1043
+ER_DBACCESS_DENIED_ERROR = 1044
+ER_ACCESS_DENIED_ERROR = 1045
+ER_NO_DB_ERROR = 1046
+ER_UNKNOWN_COM_ERROR = 1047
+ER_BAD_NULL_ERROR = 1048
+ER_BAD_DB_ERROR = 1049
+ER_TABLE_EXISTS_ERROR = 1050
+ER_BAD_TABLE_ERROR = 1051
+ER_NON_UNIQ_ERROR = 1052
+ER_SERVER_SHUTDOWN = 1053
+ER_BAD_FIELD_ERROR = 1054
+ER_WRONG_FIELD_WITH_GROUP = 1055
+ER_WRONG_GROUP_FIELD = 1056
+ER_WRONG_SUM_SELECT = 1057
+ER_WRONG_VALUE_COUNT = 1058
+ER_TOO_LONG_IDENT = 1059
+ER_DUP_FIELDNAME = 1060
+ER_DUP_KEYNAME = 1061
+ER_DUP_ENTRY = 1062
+ER_WRONG_FIELD_SPEC = 1063
+ER_PARSE_ERROR = 1064
+ER_EMPTY_QUERY = 1065
+ER_NONUNIQ_TABLE = 1066
+ER_INVALID_DEFAULT = 1067
+ER_MULTIPLE_PRI_KEY = 1068
+ER_TOO_MANY_KEYS = 1069
+ER_TOO_MANY_KEY_PARTS = 1070
+ER_TOO_LONG_KEY = 1071
+ER_KEY_COLUMN_DOES_NOT_EXITS = 1072
+ER_BLOB_USED_AS_KEY = 1073
+ER_TOO_BIG_FIELDLENGTH = 1074
+ER_WRONG_AUTO_KEY = 1075
+ER_READY = 1076
+OBSOLETE_ER_NORMAL_SHUTDOWN = 1077
+OBSOLETE_ER_GOT_SIGNAL = 1078
+ER_SHUTDOWN_COMPLETE = 1079
+ER_FORCING_CLOSE = 1080
+ER_IPSOCK_ERROR = 1081
+ER_NO_SUCH_INDEX = 1082
+ER_WRONG_FIELD_TERMINATORS = 1083
+ER_BLOBS_AND_NO_TERMINATED = 1084
+ER_TEXTFILE_NOT_READABLE = 1085
+ER_FILE_EXISTS_ERROR = 1086
+ER_LOAD_INFO = 1087
+ER_ALTER_INFO = 1088
+ER_WRONG_SUB_KEY = 1089
+ER_CANT_REMOVE_ALL_FIELDS = 1090
+ER_CANT_DROP_FIELD_OR_KEY = 1091
+ER_INSERT_INFO = 1092
+ER_UPDATE_TABLE_USED = 1093
+ER_NO_SUCH_THREAD = 1094
+ER_KILL_DENIED_ERROR = 1095
+ER_NO_TABLES_USED = 1096
+ER_TOO_BIG_SET = 1097
+ER_NO_UNIQUE_LOGFILE = 1098
+ER_TABLE_NOT_LOCKED_FOR_WRITE = 1099
+ER_TABLE_NOT_LOCKED = 1100
+ER_BLOB_CANT_HAVE_DEFAULT = 1101
+ER_WRONG_DB_NAME = 1102
+ER_WRONG_TABLE_NAME = 1103
+ER_TOO_BIG_SELECT = 1104
+ER_UNKNOWN_ERROR = 1105
+ER_UNKNOWN_PROCEDURE = 1106
+ER_WRONG_PARAMCOUNT_TO_PROCEDURE = 1107
+ER_WRONG_PARAMETERS_TO_PROCEDURE = 1108
+ER_UNKNOWN_TABLE = 1109
+ER_FIELD_SPECIFIED_TWICE = 1110
+ER_INVALID_GROUP_FUNC_USE = 1111
+ER_UNSUPPORTED_EXTENSION = 1112
+ER_TABLE_MUST_HAVE_COLUMNS = 1113
+ER_RECORD_FILE_FULL = 1114
+ER_UNKNOWN_CHARACTER_SET = 1115
+ER_TOO_MANY_TABLES = 1116
+ER_TOO_MANY_FIELDS = 1117
+ER_TOO_BIG_ROWSIZE = 1118
+ER_STACK_OVERRUN = 1119
+ER_WRONG_OUTER_JOIN_UNUSED = 1120
+ER_NULL_COLUMN_IN_INDEX = 1121
+ER_CANT_FIND_UDF = 1122
+ER_CANT_INITIALIZE_UDF = 1123
+ER_UDF_NO_PATHS = 1124
+ER_UDF_EXISTS = 1125
+ER_CANT_OPEN_LIBRARY = 1126
+ER_CANT_FIND_DL_ENTRY = 1127
+ER_FUNCTION_NOT_DEFINED = 1128
+ER_HOST_IS_BLOCKED = 1129
+ER_HOST_NOT_PRIVILEGED = 1130
+ER_PASSWORD_ANONYMOUS_USER = 1131
+ER_PASSWORD_NOT_ALLOWED = 1132
+ER_PASSWORD_NO_MATCH = 1133
+ER_UPDATE_INFO = 1134
+ER_CANT_CREATE_THREAD = 1135
+ER_WRONG_VALUE_COUNT_ON_ROW = 1136
+ER_CANT_REOPEN_TABLE = 1137
+ER_INVALID_USE_OF_NULL = 1138
+ER_REGEXP_ERROR = 1139
+ER_MIX_OF_GROUP_FUNC_AND_FIELDS = 1140
+ER_NONEXISTING_GRANT = 1141
+ER_TABLEACCESS_DENIED_ERROR = 1142
+ER_COLUMNACCESS_DENIED_ERROR = 1143
+ER_ILLEGAL_GRANT_FOR_TABLE = 1144
+ER_GRANT_WRONG_HOST_OR_USER = 1145
+ER_NO_SUCH_TABLE = 1146
+ER_NONEXISTING_TABLE_GRANT = 1147
+ER_NOT_ALLOWED_COMMAND = 1148
+ER_SYNTAX_ERROR = 1149
+OBSOLETE_ER_UNUSED1 = 1150
+OBSOLETE_ER_UNUSED2 = 1151
+ER_ABORTING_CONNECTION = 1152
+ER_NET_PACKET_TOO_LARGE = 1153
+ER_NET_READ_ERROR_FROM_PIPE = 1154
+ER_NET_FCNTL_ERROR = 1155
+ER_NET_PACKETS_OUT_OF_ORDER = 1156
+ER_NET_UNCOMPRESS_ERROR = 1157
+ER_NET_READ_ERROR = 1158
+ER_NET_READ_INTERRUPTED = 1159
+ER_NET_ERROR_ON_WRITE = 1160
+ER_NET_WRITE_INTERRUPTED = 1161
+ER_TOO_LONG_STRING = 1162
+ER_TABLE_CANT_HANDLE_BLOB = 1163
+ER_TABLE_CANT_HANDLE_AUTO_INCREMENT = 1164
+OBSOLETE_ER_UNUSED3 = 1165
+ER_WRONG_COLUMN_NAME = 1166
+ER_WRONG_KEY_COLUMN = 1167
+ER_WRONG_MRG_TABLE = 1168
+ER_DUP_UNIQUE = 1169
+ER_BLOB_KEY_WITHOUT_LENGTH = 1170
+ER_PRIMARY_CANT_HAVE_NULL = 1171
+ER_TOO_MANY_ROWS = 1172
+ER_REQUIRES_PRIMARY_KEY = 1173
+OBSOLETE_ER_NO_RAID_COMPILED = 1174
+ER_UPDATE_WITHOUT_KEY_IN_SAFE_MODE = 1175
+ER_KEY_DOES_NOT_EXITS = 1176
+ER_CHECK_NO_SUCH_TABLE = 1177
+ER_CHECK_NOT_IMPLEMENTED = 1178
+ER_CANT_DO_THIS_DURING_AN_TRANSACTION = 1179
+ER_ERROR_DURING_COMMIT = 1180
+ER_ERROR_DURING_ROLLBACK = 1181
+ER_ERROR_DURING_FLUSH_LOGS = 1182
+OBSOLETE_ER_ERROR_DURING_CHECKPOINT = 1183
+ER_NEW_ABORTING_CONNECTION = 1184
+OBSOLETE_ER_DUMP_NOT_IMPLEMENTED = 1185
+OBSOLETE_ER_FLUSH_MASTER_BINLOG_CLOSED = 1186
+OBSOLETE_ER_INDEX_REBUILD = 1187
+ER_MASTER = 1188
+ER_MASTER_NET_READ = 1189
+ER_MASTER_NET_WRITE = 1190
+ER_FT_MATCHING_KEY_NOT_FOUND = 1191
+ER_LOCK_OR_ACTIVE_TRANSACTION = 1192
+ER_UNKNOWN_SYSTEM_VARIABLE = 1193
+ER_CRASHED_ON_USAGE = 1194
+ER_CRASHED_ON_REPAIR = 1195
+ER_WARNING_NOT_COMPLETE_ROLLBACK = 1196
+ER_TRANS_CACHE_FULL = 1197
+OBSOLETE_ER_SLAVE_MUST_STOP = 1198
+ER_SLAVE_NOT_RUNNING = 1199
+ER_BAD_SLAVE = 1200
+ER_MASTER_INFO = 1201
+ER_SLAVE_THREAD = 1202
+ER_TOO_MANY_USER_CONNECTIONS = 1203
+ER_SET_CONSTANTS_ONLY = 1204
+ER_LOCK_WAIT_TIMEOUT = 1205
+ER_LOCK_TABLE_FULL = 1206
+ER_READ_ONLY_TRANSACTION = 1207
+OBSOLETE_ER_DROP_DB_WITH_READ_LOCK = 1208
+OBSOLETE_ER_CREATE_DB_WITH_READ_LOCK = 1209
+ER_WRONG_ARGUMENTS = 1210
+ER_NO_PERMISSION_TO_CREATE_USER = 1211
+OBSOLETE_ER_UNION_TABLES_IN_DIFFERENT_DIR = 1212
+ER_LOCK_DEADLOCK = 1213
+ER_TABLE_CANT_HANDLE_FT = 1214
+ER_CANNOT_ADD_FOREIGN = 1215
+ER_NO_REFERENCED_ROW = 1216
+ER_ROW_IS_REFERENCED = 1217
+ER_CONNECT_TO_MASTER = 1218
+OBSOLETE_ER_QUERY_ON_MASTER = 1219
+ER_ERROR_WHEN_EXECUTING_COMMAND = 1220
+ER_WRONG_USAGE = 1221
+ER_WRONG_NUMBER_OF_COLUMNS_IN_SELECT = 1222
+ER_CANT_UPDATE_WITH_READLOCK = 1223
+ER_MIXING_NOT_ALLOWED = 1224
+ER_DUP_ARGUMENT = 1225
+ER_USER_LIMIT_REACHED = 1226
+ER_SPECIFIC_ACCESS_DENIED_ERROR = 1227
+ER_LOCAL_VARIABLE = 1228
+ER_GLOBAL_VARIABLE = 1229
+ER_NO_DEFAULT = 1230
+ER_WRONG_VALUE_FOR_VAR = 1231
+ER_WRONG_TYPE_FOR_VAR = 1232
+ER_VAR_CANT_BE_READ = 1233
+ER_CANT_USE_OPTION_HERE = 1234
+ER_NOT_SUPPORTED_YET = 1235
+ER_MASTER_FATAL_ERROR_READING_BINLOG = 1236
+ER_SLAVE_IGNORED_TABLE = 1237
+ER_INCORRECT_GLOBAL_LOCAL_VAR = 1238
+ER_WRONG_FK_DEF = 1239
+ER_KEY_REF_DO_NOT_MATCH_TABLE_REF = 1240
+ER_OPERAND_COLUMNS = 1241
+ER_SUBQUERY_NO_1_ROW = 1242
+ER_UNKNOWN_STMT_HANDLER = 1243
+ER_CORRUPT_HELP_DB = 1244
+OBSOLETE_ER_CYCLIC_REFERENCE = 1245
+ER_AUTO_CONVERT = 1246
+ER_ILLEGAL_REFERENCE = 1247
+ER_DERIVED_MUST_HAVE_ALIAS = 1248
+ER_SELECT_REDUCED = 1249
+ER_TABLENAME_NOT_ALLOWED_HERE = 1250
+ER_NOT_SUPPORTED_AUTH_MODE = 1251
+ER_SPATIAL_CANT_HAVE_NULL = 1252
+ER_COLLATION_CHARSET_MISMATCH = 1253
+OBSOLETE_ER_SLAVE_WAS_RUNNING = 1254
+OBSOLETE_ER_SLAVE_WAS_NOT_RUNNING = 1255
+ER_TOO_BIG_FOR_UNCOMPRESS = 1256
+ER_ZLIB_Z_MEM_ERROR = 1257
+ER_ZLIB_Z_BUF_ERROR = 1258
+ER_ZLIB_Z_DATA_ERROR = 1259
+ER_CUT_VALUE_GROUP_CONCAT = 1260
+ER_WARN_TOO_FEW_RECORDS = 1261
+ER_WARN_TOO_MANY_RECORDS = 1262
+ER_WARN_NULL_TO_NOTNULL = 1263
+ER_WARN_DATA_OUT_OF_RANGE = 1264
+WARN_DATA_TRUNCATED = 1265
+ER_WARN_USING_OTHER_HANDLER = 1266
+ER_CANT_AGGREGATE_2COLLATIONS = 1267
+OBSOLETE_ER_DROP_USER = 1268
+ER_REVOKE_GRANTS = 1269
+ER_CANT_AGGREGATE_3COLLATIONS = 1270
+ER_CANT_AGGREGATE_NCOLLATIONS = 1271
+ER_VARIABLE_IS_NOT_STRUCT = 1272
+ER_UNKNOWN_COLLATION = 1273
+ER_SLAVE_IGNORED_SSL_PARAMS = 1274
+OBSOLETE_ER_SERVER_IS_IN_SECURE_AUTH_MODE = 1275
+ER_WARN_FIELD_RESOLVED = 1276
+ER_BAD_SLAVE_UNTIL_COND = 1277
+ER_MISSING_SKIP_SLAVE = 1278
+ER_UNTIL_COND_IGNORED = 1279
+ER_WRONG_NAME_FOR_INDEX = 1280
+ER_WRONG_NAME_FOR_CATALOG = 1281
+OBSOLETE_ER_WARN_QC_RESIZE = 1282
+ER_BAD_FT_COLUMN = 1283
+ER_UNKNOWN_KEY_CACHE = 1284
+ER_WARN_HOSTNAME_WONT_WORK = 1285
+ER_UNKNOWN_STORAGE_ENGINE = 1286
+ER_WARN_DEPRECATED_SYNTAX = 1287
+ER_NON_UPDATABLE_TABLE = 1288
+ER_FEATURE_DISABLED = 1289
+ER_OPTION_PREVENTS_STATEMENT = 1290
+ER_DUPLICATED_VALUE_IN_TYPE = 1291
+ER_TRUNCATED_WRONG_VALUE = 1292
+OBSOLETE_ER_TOO_MUCH_AUTO_TIMESTAMP_COLS = 1293
+ER_INVALID_ON_UPDATE = 1294
+ER_UNSUPPORTED_PS = 1295
+ER_GET_ERRMSG = 1296
+ER_GET_TEMPORARY_ERRMSG = 1297
+ER_UNKNOWN_TIME_ZONE = 1298
+ER_WARN_INVALID_TIMESTAMP = 1299
+ER_INVALID_CHARACTER_STRING = 1300
+ER_WARN_ALLOWED_PACKET_OVERFLOWED = 1301
+ER_CONFLICTING_DECLARATIONS = 1302
+ER_SP_NO_RECURSIVE_CREATE = 1303
+ER_SP_ALREADY_EXISTS = 1304
+ER_SP_DOES_NOT_EXIST = 1305
+ER_SP_DROP_FAILED = 1306
+ER_SP_STORE_FAILED = 1307
+ER_SP_LILABEL_MISMATCH = 1308
+ER_SP_LABEL_REDEFINE = 1309
+ER_SP_LABEL_MISMATCH = 1310
+ER_SP_UNINIT_VAR = 1311
+ER_SP_BADSELECT = 1312
+ER_SP_BADRETURN = 1313
+ER_SP_BADSTATEMENT = 1314
+ER_UPDATE_LOG_DEPRECATED_IGNORED = 1315
+ER_UPDATE_LOG_DEPRECATED_TRANSLATED = 1316
+ER_QUERY_INTERRUPTED = 1317
+ER_SP_WRONG_NO_OF_ARGS = 1318
+ER_SP_COND_MISMATCH = 1319
+ER_SP_NORETURN = 1320
+ER_SP_NORETURNEND = 1321
+ER_SP_BAD_CURSOR_QUERY = 1322
+ER_SP_BAD_CURSOR_SELECT = 1323
+ER_SP_CURSOR_MISMATCH = 1324
+ER_SP_CURSOR_ALREADY_OPEN = 1325
+ER_SP_CURSOR_NOT_OPEN = 1326
+ER_SP_UNDECLARED_VAR = 1327
+ER_SP_WRONG_NO_OF_FETCH_ARGS = 1328
+ER_SP_FETCH_NO_DATA = 1329
+ER_SP_DUP_PARAM = 1330
+ER_SP_DUP_VAR = 1331
+ER_SP_DUP_COND = 1332
+ER_SP_DUP_CURS = 1333
+ER_SP_CANT_ALTER = 1334
+ER_SP_SUBSELECT_NYI = 1335
+ER_STMT_NOT_ALLOWED_IN_SF_OR_TRG = 1336
+ER_SP_VARCOND_AFTER_CURSHNDLR = 1337
+ER_SP_CURSOR_AFTER_HANDLER = 1338
+ER_SP_CASE_NOT_FOUND = 1339
+ER_FPARSER_TOO_BIG_FILE = 1340
+ER_FPARSER_BAD_HEADER = 1341
+ER_FPARSER_EOF_IN_COMMENT = 1342
+ER_FPARSER_ERROR_IN_PARAMETER = 1343
+ER_FPARSER_EOF_IN_UNKNOWN_PARAMETER = 1344
+ER_VIEW_NO_EXPLAIN = 1345
+OBSOLETE_ER_FRM_UNKNOWN_TYPE = 1346
+ER_WRONG_OBJECT = 1347
+ER_NONUPDATEABLE_COLUMN = 1348
+OBSOLETE_ER_VIEW_SELECT_DERIVED_UNUSED = 1349
+ER_VIEW_SELECT_CLAUSE = 1350
+ER_VIEW_SELECT_VARIABLE = 1351
+ER_VIEW_SELECT_TMPTABLE = 1352
+ER_VIEW_WRONG_LIST = 1353
+ER_WARN_VIEW_MERGE = 1354
+ER_WARN_VIEW_WITHOUT_KEY = 1355
+ER_VIEW_INVALID = 1356
+ER_SP_NO_DROP_SP = 1357
+OBSOLETE_ER_SP_GOTO_IN_HNDLR = 1358
+ER_TRG_ALREADY_EXISTS = 1359
+ER_TRG_DOES_NOT_EXIST = 1360
+ER_TRG_ON_VIEW_OR_TEMP_TABLE = 1361
+ER_TRG_CANT_CHANGE_ROW = 1362
+ER_TRG_NO_SUCH_ROW_IN_TRG = 1363
+ER_NO_DEFAULT_FOR_FIELD = 1364
+ER_DIVISION_BY_ZERO = 1365
+ER_TRUNCATED_WRONG_VALUE_FOR_FIELD = 1366
+ER_ILLEGAL_VALUE_FOR_TYPE = 1367
+ER_VIEW_NONUPD_CHECK = 1368
+ER_VIEW_CHECK_FAILED = 1369
+ER_PROCACCESS_DENIED_ERROR = 1370
+ER_RELAY_LOG_FAIL = 1371
+OBSOLETE_ER_PASSWD_LENGTH = 1372
+ER_UNKNOWN_TARGET_BINLOG = 1373
+ER_IO_ERR_LOG_INDEX_READ = 1374
+ER_BINLOG_PURGE_PROHIBITED = 1375
+ER_FSEEK_FAIL = 1376
+ER_BINLOG_PURGE_FATAL_ERR = 1377
+ER_LOG_IN_USE = 1378
+ER_LOG_PURGE_UNKNOWN_ERR = 1379
+ER_RELAY_LOG_INIT = 1380
+ER_NO_BINARY_LOGGING = 1381
+ER_RESERVED_SYNTAX = 1382
+OBSOLETE_ER_WSAS_FAILED = 1383
+OBSOLETE_ER_DIFF_GROUPS_PROC = 1384
+OBSOLETE_ER_NO_GROUP_FOR_PROC = 1385
+OBSOLETE_ER_ORDER_WITH_PROC = 1386
+OBSOLETE_ER_LOGGING_PROHIBIT_CHANGING_OF = 1387
+OBSOLETE_ER_NO_FILE_MAPPING = 1388
+OBSOLETE_ER_WRONG_MAGIC = 1389
+ER_PS_MANY_PARAM = 1390
+ER_KEY_PART_0 = 1391
+ER_VIEW_CHECKSUM = 1392
+ER_VIEW_MULTIUPDATE = 1393
+ER_VIEW_NO_INSERT_FIELD_LIST = 1394
+ER_VIEW_DELETE_MERGE_VIEW = 1395
+ER_CANNOT_USER = 1396
+ER_XAER_NOTA = 1397
+ER_XAER_INVAL = 1398
+ER_XAER_RMFAIL = 1399
+ER_XAER_OUTSIDE = 1400
+ER_XAER_RMERR = 1401
+ER_XA_RBROLLBACK = 1402
+ER_NONEXISTING_PROC_GRANT = 1403
+ER_PROC_AUTO_GRANT_FAIL = 1404
+ER_PROC_AUTO_REVOKE_FAIL = 1405
+ER_DATA_TOO_LONG = 1406
+ER_SP_BAD_SQLSTATE = 1407
+ER_STARTUP = 1408
+ER_LOAD_FROM_FIXED_SIZE_ROWS_TO_VAR = 1409
+ER_CANT_CREATE_USER_WITH_GRANT = 1410
+ER_WRONG_VALUE_FOR_TYPE = 1411
+ER_TABLE_DEF_CHANGED = 1412
+ER_SP_DUP_HANDLER = 1413
+ER_SP_NOT_VAR_ARG = 1414
+ER_SP_NO_RETSET = 1415
+ER_CANT_CREATE_GEOMETRY_OBJECT = 1416
+OBSOLETE_ER_FAILED_ROUTINE_BREAK_BINLOG = 1417
+ER_BINLOG_UNSAFE_ROUTINE = 1418
+ER_BINLOG_CREATE_ROUTINE_NEED_SUPER = 1419
+OBSOLETE_ER_EXEC_STMT_WITH_OPEN_CURSOR = 1420
+ER_STMT_HAS_NO_OPEN_CURSOR = 1421
+ER_COMMIT_NOT_ALLOWED_IN_SF_OR_TRG = 1422
+ER_NO_DEFAULT_FOR_VIEW_FIELD = 1423
+ER_SP_NO_RECURSION = 1424
+ER_TOO_BIG_SCALE = 1425
+ER_TOO_BIG_PRECISION = 1426
+ER_M_BIGGER_THAN_D = 1427
+ER_WRONG_LOCK_OF_SYSTEM_TABLE = 1428
+ER_CONNECT_TO_FOREIGN_DATA_SOURCE = 1429
+ER_QUERY_ON_FOREIGN_DATA_SOURCE = 1430
+ER_FOREIGN_DATA_SOURCE_DOESNT_EXIST = 1431
+ER_FOREIGN_DATA_STRING_INVALID_CANT_CREATE = 1432
+ER_FOREIGN_DATA_STRING_INVALID = 1433
+OBSOLETE_ER_CANT_CREATE_FEDERATED_TABLE = 1434
+ER_TRG_IN_WRONG_SCHEMA = 1435
+ER_STACK_OVERRUN_NEED_MORE = 1436
+ER_TOO_LONG_BODY = 1437
+ER_WARN_CANT_DROP_DEFAULT_KEYCACHE = 1438
+ER_TOO_BIG_DISPLAYWIDTH = 1439
+ER_XAER_DUPID = 1440
+ER_DATETIME_FUNCTION_OVERFLOW = 1441
+ER_CANT_UPDATE_USED_TABLE_IN_SF_OR_TRG = 1442
+ER_VIEW_PREVENT_UPDATE = 1443
+ER_PS_NO_RECURSION = 1444
+ER_SP_CANT_SET_AUTOCOMMIT = 1445
+OBSOLETE_ER_MALFORMED_DEFINER = 1446
+ER_VIEW_FRM_NO_USER = 1447
+ER_VIEW_OTHER_USER = 1448
+ER_NO_SUCH_USER = 1449
+ER_FORBID_SCHEMA_CHANGE = 1450
+ER_ROW_IS_REFERENCED_2 = 1451
+ER_NO_REFERENCED_ROW_2 = 1452
+ER_SP_BAD_VAR_SHADOW = 1453
+ER_TRG_NO_DEFINER = 1454
+ER_OLD_FILE_FORMAT = 1455
+ER_SP_RECURSION_LIMIT = 1456
+OBSOLETE_ER_SP_PROC_TABLE_CORRUPT = 1457
+ER_SP_WRONG_NAME = 1458
+ER_TABLE_NEEDS_UPGRADE = 1459
+ER_SP_NO_AGGREGATE = 1460
+ER_MAX_PREPARED_STMT_COUNT_REACHED = 1461
+ER_VIEW_RECURSIVE = 1462
+ER_NON_GROUPING_FIELD_USED = 1463
+ER_TABLE_CANT_HANDLE_SPKEYS = 1464
+ER_NO_TRIGGERS_ON_SYSTEM_SCHEMA = 1465
+ER_REMOVED_SPACES = 1466
+ER_AUTOINC_READ_FAILED = 1467
+ER_USERNAME = 1468
+ER_HOSTNAME = 1469
+ER_WRONG_STRING_LENGTH = 1470
+ER_NON_INSERTABLE_TABLE = 1471
+ER_ADMIN_WRONG_MRG_TABLE = 1472
+ER_TOO_HIGH_LEVEL_OF_NESTING_FOR_SELECT = 1473
+ER_NAME_BECOMES_EMPTY = 1474
+ER_AMBIGUOUS_FIELD_TERM = 1475
+ER_FOREIGN_SERVER_EXISTS = 1476
+ER_FOREIGN_SERVER_DOESNT_EXIST = 1477
+ER_ILLEGAL_HA_CREATE_OPTION = 1478
+ER_PARTITION_REQUIRES_VALUES_ERROR = 1479
+ER_PARTITION_WRONG_VALUES_ERROR = 1480
+ER_PARTITION_MAXVALUE_ERROR = 1481
+OBSOLETE_ER_PARTITION_SUBPARTITION_ERROR = 1482
+OBSOLETE_ER_PARTITION_SUBPART_MIX_ERROR = 1483
+ER_PARTITION_WRONG_NO_PART_ERROR = 1484
+ER_PARTITION_WRONG_NO_SUBPART_ERROR = 1485
+ER_WRONG_EXPR_IN_PARTITION_FUNC_ERROR = 1486
+OBSOLETE_ER_NO_CONST_EXPR_IN_RANGE_OR_LIST_ERROR = 1487
+ER_FIELD_NOT_FOUND_PART_ERROR = 1488
+OBSOLETE_ER_LIST_OF_FIELDS_ONLY_IN_HASH_ERROR = 1489
+ER_INCONSISTENT_PARTITION_INFO_ERROR = 1490
+ER_PARTITION_FUNC_NOT_ALLOWED_ERROR = 1491
+ER_PARTITIONS_MUST_BE_DEFINED_ERROR = 1492
+ER_RANGE_NOT_INCREASING_ERROR = 1493
+ER_INCONSISTENT_TYPE_OF_FUNCTIONS_ERROR = 1494
+ER_MULTIPLE_DEF_CONST_IN_LIST_PART_ERROR = 1495
+ER_PARTITION_ENTRY_ERROR = 1496
+ER_MIX_HANDLER_ERROR = 1497
+ER_PARTITION_NOT_DEFINED_ERROR = 1498
+ER_TOO_MANY_PARTITIONS_ERROR = 1499
+ER_SUBPARTITION_ERROR = 1500
+ER_CANT_CREATE_HANDLER_FILE = 1501
+ER_BLOB_FIELD_IN_PART_FUNC_ERROR = 1502
+ER_UNIQUE_KEY_NEED_ALL_FIELDS_IN_PF = 1503
+ER_NO_PARTS_ERROR = 1504
+ER_PARTITION_MGMT_ON_NONPARTITIONED = 1505
+ER_FOREIGN_KEY_ON_PARTITIONED = 1506
+ER_DROP_PARTITION_NON_EXISTENT = 1507
+ER_DROP_LAST_PARTITION = 1508
+ER_COALESCE_ONLY_ON_HASH_PARTITION = 1509
+ER_REORG_HASH_ONLY_ON_SAME_NO = 1510
+ER_REORG_NO_PARAM_ERROR = 1511
+ER_ONLY_ON_RANGE_LIST_PARTITION = 1512
+ER_ADD_PARTITION_SUBPART_ERROR = 1513
+ER_ADD_PARTITION_NO_NEW_PARTITION = 1514
+ER_COALESCE_PARTITION_NO_PARTITION = 1515
+ER_REORG_PARTITION_NOT_EXIST = 1516
+ER_SAME_NAME_PARTITION = 1517
+ER_NO_BINLOG_ERROR = 1518
+ER_CONSECUTIVE_REORG_PARTITIONS = 1519
+ER_REORG_OUTSIDE_RANGE = 1520
+ER_PARTITION_FUNCTION_FAILURE = 1521
+OBSOLETE_ER_PART_STATE_ERROR = 1522
+ER_LIMITED_PART_RANGE = 1523
+ER_PLUGIN_IS_NOT_LOADED = 1524
+ER_WRONG_VALUE = 1525
+ER_NO_PARTITION_FOR_GIVEN_VALUE = 1526
+ER_FILEGROUP_OPTION_ONLY_ONCE = 1527
+ER_CREATE_FILEGROUP_FAILED = 1528
+ER_DROP_FILEGROUP_FAILED = 1529
+ER_TABLESPACE_AUTO_EXTEND_ERROR = 1530
+ER_WRONG_SIZE_NUMBER = 1531
+ER_SIZE_OVERFLOW_ERROR = 1532
+ER_ALTER_FILEGROUP_FAILED = 1533
+ER_BINLOG_ROW_LOGGING_FAILED = 1534
+OBSOLETE_ER_BINLOG_ROW_WRONG_TABLE_DEF = 1535
+OBSOLETE_ER_BINLOG_ROW_RBR_TO_SBR = 1536
+ER_EVENT_ALREADY_EXISTS = 1537
+OBSOLETE_ER_EVENT_STORE_FAILED = 1538
+ER_EVENT_DOES_NOT_EXIST = 1539
+OBSOLETE_ER_EVENT_CANT_ALTER = 1540
+OBSOLETE_ER_EVENT_DROP_FAILED = 1541
+ER_EVENT_INTERVAL_NOT_POSITIVE_OR_TOO_BIG = 1542
+ER_EVENT_ENDS_BEFORE_STARTS = 1543
+ER_EVENT_EXEC_TIME_IN_THE_PAST = 1544
+OBSOLETE_ER_EVENT_OPEN_TABLE_FAILED = 1545
+OBSOLETE_ER_EVENT_NEITHER_M_EXPR_NOR_M_AT = 1546
+OBSOLETE_ER_COL_COUNT_DOESNT_MATCH_CORRUPTED = 1547
+OBSOLETE_ER_CANNOT_LOAD_FROM_TABLE = 1548
+OBSOLETE_ER_EVENT_CANNOT_DELETE = 1549
+OBSOLETE_ER_EVENT_COMPILE_ERROR = 1550
+ER_EVENT_SAME_NAME = 1551
+OBSOLETE_ER_EVENT_DATA_TOO_LONG = 1552
+ER_DROP_INDEX_FK = 1553
+ER_WARN_DEPRECATED_SYNTAX_WITH_VER = 1554
+OBSOLETE_ER_CANT_WRITE_LOCK_LOG_TABLE = 1555
+ER_CANT_LOCK_LOG_TABLE = 1556
+ER_FOREIGN_DUPLICATE_KEY_OLD_UNUSED = 1557
+ER_COL_COUNT_DOESNT_MATCH_PLEASE_UPDATE = 1558
+OBSOLETE_ER_TEMP_TABLE_PREVENTS_SWITCH_OUT_OF_RBR = 1559
+ER_STORED_FUNCTION_PREVENTS_SWITCH_BINLOG_FORMAT = 1560
+OBSOLETE_ER_NDB_CANT_SWITCH_BINLOG_FORMAT = 1561
+ER_PARTITION_NO_TEMPORARY = 1562
+ER_PARTITION_CONST_DOMAIN_ERROR = 1563
+ER_PARTITION_FUNCTION_IS_NOT_ALLOWED = 1564
+OBSOLETE_ER_DDL_LOG_ERROR_UNUSED = 1565
+ER_NULL_IN_VALUES_LESS_THAN = 1566
+ER_WRONG_PARTITION_NAME = 1567
+ER_CANT_CHANGE_TX_CHARACTERISTICS = 1568
+ER_DUP_ENTRY_AUTOINCREMENT_CASE = 1569
+OBSOLETE_ER_EVENT_MODIFY_QUEUE_ERROR = 1570
+ER_EVENT_SET_VAR_ERROR = 1571
+ER_PARTITION_MERGE_ERROR = 1572
+OBSOLETE_ER_CANT_ACTIVATE_LOG = 1573
+OBSOLETE_ER_RBR_NOT_AVAILABLE = 1574
+ER_BASE64_DECODE_ERROR = 1575
+ER_EVENT_RECURSION_FORBIDDEN = 1576
+OBSOLETE_ER_EVENTS_DB_ERROR = 1577
+ER_ONLY_INTEGERS_ALLOWED = 1578
+ER_UNSUPORTED_LOG_ENGINE = 1579
+ER_BAD_LOG_STATEMENT = 1580
+ER_CANT_RENAME_LOG_TABLE = 1581
+ER_WRONG_PARAMCOUNT_TO_NATIVE_FCT = 1582
+ER_WRONG_PARAMETERS_TO_NATIVE_FCT = 1583
+ER_WRONG_PARAMETERS_TO_STORED_FCT = 1584
+ER_NATIVE_FCT_NAME_COLLISION = 1585
+ER_DUP_ENTRY_WITH_KEY_NAME = 1586
+ER_BINLOG_PURGE_EMFILE = 1587
+ER_EVENT_CANNOT_CREATE_IN_THE_PAST = 1588
+ER_EVENT_CANNOT_ALTER_IN_THE_PAST = 1589
+OBSOLETE_ER_SLAVE_INCIDENT = 1590
+ER_NO_PARTITION_FOR_GIVEN_VALUE_SILENT = 1591
+ER_BINLOG_UNSAFE_STATEMENT = 1592
+ER_BINLOG_FATAL_ERROR = 1593
+OBSOLETE_ER_SLAVE_RELAY_LOG_READ_FAILURE = 1594
+OBSOLETE_ER_SLAVE_RELAY_LOG_WRITE_FAILURE = 1595
+OBSOLETE_ER_SLAVE_CREATE_EVENT_FAILURE = 1596
+OBSOLETE_ER_SLAVE_MASTER_COM_FAILURE = 1597
+ER_BINLOG_LOGGING_IMPOSSIBLE = 1598
+ER_VIEW_NO_CREATION_CTX = 1599
+ER_VIEW_INVALID_CREATION_CTX = 1600
+OBSOLETE_ER_SR_INVALID_CREATION_CTX = 1601
+ER_TRG_CORRUPTED_FILE = 1602
+ER_TRG_NO_CREATION_CTX = 1603
+ER_TRG_INVALID_CREATION_CTX = 1604
+ER_EVENT_INVALID_CREATION_CTX = 1605
+ER_TRG_CANT_OPEN_TABLE = 1606
+OBSOLETE_ER_CANT_CREATE_SROUTINE = 1607
+OBSOLETE_ER_NEVER_USED = 1608
+ER_NO_FORMAT_DESCRIPTION_EVENT_BEFORE_BINLOG_STATEMENT = 1609
+ER_SLAVE_CORRUPT_EVENT = 1610
+OBSOLETE_ER_LOAD_DATA_INVALID_COLUMN_UNUSED = 1611
+ER_LOG_PURGE_NO_FILE = 1612
+ER_XA_RBTIMEOUT = 1613
+ER_XA_RBDEADLOCK = 1614
+ER_NEED_REPREPARE = 1615
+OBSOLETE_ER_DELAYED_NOT_SUPPORTED = 1616
+WARN_NO_MASTER_INFO = 1617
+WARN_OPTION_IGNORED = 1618
+ER_PLUGIN_DELETE_BUILTIN = 1619
+WARN_PLUGIN_BUSY = 1620
+ER_VARIABLE_IS_READONLY = 1621
+ER_WARN_ENGINE_TRANSACTION_ROLLBACK = 1622
+OBSOLETE_ER_SLAVE_HEARTBEAT_FAILURE = 1623
+ER_SLAVE_HEARTBEAT_VALUE_OUT_OF_RANGE = 1624
+ER_NDB_REPLICATION_SCHEMA_ERROR = 1625
+ER_CONFLICT_FN_PARSE_ERROR = 1626
+ER_EXCEPTIONS_WRITE_ERROR = 1627
+ER_TOO_LONG_TABLE_COMMENT = 1628
+ER_TOO_LONG_FIELD_COMMENT = 1629
+ER_FUNC_INEXISTENT_NAME_COLLISION = 1630
+ER_DATABASE_NAME = 1631
+ER_TABLE_NAME = 1632
+ER_PARTITION_NAME = 1633
+ER_SUBPARTITION_NAME = 1634
+ER_TEMPORARY_NAME = 1635
+ER_RENAMED_NAME = 1636
+ER_TOO_MANY_CONCURRENT_TRXS = 1637
+WARN_NON_ASCII_SEPARATOR_NOT_IMPLEMENTED = 1638
+ER_DEBUG_SYNC_TIMEOUT = 1639
+ER_DEBUG_SYNC_HIT_LIMIT = 1640
+ER_DUP_SIGNAL_SET = 1641
+ER_SIGNAL_WARN = 1642
+ER_SIGNAL_NOT_FOUND = 1643
+ER_SIGNAL_EXCEPTION = 1644
+ER_RESIGNAL_WITHOUT_ACTIVE_HANDLER = 1645
+ER_SIGNAL_BAD_CONDITION_TYPE = 1646
+WARN_COND_ITEM_TRUNCATED = 1647
+ER_COND_ITEM_TOO_LONG = 1648
+ER_UNKNOWN_LOCALE = 1649
+ER_SLAVE_IGNORE_SERVER_IDS = 1650
+OBSOLETE_ER_QUERY_CACHE_DISABLED = 1651
+ER_SAME_NAME_PARTITION_FIELD = 1652
+ER_PARTITION_COLUMN_LIST_ERROR = 1653
+ER_WRONG_TYPE_COLUMN_VALUE_ERROR = 1654
+ER_TOO_MANY_PARTITION_FUNC_FIELDS_ERROR = 1655
+ER_MAXVALUE_IN_VALUES_IN = 1656
+ER_TOO_MANY_VALUES_ERROR = 1657
+ER_ROW_SINGLE_PARTITION_FIELD_ERROR = 1658
+ER_FIELD_TYPE_NOT_ALLOWED_AS_PARTITION_FIELD = 1659
+ER_PARTITION_FIELDS_TOO_LONG = 1660
+ER_BINLOG_ROW_ENGINE_AND_STMT_ENGINE = 1661
+ER_BINLOG_ROW_MODE_AND_STMT_ENGINE = 1662
+ER_BINLOG_UNSAFE_AND_STMT_ENGINE = 1663
+ER_BINLOG_ROW_INJECTION_AND_STMT_ENGINE = 1664
+ER_BINLOG_STMT_MODE_AND_ROW_ENGINE = 1665
+ER_BINLOG_ROW_INJECTION_AND_STMT_MODE = 1666
+ER_BINLOG_MULTIPLE_ENGINES_AND_SELF_LOGGING_ENGINE = 1667
+ER_BINLOG_UNSAFE_LIMIT = 1668
+OBSOLETE_ER_UNUSED4 = 1669
+ER_BINLOG_UNSAFE_SYSTEM_TABLE = 1670
+ER_BINLOG_UNSAFE_AUTOINC_COLUMNS = 1671
+ER_BINLOG_UNSAFE_UDF = 1672
+ER_BINLOG_UNSAFE_SYSTEM_VARIABLE = 1673
+ER_BINLOG_UNSAFE_SYSTEM_FUNCTION = 1674
+ER_BINLOG_UNSAFE_NONTRANS_AFTER_TRANS = 1675
+ER_MESSAGE_AND_STATEMENT = 1676
+OBSOLETE_ER_SLAVE_CONVERSION_FAILED = 1677
+ER_SLAVE_CANT_CREATE_CONVERSION = 1678
+ER_INSIDE_TRANSACTION_PREVENTS_SWITCH_BINLOG_FORMAT = 1679
+ER_PATH_LENGTH = 1680
+ER_WARN_DEPRECATED_SYNTAX_NO_REPLACEMENT = 1681
+ER_WRONG_NATIVE_TABLE_STRUCTURE = 1682
+ER_WRONG_PERFSCHEMA_USAGE = 1683
+ER_WARN_I_S_SKIPPED_TABLE = 1684
+ER_INSIDE_TRANSACTION_PREVENTS_SWITCH_BINLOG_DIRECT = 1685
+ER_STORED_FUNCTION_PREVENTS_SWITCH_BINLOG_DIRECT = 1686
+ER_SPATIAL_MUST_HAVE_GEOM_COL = 1687
+ER_TOO_LONG_INDEX_COMMENT = 1688
+ER_LOCK_ABORTED = 1689
+ER_DATA_OUT_OF_RANGE = 1690
+OBSOLETE_ER_WRONG_SPVAR_TYPE_IN_LIMIT = 1691
+ER_BINLOG_UNSAFE_MULTIPLE_ENGINES_AND_SELF_LOGGING_ENGINE = 1692
+ER_BINLOG_UNSAFE_MIXED_STATEMENT = 1693
+ER_INSIDE_TRANSACTION_PREVENTS_SWITCH_SQL_LOG_BIN = 1694
+ER_STORED_FUNCTION_PREVENTS_SWITCH_SQL_LOG_BIN = 1695
+ER_FAILED_READ_FROM_PAR_FILE = 1696
+ER_VALUES_IS_NOT_INT_TYPE_ERROR = 1697
+ER_ACCESS_DENIED_NO_PASSWORD_ERROR = 1698
+ER_SET_PASSWORD_AUTH_PLUGIN = 1699
+OBSOLETE_ER_GRANT_PLUGIN_USER_EXISTS = 1700
+ER_TRUNCATE_ILLEGAL_FK = 1701
+ER_PLUGIN_IS_PERMANENT = 1702
+ER_SLAVE_HEARTBEAT_VALUE_OUT_OF_RANGE_MIN = 1703
+ER_SLAVE_HEARTBEAT_VALUE_OUT_OF_RANGE_MAX = 1704
+ER_STMT_CACHE_FULL = 1705
+ER_MULTI_UPDATE_KEY_CONFLICT = 1706
+ER_TABLE_NEEDS_REBUILD = 1707
+WARN_OPTION_BELOW_LIMIT = 1708
+ER_INDEX_COLUMN_TOO_LONG = 1709
+ER_ERROR_IN_TRIGGER_BODY = 1710
+ER_ERROR_IN_UNKNOWN_TRIGGER_BODY = 1711
+ER_INDEX_CORRUPT = 1712
+ER_UNDO_RECORD_TOO_BIG = 1713
+ER_BINLOG_UNSAFE_INSERT_IGNORE_SELECT = 1714
+ER_BINLOG_UNSAFE_INSERT_SELECT_UPDATE = 1715
+ER_BINLOG_UNSAFE_REPLACE_SELECT = 1716
+ER_BINLOG_UNSAFE_CREATE_IGNORE_SELECT = 1717
+ER_BINLOG_UNSAFE_CREATE_REPLACE_SELECT = 1718
+ER_BINLOG_UNSAFE_UPDATE_IGNORE = 1719
+ER_PLUGIN_NO_UNINSTALL = 1720
+ER_PLUGIN_NO_INSTALL = 1721
+ER_BINLOG_UNSAFE_WRITE_AUTOINC_SELECT = 1722
+ER_BINLOG_UNSAFE_CREATE_SELECT_AUTOINC = 1723
+ER_BINLOG_UNSAFE_INSERT_TWO_KEYS = 1724
+ER_TABLE_IN_FK_CHECK = 1725
+ER_UNSUPPORTED_ENGINE = 1726
+ER_BINLOG_UNSAFE_AUTOINC_NOT_FIRST = 1727
+ER_CANNOT_LOAD_FROM_TABLE_V2 = 1728
+ER_MASTER_DELAY_VALUE_OUT_OF_RANGE = 1729
+ER_ONLY_FD_AND_RBR_EVENTS_ALLOWED_IN_BINLOG_STATEMENT = 1730
+ER_PARTITION_EXCHANGE_DIFFERENT_OPTION = 1731
+ER_PARTITION_EXCHANGE_PART_TABLE = 1732
+ER_PARTITION_EXCHANGE_TEMP_TABLE = 1733
+ER_PARTITION_INSTEAD_OF_SUBPARTITION = 1734
+ER_UNKNOWN_PARTITION = 1735
+ER_TABLES_DIFFERENT_METADATA = 1736
+ER_ROW_DOES_NOT_MATCH_PARTITION = 1737
+ER_BINLOG_CACHE_SIZE_GREATER_THAN_MAX = 1738
+ER_WARN_INDEX_NOT_APPLICABLE = 1739
+ER_PARTITION_EXCHANGE_FOREIGN_KEY = 1740
+OBSOLETE_ER_NO_SUCH_KEY_VALUE = 1741
+ER_RPL_INFO_DATA_TOO_LONG = 1742
+OBSOLETE_ER_NETWORK_READ_EVENT_CHECKSUM_FAILURE = 1743
+OBSOLETE_ER_BINLOG_READ_EVENT_CHECKSUM_FAILURE = 1744
+ER_BINLOG_STMT_CACHE_SIZE_GREATER_THAN_MAX = 1745
+ER_CANT_UPDATE_TABLE_IN_CREATE_TABLE_SELECT = 1746
+ER_PARTITION_CLAUSE_ON_NONPARTITIONED = 1747
+ER_ROW_DOES_NOT_MATCH_GIVEN_PARTITION_SET = 1748
+OBSOLETE_ER_NO_SUCH_PARTITION__UNUSED = 1749
+ER_CHANGE_RPL_INFO_REPOSITORY_FAILURE = 1750
+ER_WARNING_NOT_COMPLETE_ROLLBACK_WITH_CREATED_TEMP_TABLE = 1751
+ER_WARNING_NOT_COMPLETE_ROLLBACK_WITH_DROPPED_TEMP_TABLE = 1752
+ER_MTS_FEATURE_IS_NOT_SUPPORTED = 1753
+ER_MTS_UPDATED_DBS_GREATER_MAX = 1754
+ER_MTS_CANT_PARALLEL = 1755
+ER_MTS_INCONSISTENT_DATA = 1756
+ER_FULLTEXT_NOT_SUPPORTED_WITH_PARTITIONING = 1757
+ER_DA_INVALID_CONDITION_NUMBER = 1758
+ER_INSECURE_PLAIN_TEXT = 1759
+ER_INSECURE_CHANGE_MASTER = 1760
+ER_FOREIGN_DUPLICATE_KEY_WITH_CHILD_INFO = 1761
+ER_FOREIGN_DUPLICATE_KEY_WITHOUT_CHILD_INFO = 1762
+ER_SQLTHREAD_WITH_SECURE_SLAVE = 1763
+ER_TABLE_HAS_NO_FT = 1764
+ER_VARIABLE_NOT_SETTABLE_IN_SF_OR_TRIGGER = 1765
+ER_VARIABLE_NOT_SETTABLE_IN_TRANSACTION = 1766
+OBSOLETE_ER_GTID_NEXT_IS_NOT_IN_GTID_NEXT_LIST = 1767
+OBSOLETE_ER_CANT_CHANGE_GTID_NEXT_IN_TRANSACTION = 1768
+ER_SET_STATEMENT_CANNOT_INVOKE_FUNCTION = 1769
+ER_GTID_NEXT_CANT_BE_AUTOMATIC_IF_GTID_NEXT_LIST_IS_NON_NULL = 1770
+OBSOLETE_ER_SKIPPING_LOGGED_TRANSACTION = 1771
+ER_MALFORMED_GTID_SET_SPECIFICATION = 1772
+ER_MALFORMED_GTID_SET_ENCODING = 1773
+ER_MALFORMED_GTID_SPECIFICATION = 1774
+ER_GNO_EXHAUSTED = 1775
+ER_BAD_SLAVE_AUTO_POSITION = 1776
+ER_AUTO_POSITION_REQUIRES_GTID_MODE_NOT_OFF = 1777
+ER_CANT_DO_IMPLICIT_COMMIT_IN_TRX_WHEN_GTID_NEXT_IS_SET = 1778
+ER_GTID_MODE_ON_REQUIRES_ENFORCE_GTID_CONSISTENCY_ON = 1779
+OBSOLETE_ER_GTID_MODE_REQUIRES_BINLOG = 1780
+ER_CANT_SET_GTID_NEXT_TO_GTID_WHEN_GTID_MODE_IS_OFF = 1781
+ER_CANT_SET_GTID_NEXT_TO_ANONYMOUS_WHEN_GTID_MODE_IS_ON = 1782
+ER_CANT_SET_GTID_NEXT_LIST_TO_NON_NULL_WHEN_GTID_MODE_IS_OFF = 1783
+OBSOLETE_ER_FOUND_GTID_EVENT_WHEN_GTID_MODE_IS_OFF__UNUSED = 1784
+ER_GTID_UNSAFE_NON_TRANSACTIONAL_TABLE = 1785
+ER_GTID_UNSAFE_CREATE_SELECT = 1786
+OBSOLETE_ER_GTID_UNSAFE_CREATE_DROP_TEMP_TABLE_IN_TRANSACTION = 1787
+ER_GTID_MODE_CAN_ONLY_CHANGE_ONE_STEP_AT_A_TIME = 1788
+ER_MASTER_HAS_PURGED_REQUIRED_GTIDS = 1789
+ER_CANT_SET_GTID_NEXT_WHEN_OWNING_GTID = 1790
+ER_UNKNOWN_EXPLAIN_FORMAT = 1791
+ER_CANT_EXECUTE_IN_READ_ONLY_TRANSACTION = 1792
+ER_TOO_LONG_TABLE_PARTITION_COMMENT = 1793
+ER_SLAVE_CONFIGURATION = 1794
+ER_INNODB_FT_LIMIT = 1795
+ER_INNODB_NO_FT_TEMP_TABLE = 1796
+ER_INNODB_FT_WRONG_DOCID_COLUMN = 1797
+ER_INNODB_FT_WRONG_DOCID_INDEX = 1798
+ER_INNODB_ONLINE_LOG_TOO_BIG = 1799
+ER_UNKNOWN_ALTER_ALGORITHM = 1800
+ER_UNKNOWN_ALTER_LOCK = 1801
+ER_MTS_CHANGE_MASTER_CANT_RUN_WITH_GAPS = 1802
+ER_MTS_RECOVERY_FAILURE = 1803
+ER_MTS_RESET_WORKERS = 1804
+ER_COL_COUNT_DOESNT_MATCH_CORRUPTED_V2 = 1805
+ER_SLAVE_SILENT_RETRY_TRANSACTION = 1806
+ER_DISCARD_FK_CHECKS_RUNNING = 1807
+ER_TABLE_SCHEMA_MISMATCH = 1808
+ER_TABLE_IN_SYSTEM_TABLESPACE = 1809
+ER_IO_READ_ERROR = 1810
+ER_IO_WRITE_ERROR = 1811
+ER_TABLESPACE_MISSING = 1812
+ER_TABLESPACE_EXISTS = 1813
+ER_TABLESPACE_DISCARDED = 1814
+ER_INTERNAL_ERROR = 1815
+ER_INNODB_IMPORT_ERROR = 1816
+ER_INNODB_INDEX_CORRUPT = 1817
+ER_INVALID_YEAR_COLUMN_LENGTH = 1818
+ER_NOT_VALID_PASSWORD = 1819
+ER_MUST_CHANGE_PASSWORD = 1820
+ER_FK_NO_INDEX_CHILD = 1821
+ER_FK_NO_INDEX_PARENT = 1822
+ER_FK_FAIL_ADD_SYSTEM = 1823
+ER_FK_CANNOT_OPEN_PARENT = 1824
+ER_FK_INCORRECT_OPTION = 1825
+ER_FK_DUP_NAME = 1826
+ER_PASSWORD_FORMAT = 1827
+ER_FK_COLUMN_CANNOT_DROP = 1828
+ER_FK_COLUMN_CANNOT_DROP_CHILD = 1829
+ER_FK_COLUMN_NOT_NULL = 1830
+ER_DUP_INDEX = 1831
+ER_FK_COLUMN_CANNOT_CHANGE = 1832
+ER_FK_COLUMN_CANNOT_CHANGE_CHILD = 1833
+OBSOLETE_ER_UNUSED5 = 1834
+ER_MALFORMED_PACKET = 1835
+ER_READ_ONLY_MODE = 1836
+ER_GTID_NEXT_TYPE_UNDEFINED_GTID = 1837
+ER_VARIABLE_NOT_SETTABLE_IN_SP = 1838
+OBSOLETE_ER_CANT_SET_GTID_PURGED_WHEN_GTID_MODE_IS_OFF = 1839
+ER_CANT_SET_GTID_PURGED_WHEN_GTID_EXECUTED_IS_NOT_EMPTY = 1840
+ER_CANT_SET_GTID_PURGED_WHEN_OWNED_GTIDS_IS_NOT_EMPTY = 1841
+ER_GTID_PURGED_WAS_CHANGED = 1842
+ER_GTID_EXECUTED_WAS_CHANGED = 1843
+ER_BINLOG_STMT_MODE_AND_NO_REPL_TABLES = 1844
+ER_ALTER_OPERATION_NOT_SUPPORTED = 1845
+ER_ALTER_OPERATION_NOT_SUPPORTED_REASON = 1846
+ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_COPY = 1847
+ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_PARTITION = 1848
+ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_FK_RENAME = 1849
+ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_COLUMN_TYPE = 1850
+ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_FK_CHECK = 1851
+OBSOLETE_ER_UNUSED6 = 1852
+ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_NOPK = 1853
+ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_AUTOINC = 1854
+ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_HIDDEN_FTS = 1855
+ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_CHANGE_FTS = 1856
+ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_FTS = 1857
+OBSOLETE_ER_SQL_REPLICA_SKIP_COUNTER_NOT_SETTABLE_IN_GTID_MODE = 1858
+ER_DUP_UNKNOWN_IN_INDEX = 1859
+ER_IDENT_CAUSES_TOO_LONG_PATH = 1860
+ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_NOT_NULL = 1861
+ER_MUST_CHANGE_PASSWORD_LOGIN = 1862
+ER_ROW_IN_WRONG_PARTITION = 1863
+ER_MTS_EVENT_BIGGER_PENDING_JOBS_SIZE_MAX = 1864
+OBSOLETE_ER_INNODB_NO_FT_USES_PARSER = 1865
+ER_BINLOG_LOGICAL_CORRUPTION = 1866
+ER_WARN_PURGE_LOG_IN_USE = 1867
+ER_WARN_PURGE_LOG_IS_ACTIVE = 1868
+ER_AUTO_INCREMENT_CONFLICT = 1869
+WARN_ON_BLOCKHOLE_IN_RBR = 1870
+ER_SLAVE_MI_INIT_REPOSITORY = 1871
+ER_SLAVE_RLI_INIT_REPOSITORY = 1872
+ER_ACCESS_DENIED_CHANGE_USER_ERROR = 1873
+ER_INNODB_READ_ONLY = 1874
+ER_STOP_SLAVE_SQL_THREAD_TIMEOUT = 1875
+ER_STOP_SLAVE_IO_THREAD_TIMEOUT = 1876
+ER_TABLE_CORRUPT = 1877
+ER_TEMP_FILE_WRITE_FAILURE = 1878
+ER_INNODB_FT_AUX_NOT_HEX_ID = 1879
+ER_OLD_TEMPORALS_UPGRADED = 1880
+ER_INNODB_FORCED_RECOVERY = 1881
+ER_AES_INVALID_IV = 1882
+ER_PLUGIN_CANNOT_BE_UNINSTALLED = 1883
+ER_GTID_UNSAFE_BINLOG_SPLITTABLE_STATEMENT_AND_ASSIGNED_GTID = 1884
+ER_SLAVE_HAS_MORE_GTIDS_THAN_MASTER = 1885
+ER_MISSING_KEY = 1886
+WARN_NAMED_PIPE_ACCESS_EVERYONE = 1887
+ER_FILE_CORRUPT = 3000
+ER_ERROR_ON_MASTER = 3001
+OBSOLETE_ER_INCONSISTENT_ERROR = 3002
+ER_STORAGE_ENGINE_NOT_LOADED = 3003
+ER_GET_STACKED_DA_WITHOUT_ACTIVE_HANDLER = 3004
+ER_WARN_LEGACY_SYNTAX_CONVERTED = 3005
+ER_BINLOG_UNSAFE_FULLTEXT_PLUGIN = 3006
+ER_CANNOT_DISCARD_TEMPORARY_TABLE = 3007
+ER_FK_DEPTH_EXCEEDED = 3008
+ER_COL_COUNT_DOESNT_MATCH_PLEASE_UPDATE_V2 = 3009
+ER_WARN_TRIGGER_DOESNT_HAVE_CREATED = 3010
+ER_REFERENCED_TRG_DOES_NOT_EXIST = 3011
+ER_EXPLAIN_NOT_SUPPORTED = 3012
+ER_INVALID_FIELD_SIZE = 3013
+ER_MISSING_HA_CREATE_OPTION = 3014
+ER_ENGINE_OUT_OF_MEMORY = 3015
+ER_PASSWORD_EXPIRE_ANONYMOUS_USER = 3016
+ER_SLAVE_SQL_THREAD_MUST_STOP = 3017
+ER_NO_FT_MATERIALIZED_SUBQUERY = 3018
+ER_INNODB_UNDO_LOG_FULL = 3019
+ER_INVALID_ARGUMENT_FOR_LOGARITHM = 3020
+ER_SLAVE_CHANNEL_IO_THREAD_MUST_STOP = 3021
+ER_WARN_OPEN_TEMP_TABLES_MUST_BE_ZERO = 3022
+ER_WARN_ONLY_MASTER_LOG_FILE_NO_POS = 3023
+ER_QUERY_TIMEOUT = 3024
+ER_NON_RO_SELECT_DISABLE_TIMER = 3025
+ER_DUP_LIST_ENTRY = 3026
+OBSOLETE_ER_SQL_MODE_NO_EFFECT = 3027
+ER_AGGREGATE_ORDER_FOR_UNION = 3028
+ER_AGGREGATE_ORDER_NON_AGG_QUERY = 3029
+ER_SLAVE_WORKER_STOPPED_PREVIOUS_THD_ERROR = 3030
+ER_DONT_SUPPORT_REPLICA_PRESERVE_COMMIT_ORDER = 3031
+ER_SERVER_OFFLINE_MODE = 3032
+ER_GIS_DIFFERENT_SRIDS = 3033
+ER_GIS_UNSUPPORTED_ARGUMENT = 3034
+ER_GIS_UNKNOWN_ERROR = 3035
+ER_GIS_UNKNOWN_EXCEPTION = 3036
+ER_GIS_INVALID_DATA = 3037
+ER_BOOST_GEOMETRY_EMPTY_INPUT_EXCEPTION = 3038
+ER_BOOST_GEOMETRY_CENTROID_EXCEPTION = 3039
+ER_BOOST_GEOMETRY_OVERLAY_INVALID_INPUT_EXCEPTION = 3040
+ER_BOOST_GEOMETRY_TURN_INFO_EXCEPTION = 3041
+ER_BOOST_GEOMETRY_SELF_INTERSECTION_POINT_EXCEPTION = 3042
+ER_BOOST_GEOMETRY_UNKNOWN_EXCEPTION = 3043
+ER_STD_BAD_ALLOC_ERROR = 3044
+ER_STD_DOMAIN_ERROR = 3045
+ER_STD_LENGTH_ERROR = 3046
+ER_STD_INVALID_ARGUMENT = 3047
+ER_STD_OUT_OF_RANGE_ERROR = 3048
+ER_STD_OVERFLOW_ERROR = 3049
+ER_STD_RANGE_ERROR = 3050
+ER_STD_UNDERFLOW_ERROR = 3051
+ER_STD_LOGIC_ERROR = 3052
+ER_STD_RUNTIME_ERROR = 3053
+ER_STD_UNKNOWN_EXCEPTION = 3054
+ER_GIS_DATA_WRONG_ENDIANESS = 3055
+ER_CHANGE_MASTER_PASSWORD_LENGTH = 3056
+ER_USER_LOCK_WRONG_NAME = 3057
+ER_USER_LOCK_DEADLOCK = 3058
+ER_REPLACE_INACCESSIBLE_ROWS = 3059
+ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_GIS = 3060
+ER_ILLEGAL_USER_VAR = 3061
+ER_GTID_MODE_OFF = 3062
+OBSOLETE_ER_UNSUPPORTED_BY_REPLICATION_THREAD = 3063
+ER_INCORRECT_TYPE = 3064
+ER_FIELD_IN_ORDER_NOT_SELECT = 3065
+ER_AGGREGATE_IN_ORDER_NOT_SELECT = 3066
+ER_INVALID_RPL_WILD_TABLE_FILTER_PATTERN = 3067
+ER_NET_OK_PACKET_TOO_LARGE = 3068
+ER_INVALID_JSON_DATA = 3069
+ER_INVALID_GEOJSON_MISSING_MEMBER = 3070
+ER_INVALID_GEOJSON_WRONG_TYPE = 3071
+ER_INVALID_GEOJSON_UNSPECIFIED = 3072
+ER_DIMENSION_UNSUPPORTED = 3073
+ER_SLAVE_CHANNEL_DOES_NOT_EXIST = 3074
+OBSOLETE_ER_SLAVE_MULTIPLE_CHANNELS_HOST_PORT = 3075
+ER_SLAVE_CHANNEL_NAME_INVALID_OR_TOO_LONG = 3076
+ER_SLAVE_NEW_CHANNEL_WRONG_REPOSITORY = 3077
+OBSOLETE_ER_SLAVE_CHANNEL_DELETE = 3078
+ER_SLAVE_MULTIPLE_CHANNELS_CMD = 3079
+ER_SLAVE_MAX_CHANNELS_EXCEEDED = 3080
+ER_SLAVE_CHANNEL_MUST_STOP = 3081
+ER_SLAVE_CHANNEL_NOT_RUNNING = 3082
+ER_SLAVE_CHANNEL_WAS_RUNNING = 3083
+ER_SLAVE_CHANNEL_WAS_NOT_RUNNING = 3084
+ER_SLAVE_CHANNEL_SQL_THREAD_MUST_STOP = 3085
+ER_SLAVE_CHANNEL_SQL_SKIP_COUNTER = 3086
+ER_WRONG_FIELD_WITH_GROUP_V2 = 3087
+ER_MIX_OF_GROUP_FUNC_AND_FIELDS_V2 = 3088
+ER_WARN_DEPRECATED_SYSVAR_UPDATE = 3089
+ER_WARN_DEPRECATED_SQLMODE = 3090
+ER_CANNOT_LOG_PARTIAL_DROP_DATABASE_WITH_GTID = 3091
+ER_GROUP_REPLICATION_CONFIGURATION = 3092
+ER_GROUP_REPLICATION_RUNNING = 3093
+ER_GROUP_REPLICATION_APPLIER_INIT_ERROR = 3094
+ER_GROUP_REPLICATION_STOP_APPLIER_THREAD_TIMEOUT = 3095
+ER_GROUP_REPLICATION_COMMUNICATION_LAYER_SESSION_ERROR = 3096
+ER_GROUP_REPLICATION_COMMUNICATION_LAYER_JOIN_ERROR = 3097
+ER_BEFORE_DML_VALIDATION_ERROR = 3098
+ER_PREVENTS_VARIABLE_WITHOUT_RBR = 3099
+ER_RUN_HOOK_ERROR = 3100
+ER_TRANSACTION_ROLLBACK_DURING_COMMIT = 3101
+ER_GENERATED_COLUMN_FUNCTION_IS_NOT_ALLOWED = 3102
+ER_UNSUPPORTED_ALTER_INPLACE_ON_VIRTUAL_COLUMN = 3103
+ER_WRONG_FK_OPTION_FOR_GENERATED_COLUMN = 3104
+ER_NON_DEFAULT_VALUE_FOR_GENERATED_COLUMN = 3105
+ER_UNSUPPORTED_ACTION_ON_GENERATED_COLUMN = 3106
+ER_GENERATED_COLUMN_NON_PRIOR = 3107
+ER_DEPENDENT_BY_GENERATED_COLUMN = 3108
+ER_GENERATED_COLUMN_REF_AUTO_INC = 3109
+ER_FEATURE_NOT_AVAILABLE = 3110
+ER_CANT_SET_GTID_MODE = 3111
+ER_CANT_USE_AUTO_POSITION_WITH_GTID_MODE_OFF = 3112
+OBSOLETE_ER_CANT_REPLICATE_ANONYMOUS_WITH_AUTO_POSITION = 3113
+OBSOLETE_ER_CANT_REPLICATE_ANONYMOUS_WITH_GTID_MODE_ON = 3114
+OBSOLETE_ER_CANT_REPLICATE_GTID_WITH_GTID_MODE_OFF = 3115
+ER_CANT_ENFORCE_GTID_CONSISTENCY_WITH_ONGOING_GTID_VIOLATING_TX = 3116
+ER_ENFORCE_GTID_CONSISTENCY_WARN_WITH_ONGOING_GTID_VIOLATING_TX = 3117
+ER_ACCOUNT_HAS_BEEN_LOCKED = 3118
+ER_WRONG_TABLESPACE_NAME = 3119
+ER_TABLESPACE_IS_NOT_EMPTY = 3120
+ER_WRONG_FILE_NAME = 3121
+ER_BOOST_GEOMETRY_INCONSISTENT_TURNS_EXCEPTION = 3122
+ER_WARN_OPTIMIZER_HINT_SYNTAX_ERROR = 3123
+ER_WARN_BAD_MAX_EXECUTION_TIME = 3124
+ER_WARN_UNSUPPORTED_MAX_EXECUTION_TIME = 3125
+ER_WARN_CONFLICTING_HINT = 3126
+ER_WARN_UNKNOWN_QB_NAME = 3127
+ER_UNRESOLVED_HINT_NAME = 3128
+ER_WARN_ON_MODIFYING_GTID_EXECUTED_TABLE = 3129
+ER_PLUGGABLE_PROTOCOL_COMMAND_NOT_SUPPORTED = 3130
+ER_LOCKING_SERVICE_WRONG_NAME = 3131
+ER_LOCKING_SERVICE_DEADLOCK = 3132
+ER_LOCKING_SERVICE_TIMEOUT = 3133
+ER_GIS_MAX_POINTS_IN_GEOMETRY_OVERFLOWED = 3134
+ER_SQL_MODE_MERGED = 3135
+ER_VTOKEN_PLUGIN_TOKEN_MISMATCH = 3136
+ER_VTOKEN_PLUGIN_TOKEN_NOT_FOUND = 3137
+ER_CANT_SET_VARIABLE_WHEN_OWNING_GTID = 3138
+ER_SLAVE_CHANNEL_OPERATION_NOT_ALLOWED = 3139
+ER_INVALID_JSON_TEXT = 3140
+ER_INVALID_JSON_TEXT_IN_PARAM = 3141
+ER_INVALID_JSON_BINARY_DATA = 3142
+ER_INVALID_JSON_PATH = 3143
+ER_INVALID_JSON_CHARSET = 3144
+ER_INVALID_JSON_CHARSET_IN_FUNCTION = 3145
+ER_INVALID_TYPE_FOR_JSON = 3146
+ER_INVALID_CAST_TO_JSON = 3147
+ER_INVALID_JSON_PATH_CHARSET = 3148
+ER_INVALID_JSON_PATH_WILDCARD = 3149
+ER_JSON_VALUE_TOO_BIG = 3150
+ER_JSON_KEY_TOO_BIG = 3151
+ER_JSON_USED_AS_KEY = 3152
+ER_JSON_VACUOUS_PATH = 3153
+ER_JSON_BAD_ONE_OR_ALL_ARG = 3154
+ER_NUMERIC_JSON_VALUE_OUT_OF_RANGE = 3155
+ER_INVALID_JSON_VALUE_FOR_CAST = 3156
+ER_JSON_DOCUMENT_TOO_DEEP = 3157
+ER_JSON_DOCUMENT_NULL_KEY = 3158
+ER_SECURE_TRANSPORT_REQUIRED = 3159
+ER_NO_SECURE_TRANSPORTS_CONFIGURED = 3160
+ER_DISABLED_STORAGE_ENGINE = 3161
+ER_USER_DOES_NOT_EXIST = 3162
+ER_USER_ALREADY_EXISTS = 3163
+ER_AUDIT_API_ABORT = 3164
+ER_INVALID_JSON_PATH_ARRAY_CELL = 3165
+ER_BUFPOOL_RESIZE_INPROGRESS = 3166
+ER_FEATURE_DISABLED_SEE_DOC = 3167
+ER_SERVER_ISNT_AVAILABLE = 3168
+ER_SESSION_WAS_KILLED = 3169
+ER_CAPACITY_EXCEEDED = 3170
+ER_CAPACITY_EXCEEDED_IN_RANGE_OPTIMIZER = 3171
+OBSOLETE_ER_TABLE_NEEDS_UPG_PART = 3172
+ER_CANT_WAIT_FOR_EXECUTED_GTID_SET_WHILE_OWNING_A_GTID = 3173
+ER_CANNOT_ADD_FOREIGN_BASE_COL_VIRTUAL = 3174
+ER_CANNOT_CREATE_VIRTUAL_INDEX_CONSTRAINT = 3175
+ER_ERROR_ON_MODIFYING_GTID_EXECUTED_TABLE = 3176
+ER_LOCK_REFUSED_BY_ENGINE = 3177
+ER_UNSUPPORTED_ALTER_ONLINE_ON_VIRTUAL_COLUMN = 3178
+ER_MASTER_KEY_ROTATION_NOT_SUPPORTED_BY_SE = 3179
+OBSOLETE_ER_MASTER_KEY_ROTATION_ERROR_BY_SE = 3180
+ER_MASTER_KEY_ROTATION_BINLOG_FAILED = 3181
+ER_MASTER_KEY_ROTATION_SE_UNAVAILABLE = 3182
+ER_TABLESPACE_CANNOT_ENCRYPT = 3183
+ER_INVALID_ENCRYPTION_OPTION = 3184
+ER_CANNOT_FIND_KEY_IN_KEYRING = 3185
+ER_CAPACITY_EXCEEDED_IN_PARSER = 3186
+ER_UNSUPPORTED_ALTER_ENCRYPTION_INPLACE = 3187
+ER_KEYRING_UDF_KEYRING_SERVICE_ERROR = 3188
+ER_USER_COLUMN_OLD_LENGTH = 3189
+ER_CANT_RESET_MASTER = 3190
+ER_GROUP_REPLICATION_MAX_GROUP_SIZE = 3191
+ER_CANNOT_ADD_FOREIGN_BASE_COL_STORED = 3192
+ER_TABLE_REFERENCED = 3193
+OBSOLETE_ER_PARTITION_ENGINE_DEPRECATED_FOR_TABLE = 3194
+OBSOLETE_ER_WARN_USING_GEOMFROMWKB_TO_SET_SRID_ZERO = 3195
+OBSOLETE_ER_WARN_USING_GEOMFROMWKB_TO_SET_SRID = 3196
+ER_XA_RETRY = 3197
+ER_KEYRING_AWS_UDF_AWS_KMS_ERROR = 3198
+ER_BINLOG_UNSAFE_XA = 3199
+ER_UDF_ERROR = 3200
+ER_KEYRING_MIGRATION_FAILURE = 3201
+ER_KEYRING_ACCESS_DENIED_ERROR = 3202
+ER_KEYRING_MIGRATION_STATUS = 3203
+OBSOLETE_ER_PLUGIN_FAILED_TO_OPEN_TABLES = 3204
+OBSOLETE_ER_PLUGIN_FAILED_TO_OPEN_TABLE = 3205
+OBSOLETE_ER_AUDIT_LOG_NO_KEYRING_PLUGIN_INSTALLED = 3206
+OBSOLETE_ER_AUDIT_LOG_ENCRYPTION_PASSWORD_HAS_NOT_BEEN_SET = 3207
+OBSOLETE_ER_AUDIT_LOG_COULD_NOT_CREATE_AES_KEY = 3208
+OBSOLETE_ER_AUDIT_LOG_ENCRYPTION_PASSWORD_CANNOT_BE_FETCHED = 3209
+OBSOLETE_ER_AUDIT_LOG_JSON_FILTERING_NOT_ENABLED = 3210
+OBSOLETE_ER_AUDIT_LOG_UDF_INSUFFICIENT_PRIVILEGE = 3211
+OBSOLETE_ER_AUDIT_LOG_SUPER_PRIVILEGE_REQUIRED = 3212
+OBSOLETE_ER_COULD_NOT_REINITIALIZE_AUDIT_LOG_FILTERS = 3213
+OBSOLETE_ER_AUDIT_LOG_UDF_INVALID_ARGUMENT_TYPE = 3214
+OBSOLETE_ER_AUDIT_LOG_UDF_INVALID_ARGUMENT_COUNT = 3215
+OBSOLETE_ER_AUDIT_LOG_HAS_NOT_BEEN_INSTALLED = 3216
+OBSOLETE_ER_AUDIT_LOG_UDF_READ_INVALID_MAX_ARRAY_LENGTH_ARG_TYPE = 3217
+ER_AUDIT_LOG_UDF_READ_INVALID_MAX_ARRAY_LENGTH_ARG_VALUE = 3218
+OBSOLETE_ER_AUDIT_LOG_JSON_FILTER_PARSING_ERROR = 3219
+OBSOLETE_ER_AUDIT_LOG_JSON_FILTER_NAME_CANNOT_BE_EMPTY = 3220
+OBSOLETE_ER_AUDIT_LOG_JSON_USER_NAME_CANNOT_BE_EMPTY = 3221
+OBSOLETE_ER_AUDIT_LOG_JSON_FILTER_DOES_NOT_EXISTS = 3222
+OBSOLETE_ER_AUDIT_LOG_USER_FIRST_CHARACTER_MUST_BE_ALPHANUMERIC = 3223
+OBSOLETE_ER_AUDIT_LOG_USER_NAME_INVALID_CHARACTER = 3224
+OBSOLETE_ER_AUDIT_LOG_HOST_NAME_INVALID_CHARACTER = 3225
+OBSOLETE_ER_XA_REPLICATION_FILTERS = 3226
+OBSOLETE_ER_CANT_OPEN_ERROR_LOG = 3227
+OBSOLETE_ER_GROUPING_ON_TIMESTAMP_IN_DST = 3228
+OBSOLETE_ER_CANT_START_SERVER_NAMED_PIPE = 3229
+ER_WRITE_SET_EXCEEDS_LIMIT = 3230
+ER_UNSUPPORT_COMPRESSED_TEMPORARY_TABLE = 3500
+ER_ACL_OPERATION_FAILED = 3501
+ER_UNSUPPORTED_INDEX_ALGORITHM = 3502
+ER_NO_SUCH_DB = 3503
+ER_TOO_BIG_ENUM = 3504
+ER_TOO_LONG_SET_ENUM_VALUE = 3505
+ER_INVALID_DD_OBJECT = 3506
+ER_UPDATING_DD_TABLE = 3507
+ER_INVALID_DD_OBJECT_ID = 3508
+ER_INVALID_DD_OBJECT_NAME = 3509
+ER_TABLESPACE_MISSING_WITH_NAME = 3510
+ER_TOO_LONG_ROUTINE_COMMENT = 3511
+ER_SP_LOAD_FAILED = 3512
+ER_INVALID_BITWISE_OPERANDS_SIZE = 3513
+ER_INVALID_BITWISE_AGGREGATE_OPERANDS_SIZE = 3514
+ER_WARN_UNSUPPORTED_HINT = 3515
+ER_UNEXPECTED_GEOMETRY_TYPE = 3516
+ER_SRS_PARSE_ERROR = 3517
+ER_SRS_PROJ_PARAMETER_MISSING = 3518
+ER_WARN_SRS_NOT_FOUND = 3519
+ER_SRS_NOT_CARTESIAN = 3520
+ER_SRS_NOT_CARTESIAN_UNDEFINED = 3521
+ER_PK_INDEX_CANT_BE_INVISIBLE = 3522
+ER_UNKNOWN_AUTHID = 3523
+ER_FAILED_ROLE_GRANT = 3524
+ER_OPEN_ROLE_TABLES = 3525
+ER_FAILED_DEFAULT_ROLES = 3526
+ER_COMPONENTS_NO_SCHEME = 3527
+ER_COMPONENTS_NO_SCHEME_SERVICE = 3528
+ER_COMPONENTS_CANT_LOAD = 3529
+ER_ROLE_NOT_GRANTED = 3530
+ER_FAILED_REVOKE_ROLE = 3531
+ER_RENAME_ROLE = 3532
+ER_COMPONENTS_CANT_ACQUIRE_SERVICE_IMPLEMENTATION = 3533
+ER_COMPONENTS_CANT_SATISFY_DEPENDENCY = 3534
+ER_COMPONENTS_LOAD_CANT_REGISTER_SERVICE_IMPLEMENTATION = 3535
+ER_COMPONENTS_LOAD_CANT_INITIALIZE = 3536
+ER_COMPONENTS_UNLOAD_NOT_LOADED = 3537
+ER_COMPONENTS_UNLOAD_CANT_DEINITIALIZE = 3538
+ER_COMPONENTS_CANT_RELEASE_SERVICE = 3539
+ER_COMPONENTS_UNLOAD_CANT_UNREGISTER_SERVICE = 3540
+ER_COMPONENTS_CANT_UNLOAD = 3541
+ER_WARN_UNLOAD_THE_NOT_PERSISTED = 3542
+ER_COMPONENT_TABLE_INCORRECT = 3543
+ER_COMPONENT_MANIPULATE_ROW_FAILED = 3544
+ER_COMPONENTS_UNLOAD_DUPLICATE_IN_GROUP = 3545
+ER_CANT_SET_GTID_PURGED_DUE_SETS_CONSTRAINTS = 3546
+ER_CANNOT_LOCK_USER_MANAGEMENT_CACHES = 3547
+ER_SRS_NOT_FOUND = 3548
+ER_VARIABLE_NOT_PERSISTED = 3549
+ER_IS_QUERY_INVALID_CLAUSE = 3550
+ER_UNABLE_TO_STORE_STATISTICS = 3551
+ER_NO_SYSTEM_SCHEMA_ACCESS = 3552
+ER_NO_SYSTEM_TABLESPACE_ACCESS = 3553
+ER_NO_SYSTEM_TABLE_ACCESS = 3554
+ER_NO_SYSTEM_TABLE_ACCESS_FOR_DICTIONARY_TABLE = 3555
+ER_NO_SYSTEM_TABLE_ACCESS_FOR_SYSTEM_TABLE = 3556
+ER_NO_SYSTEM_TABLE_ACCESS_FOR_TABLE = 3557
+ER_INVALID_OPTION_KEY = 3558
+ER_INVALID_OPTION_VALUE = 3559
+ER_INVALID_OPTION_KEY_VALUE_PAIR = 3560
+ER_INVALID_OPTION_START_CHARACTER = 3561
+ER_INVALID_OPTION_END_CHARACTER = 3562
+ER_INVALID_OPTION_CHARACTERS = 3563
+ER_DUPLICATE_OPTION_KEY = 3564
+ER_WARN_SRS_NOT_FOUND_AXIS_ORDER = 3565
+ER_NO_ACCESS_TO_NATIVE_FCT = 3566
+ER_RESET_MASTER_TO_VALUE_OUT_OF_RANGE = 3567
+ER_UNRESOLVED_TABLE_LOCK = 3568
+ER_DUPLICATE_TABLE_LOCK = 3569
+ER_BINLOG_UNSAFE_SKIP_LOCKED = 3570
+ER_BINLOG_UNSAFE_NOWAIT = 3571
+ER_LOCK_NOWAIT = 3572
+ER_CTE_RECURSIVE_REQUIRES_UNION = 3573
+ER_CTE_RECURSIVE_REQUIRES_NONRECURSIVE_FIRST = 3574
+ER_CTE_RECURSIVE_FORBIDS_AGGREGATION = 3575
+ER_CTE_RECURSIVE_FORBIDDEN_JOIN_ORDER = 3576
+ER_CTE_RECURSIVE_REQUIRES_SINGLE_REFERENCE = 3577
+ER_SWITCH_TMP_ENGINE = 3578
+ER_WINDOW_NO_SUCH_WINDOW = 3579
+ER_WINDOW_CIRCULARITY_IN_WINDOW_GRAPH = 3580
+ER_WINDOW_NO_CHILD_PARTITIONING = 3581
+ER_WINDOW_NO_INHERIT_FRAME = 3582
+ER_WINDOW_NO_REDEFINE_ORDER_BY = 3583
+ER_WINDOW_FRAME_START_ILLEGAL = 3584
+ER_WINDOW_FRAME_END_ILLEGAL = 3585
+ER_WINDOW_FRAME_ILLEGAL = 3586
+ER_WINDOW_RANGE_FRAME_ORDER_TYPE = 3587
+ER_WINDOW_RANGE_FRAME_TEMPORAL_TYPE = 3588
+ER_WINDOW_RANGE_FRAME_NUMERIC_TYPE = 3589
+ER_WINDOW_RANGE_BOUND_NOT_CONSTANT = 3590
+ER_WINDOW_DUPLICATE_NAME = 3591
+ER_WINDOW_ILLEGAL_ORDER_BY = 3592
+ER_WINDOW_INVALID_WINDOW_FUNC_USE = 3593
+ER_WINDOW_INVALID_WINDOW_FUNC_ALIAS_USE = 3594
+ER_WINDOW_NESTED_WINDOW_FUNC_USE_IN_WINDOW_SPEC = 3595
+ER_WINDOW_ROWS_INTERVAL_USE = 3596
+ER_WINDOW_NO_GROUP_ORDER_UNUSED = 3597
+ER_WINDOW_EXPLAIN_JSON = 3598
+ER_WINDOW_FUNCTION_IGNORES_FRAME = 3599
+ER_WL9236_NOW_UNUSED = 3600
+ER_INVALID_NO_OF_ARGS = 3601
+ER_FIELD_IN_GROUPING_NOT_GROUP_BY = 3602
+ER_TOO_LONG_TABLESPACE_COMMENT = 3603
+ER_ENGINE_CANT_DROP_TABLE = 3604
+ER_ENGINE_CANT_DROP_MISSING_TABLE = 3605
+ER_TABLESPACE_DUP_FILENAME = 3606
+ER_DB_DROP_RMDIR2 = 3607
+ER_IMP_NO_FILES_MATCHED = 3608
+ER_IMP_SCHEMA_DOES_NOT_EXIST = 3609
+ER_IMP_TABLE_ALREADY_EXISTS = 3610
+ER_IMP_INCOMPATIBLE_MYSQLD_VERSION = 3611
+ER_IMP_INCOMPATIBLE_DD_VERSION = 3612
+ER_IMP_INCOMPATIBLE_SDI_VERSION = 3613
+ER_WARN_INVALID_HINT = 3614
+ER_VAR_DOES_NOT_EXIST = 3615
+ER_LONGITUDE_OUT_OF_RANGE = 3616
+ER_LATITUDE_OUT_OF_RANGE = 3617
+ER_NOT_IMPLEMENTED_FOR_GEOGRAPHIC_SRS = 3618
+ER_ILLEGAL_PRIVILEGE_LEVEL = 3619
+ER_NO_SYSTEM_VIEW_ACCESS = 3620
+ER_COMPONENT_FILTER_FLABBERGASTED = 3621
+ER_PART_EXPR_TOO_LONG = 3622
+ER_UDF_DROP_DYNAMICALLY_REGISTERED = 3623
+ER_UNABLE_TO_STORE_COLUMN_STATISTICS = 3624
+ER_UNABLE_TO_UPDATE_COLUMN_STATISTICS = 3625
+ER_UNABLE_TO_DROP_COLUMN_STATISTICS = 3626
+ER_UNABLE_TO_BUILD_HISTOGRAM = 3627
+ER_MANDATORY_ROLE = 3628
+ER_MISSING_TABLESPACE_FILE = 3629
+ER_PERSIST_ONLY_ACCESS_DENIED_ERROR = 3630
+ER_CMD_NEED_SUPER = 3631
+ER_PATH_IN_DATADIR = 3632
+ER_CLONE_DDL_IN_PROGRESS = 3633
+ER_CLONE_TOO_MANY_CONCURRENT_CLONES = 3634
+ER_APPLIER_LOG_EVENT_VALIDATION_ERROR = 3635
+ER_CTE_MAX_RECURSION_DEPTH = 3636
+ER_NOT_HINT_UPDATABLE_VARIABLE = 3637
+ER_CREDENTIALS_CONTRADICT_TO_HISTORY = 3638
+ER_WARNING_PASSWORD_HISTORY_CLAUSES_VOID = 3639
+ER_CLIENT_DOES_NOT_SUPPORT = 3640
+ER_I_S_SKIPPED_TABLESPACE = 3641
+ER_TABLESPACE_ENGINE_MISMATCH = 3642
+ER_WRONG_SRID_FOR_COLUMN = 3643
+ER_CANNOT_ALTER_SRID_DUE_TO_INDEX = 3644
+ER_WARN_BINLOG_PARTIAL_UPDATES_DISABLED = 3645
+ER_WARN_BINLOG_V1_ROW_EVENTS_DISABLED = 3646
+ER_WARN_BINLOG_PARTIAL_UPDATES_SUGGESTS_PARTIAL_IMAGES = 3647
+ER_COULD_NOT_APPLY_JSON_DIFF = 3648
+ER_CORRUPTED_JSON_DIFF = 3649
+ER_RESOURCE_GROUP_EXISTS = 3650
+ER_RESOURCE_GROUP_NOT_EXISTS = 3651
+ER_INVALID_VCPU_ID = 3652
+ER_INVALID_VCPU_RANGE = 3653
+ER_INVALID_THREAD_PRIORITY = 3654
+ER_DISALLOWED_OPERATION = 3655
+ER_RESOURCE_GROUP_BUSY = 3656
+ER_RESOURCE_GROUP_DISABLED = 3657
+ER_FEATURE_UNSUPPORTED = 3658
+ER_ATTRIBUTE_IGNORED = 3659
+ER_INVALID_THREAD_ID = 3660
+ER_RESOURCE_GROUP_BIND_FAILED = 3661
+ER_INVALID_USE_OF_FORCE_OPTION = 3662
+ER_GROUP_REPLICATION_COMMAND_FAILURE = 3663
+ER_SDI_OPERATION_FAILED = 3664
+ER_MISSING_JSON_TABLE_VALUE = 3665
+ER_WRONG_JSON_TABLE_VALUE = 3666
+ER_TF_MUST_HAVE_ALIAS = 3667
+ER_TF_FORBIDDEN_JOIN_TYPE = 3668
+ER_JT_VALUE_OUT_OF_RANGE = 3669
+ER_JT_MAX_NESTED_PATH = 3670
+ER_PASSWORD_EXPIRATION_NOT_SUPPORTED_BY_AUTH_METHOD = 3671
+ER_INVALID_GEOJSON_CRS_NOT_TOP_LEVEL = 3672
+ER_BAD_NULL_ERROR_NOT_IGNORED = 3673
+WARN_USELESS_SPATIAL_INDEX = 3674
+ER_DISK_FULL_NOWAIT = 3675
+ER_PARSE_ERROR_IN_DIGEST_FN = 3676
+ER_UNDISCLOSED_PARSE_ERROR_IN_DIGEST_FN = 3677
+ER_SCHEMA_DIR_EXISTS = 3678
+ER_SCHEMA_DIR_MISSING = 3679
+ER_SCHEMA_DIR_CREATE_FAILED = 3680
+ER_SCHEMA_DIR_UNKNOWN = 3681
+ER_ONLY_IMPLEMENTED_FOR_SRID_0_AND_4326 = 3682
+ER_BINLOG_EXPIRE_LOG_DAYS_AND_SECS_USED_TOGETHER = 3683
+ER_REGEXP_BUFFER_OVERFLOW = 3684
+ER_REGEXP_ILLEGAL_ARGUMENT = 3685
+ER_REGEXP_INDEX_OUTOFBOUNDS_ERROR = 3686
+ER_REGEXP_INTERNAL_ERROR = 3687
+ER_REGEXP_RULE_SYNTAX = 3688
+ER_REGEXP_BAD_ESCAPE_SEQUENCE = 3689
+ER_REGEXP_UNIMPLEMENTED = 3690
+ER_REGEXP_MISMATCHED_PAREN = 3691
+ER_REGEXP_BAD_INTERVAL = 3692
+ER_REGEXP_MAX_LT_MIN = 3693
+ER_REGEXP_INVALID_BACK_REF = 3694
+ER_REGEXP_LOOK_BEHIND_LIMIT = 3695
+ER_REGEXP_MISSING_CLOSE_BRACKET = 3696
+ER_REGEXP_INVALID_RANGE = 3697
+ER_REGEXP_STACK_OVERFLOW = 3698
+ER_REGEXP_TIME_OUT = 3699
+ER_REGEXP_PATTERN_TOO_BIG = 3700
+ER_CANT_SET_ERROR_LOG_SERVICE = 3701
+ER_EMPTY_PIPELINE_FOR_ERROR_LOG_SERVICE = 3702
+ER_COMPONENT_FILTER_DIAGNOSTICS = 3703
+ER_NOT_IMPLEMENTED_FOR_CARTESIAN_SRS = 3704
+ER_NOT_IMPLEMENTED_FOR_PROJECTED_SRS = 3705
+ER_NONPOSITIVE_RADIUS = 3706
+ER_RESTART_SERVER_FAILED = 3707
+ER_SRS_MISSING_MANDATORY_ATTRIBUTE = 3708
+ER_SRS_MULTIPLE_ATTRIBUTE_DEFINITIONS = 3709
+ER_SRS_NAME_CANT_BE_EMPTY_OR_WHITESPACE = 3710
+ER_SRS_ORGANIZATION_CANT_BE_EMPTY_OR_WHITESPACE = 3711
+ER_SRS_ID_ALREADY_EXISTS = 3712
+ER_WARN_SRS_ID_ALREADY_EXISTS = 3713
+ER_CANT_MODIFY_SRID_0 = 3714
+ER_WARN_RESERVED_SRID_RANGE = 3715
+ER_CANT_MODIFY_SRS_USED_BY_COLUMN = 3716
+ER_SRS_INVALID_CHARACTER_IN_ATTRIBUTE = 3717
+ER_SRS_ATTRIBUTE_STRING_TOO_LONG = 3718
+ER_DEPRECATED_UTF8_ALIAS = 3719
+ER_DEPRECATED_NATIONAL = 3720
+ER_INVALID_DEFAULT_UTF8MB4_COLLATION = 3721
+ER_UNABLE_TO_COLLECT_LOG_STATUS = 3722
+ER_RESERVED_TABLESPACE_NAME = 3723
+ER_UNABLE_TO_SET_OPTION = 3724
+ER_SLAVE_POSSIBLY_DIVERGED_AFTER_DDL = 3725
+ER_SRS_NOT_GEOGRAPHIC = 3726
+ER_POLYGON_TOO_LARGE = 3727
+ER_SPATIAL_UNIQUE_INDEX = 3728
+ER_INDEX_TYPE_NOT_SUPPORTED_FOR_SPATIAL_INDEX = 3729
+ER_FK_CANNOT_DROP_PARENT = 3730
+ER_GEOMETRY_PARAM_LONGITUDE_OUT_OF_RANGE = 3731
+ER_GEOMETRY_PARAM_LATITUDE_OUT_OF_RANGE = 3732
+ER_FK_CANNOT_USE_VIRTUAL_COLUMN = 3733
+ER_FK_NO_COLUMN_PARENT = 3734
+ER_CANT_SET_ERROR_SUPPRESSION_LIST = 3735
+ER_SRS_GEOGCS_INVALID_AXES = 3736
+ER_SRS_INVALID_SEMI_MAJOR_AXIS = 3737
+ER_SRS_INVALID_INVERSE_FLATTENING = 3738
+ER_SRS_INVALID_ANGULAR_UNIT = 3739
+ER_SRS_INVALID_PRIME_MERIDIAN = 3740
+ER_TRANSFORM_SOURCE_SRS_NOT_SUPPORTED = 3741
+ER_TRANSFORM_TARGET_SRS_NOT_SUPPORTED = 3742
+ER_TRANSFORM_SOURCE_SRS_MISSING_TOWGS84 = 3743
+ER_TRANSFORM_TARGET_SRS_MISSING_TOWGS84 = 3744
+ER_TEMP_TABLE_PREVENTS_SWITCH_SESSION_BINLOG_FORMAT = 3745
+ER_TEMP_TABLE_PREVENTS_SWITCH_GLOBAL_BINLOG_FORMAT = 3746
+ER_RUNNING_APPLIER_PREVENTS_SWITCH_GLOBAL_BINLOG_FORMAT = 3747
+ER_CLIENT_GTID_UNSAFE_CREATE_DROP_TEMP_TABLE_IN_TRX_IN_SBR = 3748
+OBSOLETE_ER_XA_CANT_CREATE_MDL_BACKUP = 3749
+ER_TABLE_WITHOUT_PK = 3750
+ER_WARN_DATA_TRUNCATED_FUNCTIONAL_INDEX = 3751
+ER_WARN_DATA_OUT_OF_RANGE_FUNCTIONAL_INDEX = 3752
+ER_FUNCTIONAL_INDEX_ON_JSON_OR_GEOMETRY_FUNCTION = 3753
+ER_FUNCTIONAL_INDEX_REF_AUTO_INCREMENT = 3754
+ER_CANNOT_DROP_COLUMN_FUNCTIONAL_INDEX = 3755
+ER_FUNCTIONAL_INDEX_PRIMARY_KEY = 3756
+ER_FUNCTIONAL_INDEX_ON_LOB = 3757
+ER_FUNCTIONAL_INDEX_FUNCTION_IS_NOT_ALLOWED = 3758
+ER_FULLTEXT_FUNCTIONAL_INDEX = 3759
+ER_SPATIAL_FUNCTIONAL_INDEX = 3760
+ER_WRONG_KEY_COLUMN_FUNCTIONAL_INDEX = 3761
+ER_FUNCTIONAL_INDEX_ON_FIELD = 3762
+ER_GENERATED_COLUMN_NAMED_FUNCTION_IS_NOT_ALLOWED = 3763
+ER_GENERATED_COLUMN_ROW_VALUE = 3764
+ER_GENERATED_COLUMN_VARIABLES = 3765
+ER_DEPENDENT_BY_DEFAULT_GENERATED_VALUE = 3766
+ER_DEFAULT_VAL_GENERATED_NON_PRIOR = 3767
+ER_DEFAULT_VAL_GENERATED_REF_AUTO_INC = 3768
+ER_DEFAULT_VAL_GENERATED_FUNCTION_IS_NOT_ALLOWED = 3769
+ER_DEFAULT_VAL_GENERATED_NAMED_FUNCTION_IS_NOT_ALLOWED = 3770
+ER_DEFAULT_VAL_GENERATED_ROW_VALUE = 3771
+ER_DEFAULT_VAL_GENERATED_VARIABLES = 3772
+ER_DEFAULT_AS_VAL_GENERATED = 3773
+ER_UNSUPPORTED_ACTION_ON_DEFAULT_VAL_GENERATED = 3774
+ER_GTID_UNSAFE_ALTER_ADD_COL_WITH_DEFAULT_EXPRESSION = 3775
+ER_FK_CANNOT_CHANGE_ENGINE = 3776
+ER_WARN_DEPRECATED_USER_SET_EXPR = 3777
+ER_WARN_DEPRECATED_UTF8MB3_COLLATION = 3778
+ER_WARN_DEPRECATED_NESTED_COMMENT_SYNTAX = 3779
+ER_FK_INCOMPATIBLE_COLUMNS = 3780
+ER_GR_HOLD_WAIT_TIMEOUT = 3781
+ER_GR_HOLD_KILLED = 3782
+ER_GR_HOLD_MEMBER_STATUS_ERROR = 3783
+ER_RPL_ENCRYPTION_FAILED_TO_FETCH_KEY = 3784
+ER_RPL_ENCRYPTION_KEY_NOT_FOUND = 3785
+ER_RPL_ENCRYPTION_KEYRING_INVALID_KEY = 3786
+ER_RPL_ENCRYPTION_HEADER_ERROR = 3787
+ER_RPL_ENCRYPTION_FAILED_TO_ROTATE_LOGS = 3788
+ER_RPL_ENCRYPTION_KEY_EXISTS_UNEXPECTED = 3789
+ER_RPL_ENCRYPTION_FAILED_TO_GENERATE_KEY = 3790
+ER_RPL_ENCRYPTION_FAILED_TO_STORE_KEY = 3791
+ER_RPL_ENCRYPTION_FAILED_TO_REMOVE_KEY = 3792
+ER_RPL_ENCRYPTION_UNABLE_TO_CHANGE_OPTION = 3793
+ER_RPL_ENCRYPTION_MASTER_KEY_RECOVERY_FAILED = 3794
+ER_SLOW_LOG_MODE_IGNORED_WHEN_NOT_LOGGING_TO_FILE = 3795
+ER_GRP_TRX_CONSISTENCY_NOT_ALLOWED = 3796
+ER_GRP_TRX_CONSISTENCY_BEFORE = 3797
+ER_GRP_TRX_CONSISTENCY_AFTER_ON_TRX_BEGIN = 3798
+ER_GRP_TRX_CONSISTENCY_BEGIN_NOT_ALLOWED = 3799
+ER_FUNCTIONAL_INDEX_ROW_VALUE_IS_NOT_ALLOWED = 3800
+ER_RPL_ENCRYPTION_FAILED_TO_ENCRYPT = 3801
+ER_PAGE_TRACKING_NOT_STARTED = 3802
+ER_PAGE_TRACKING_RANGE_NOT_TRACKED = 3803
+ER_PAGE_TRACKING_CANNOT_PURGE = 3804
+ER_RPL_ENCRYPTION_CANNOT_ROTATE_BINLOG_MASTER_KEY = 3805
+ER_BINLOG_MASTER_KEY_RECOVERY_OUT_OF_COMBINATION = 3806
+ER_BINLOG_MASTER_KEY_ROTATION_FAIL_TO_OPERATE_KEY = 3807
+ER_BINLOG_MASTER_KEY_ROTATION_FAIL_TO_ROTATE_LOGS = 3808
+ER_BINLOG_MASTER_KEY_ROTATION_FAIL_TO_REENCRYPT_LOG = 3809
+ER_BINLOG_MASTER_KEY_ROTATION_FAIL_TO_CLEANUP_UNUSED_KEYS = 3810
+ER_BINLOG_MASTER_KEY_ROTATION_FAIL_TO_CLEANUP_AUX_KEY = 3811
+ER_NON_BOOLEAN_EXPR_FOR_CHECK_CONSTRAINT = 3812
+ER_COLUMN_CHECK_CONSTRAINT_REFERENCES_OTHER_COLUMN = 3813
+ER_CHECK_CONSTRAINT_NAMED_FUNCTION_IS_NOT_ALLOWED = 3814
+ER_CHECK_CONSTRAINT_FUNCTION_IS_NOT_ALLOWED = 3815
+ER_CHECK_CONSTRAINT_VARIABLES = 3816
+ER_CHECK_CONSTRAINT_ROW_VALUE = 3817
+ER_CHECK_CONSTRAINT_REFERS_AUTO_INCREMENT_COLUMN = 3818
+ER_CHECK_CONSTRAINT_VIOLATED = 3819
+ER_CHECK_CONSTRAINT_REFERS_UNKNOWN_COLUMN = 3820
+ER_CHECK_CONSTRAINT_NOT_FOUND = 3821
+ER_CHECK_CONSTRAINT_DUP_NAME = 3822
+ER_CHECK_CONSTRAINT_CLAUSE_USING_FK_REFER_ACTION_COLUMN = 3823
+WARN_UNENCRYPTED_TABLE_IN_ENCRYPTED_DB = 3824
+ER_INVALID_ENCRYPTION_REQUEST = 3825
+ER_CANNOT_SET_TABLE_ENCRYPTION = 3826
+ER_CANNOT_SET_DATABASE_ENCRYPTION = 3827
+ER_CANNOT_SET_TABLESPACE_ENCRYPTION = 3828
+ER_TABLESPACE_CANNOT_BE_ENCRYPTED = 3829
+ER_TABLESPACE_CANNOT_BE_DECRYPTED = 3830
+ER_TABLESPACE_TYPE_UNKNOWN = 3831
+ER_TARGET_TABLESPACE_UNENCRYPTED = 3832
+ER_CANNOT_USE_ENCRYPTION_CLAUSE = 3833
+ER_INVALID_MULTIPLE_CLAUSES = 3834
+ER_UNSUPPORTED_USE_OF_GRANT_AS = 3835
+ER_UKNOWN_AUTH_ID_OR_ACCESS_DENIED_FOR_GRANT_AS = 3836
+ER_DEPENDENT_BY_FUNCTIONAL_INDEX = 3837
+ER_PLUGIN_NOT_EARLY = 3838
+ER_INNODB_REDO_LOG_ARCHIVE_START_SUBDIR_PATH = 3839
+ER_INNODB_REDO_LOG_ARCHIVE_START_TIMEOUT = 3840
+ER_INNODB_REDO_LOG_ARCHIVE_DIRS_INVALID = 3841
+ER_INNODB_REDO_LOG_ARCHIVE_LABEL_NOT_FOUND = 3842
+ER_INNODB_REDO_LOG_ARCHIVE_DIR_EMPTY = 3843
+ER_INNODB_REDO_LOG_ARCHIVE_NO_SUCH_DIR = 3844
+ER_INNODB_REDO_LOG_ARCHIVE_DIR_CLASH = 3845
+ER_INNODB_REDO_LOG_ARCHIVE_DIR_PERMISSIONS = 3846
+ER_INNODB_REDO_LOG_ARCHIVE_FILE_CREATE = 3847
+ER_INNODB_REDO_LOG_ARCHIVE_ACTIVE = 3848
+ER_INNODB_REDO_LOG_ARCHIVE_INACTIVE = 3849
+ER_INNODB_REDO_LOG_ARCHIVE_FAILED = 3850
+ER_INNODB_REDO_LOG_ARCHIVE_SESSION = 3851
+ER_STD_REGEX_ERROR = 3852
+ER_INVALID_JSON_TYPE = 3853
+ER_CANNOT_CONVERT_STRING = 3854
+ER_DEPENDENT_BY_PARTITION_FUNC = 3855
+ER_WARN_DEPRECATED_FLOAT_AUTO_INCREMENT = 3856
+ER_RPL_CANT_STOP_SLAVE_WHILE_LOCKED_BACKUP = 3857
+ER_WARN_DEPRECATED_FLOAT_DIGITS = 3858
+ER_WARN_DEPRECATED_FLOAT_UNSIGNED = 3859
+ER_WARN_DEPRECATED_INTEGER_DISPLAY_WIDTH = 3860
+ER_WARN_DEPRECATED_ZEROFILL = 3861
+ER_CLONE_DONOR = 3862
+ER_CLONE_PROTOCOL = 3863
+ER_CLONE_DONOR_VERSION = 3864
+ER_CLONE_OS = 3865
+ER_CLONE_PLATFORM = 3866
+ER_CLONE_CHARSET = 3867
+ER_CLONE_CONFIG = 3868
+ER_CLONE_SYS_CONFIG = 3869
+ER_CLONE_PLUGIN_MATCH = 3870
+ER_CLONE_LOOPBACK = 3871
+ER_CLONE_ENCRYPTION = 3872
+ER_CLONE_DISK_SPACE = 3873
+ER_CLONE_IN_PROGRESS = 3874
+ER_CLONE_DISALLOWED = 3875
+ER_CANNOT_GRANT_ROLES_TO_ANONYMOUS_USER = 3876
+ER_SECONDARY_ENGINE_PLUGIN = 3877
+ER_SECOND_PASSWORD_CANNOT_BE_EMPTY = 3878
+ER_DB_ACCESS_DENIED = 3879
+ER_DA_AUTH_ID_WITH_SYSTEM_USER_PRIV_IN_MANDATORY_ROLES = 3880
+ER_DA_RPL_GTID_TABLE_CANNOT_OPEN = 3881
+ER_GEOMETRY_IN_UNKNOWN_LENGTH_UNIT = 3882
+ER_DA_PLUGIN_INSTALL_ERROR = 3883
+ER_NO_SESSION_TEMP = 3884
+ER_DA_UNKNOWN_ERROR_NUMBER = 3885
+ER_COLUMN_CHANGE_SIZE = 3886
+ER_REGEXP_INVALID_CAPTURE_GROUP_NAME = 3887
+ER_DA_SSL_LIBRARY_ERROR = 3888
+ER_SECONDARY_ENGINE = 3889
+ER_SECONDARY_ENGINE_DDL = 3890
+ER_INCORRECT_CURRENT_PASSWORD = 3891
+ER_MISSING_CURRENT_PASSWORD = 3892
+ER_CURRENT_PASSWORD_NOT_REQUIRED = 3893
+ER_PASSWORD_CANNOT_BE_RETAINED_ON_PLUGIN_CHANGE = 3894
+ER_CURRENT_PASSWORD_CANNOT_BE_RETAINED = 3895
+ER_PARTIAL_REVOKES_EXIST = 3896
+ER_CANNOT_GRANT_SYSTEM_PRIV_TO_MANDATORY_ROLE = 3897
+ER_XA_REPLICATION_FILTERS = 3898
+ER_UNSUPPORTED_SQL_MODE = 3899
+ER_REGEXP_INVALID_FLAG = 3900
+ER_PARTIAL_REVOKE_AND_DB_GRANT_BOTH_EXISTS = 3901
+ER_UNIT_NOT_FOUND = 3902
+ER_INVALID_JSON_VALUE_FOR_FUNC_INDEX = 3903
+ER_JSON_VALUE_OUT_OF_RANGE_FOR_FUNC_INDEX = 3904
+ER_EXCEEDED_MV_KEYS_NUM = 3905
+ER_EXCEEDED_MV_KEYS_SPACE = 3906
+ER_FUNCTIONAL_INDEX_DATA_IS_TOO_LONG = 3907
+ER_WRONG_MVI_VALUE = 3908
+ER_WARN_FUNC_INDEX_NOT_APPLICABLE = 3909
+ER_GRP_RPL_UDF_ERROR = 3910
+ER_UPDATE_GTID_PURGED_WITH_GR = 3911
+ER_GROUPING_ON_TIMESTAMP_IN_DST = 3912
+ER_TABLE_NAME_CAUSES_TOO_LONG_PATH = 3913
+ER_AUDIT_LOG_INSUFFICIENT_PRIVILEGE = 3914
+OBSOLETE_ER_AUDIT_LOG_PASSWORD_HAS_BEEN_COPIED = 3915
+ER_DA_GRP_RPL_STARTED_AUTO_REJOIN = 3916
+ER_SYSVAR_CHANGE_DURING_QUERY = 3917
+ER_GLOBSTAT_CHANGE_DURING_QUERY = 3918
+ER_GRP_RPL_MESSAGE_SERVICE_INIT_FAILURE = 3919
+ER_CHANGE_MASTER_WRONG_COMPRESSION_ALGORITHM_CLIENT = 3920
+ER_CHANGE_MASTER_WRONG_COMPRESSION_LEVEL_CLIENT = 3921
+ER_WRONG_COMPRESSION_ALGORITHM_CLIENT = 3922
+ER_WRONG_COMPRESSION_LEVEL_CLIENT = 3923
+ER_CHANGE_MASTER_WRONG_COMPRESSION_ALGORITHM_LIST_CLIENT = 3924
+ER_CLIENT_PRIVILEGE_CHECKS_USER_CANNOT_BE_ANONYMOUS = 3925
+ER_CLIENT_PRIVILEGE_CHECKS_USER_DOES_NOT_EXIST = 3926
+ER_CLIENT_PRIVILEGE_CHECKS_USER_CORRUPT = 3927
+ER_CLIENT_PRIVILEGE_CHECKS_USER_NEEDS_RPL_APPLIER_PRIV = 3928
+ER_WARN_DA_PRIVILEGE_NOT_REGISTERED = 3929
+ER_CLIENT_KEYRING_UDF_KEY_INVALID = 3930
+ER_CLIENT_KEYRING_UDF_KEY_TYPE_INVALID = 3931
+ER_CLIENT_KEYRING_UDF_KEY_TOO_LONG = 3932
+ER_CLIENT_KEYRING_UDF_KEY_TYPE_TOO_LONG = 3933
+ER_JSON_SCHEMA_VALIDATION_ERROR_WITH_DETAILED_REPORT = 3934
+ER_DA_UDF_INVALID_CHARSET_SPECIFIED = 3935
+ER_DA_UDF_INVALID_CHARSET = 3936
+ER_DA_UDF_INVALID_COLLATION = 3937
+ER_DA_UDF_INVALID_EXTENSION_ARGUMENT_TYPE = 3938
+ER_MULTIPLE_CONSTRAINTS_WITH_SAME_NAME = 3939
+ER_CONSTRAINT_NOT_FOUND = 3940
+ER_ALTER_CONSTRAINT_ENFORCEMENT_NOT_SUPPORTED = 3941
+ER_TABLE_VALUE_CONSTRUCTOR_MUST_HAVE_COLUMNS = 3942
+ER_TABLE_VALUE_CONSTRUCTOR_CANNOT_HAVE_DEFAULT = 3943
+ER_CLIENT_QUERY_FAILURE_INVALID_NON_ROW_FORMAT = 3944
+ER_REQUIRE_ROW_FORMAT_INVALID_VALUE = 3945
+ER_FAILED_TO_DETERMINE_IF_ROLE_IS_MANDATORY = 3946
+ER_FAILED_TO_FETCH_MANDATORY_ROLE_LIST = 3947
+ER_CLIENT_LOCAL_FILES_DISABLED = 3948
+ER_IMP_INCOMPATIBLE_CFG_VERSION = 3949
+ER_DA_OOM = 3950
+ER_DA_UDF_INVALID_ARGUMENT_TO_SET_CHARSET = 3951
+ER_DA_UDF_INVALID_RETURN_TYPE_TO_SET_CHARSET = 3952
+ER_MULTIPLE_INTO_CLAUSES = 3953
+ER_MISPLACED_INTO = 3954
+ER_USER_ACCESS_DENIED_FOR_USER_ACCOUNT_BLOCKED_BY_PASSWORD_LOCK = 3955
+ER_WARN_DEPRECATED_YEAR_UNSIGNED = 3956
+ER_CLONE_NETWORK_PACKET = 3957
+ER_SDI_OPERATION_FAILED_MISSING_RECORD = 3958
+ER_DEPENDENT_BY_CHECK_CONSTRAINT = 3959
+ER_GRP_OPERATION_NOT_ALLOWED_GR_MUST_STOP = 3960
+ER_WARN_DEPRECATED_JSON_TABLE_ON_ERROR_ON_EMPTY = 3961
+ER_WARN_DEPRECATED_INNER_INTO = 3962
+ER_WARN_DEPRECATED_VALUES_FUNCTION_ALWAYS_NULL = 3963
+ER_WARN_DEPRECATED_SQL_CALC_FOUND_ROWS = 3964
+ER_WARN_DEPRECATED_FOUND_ROWS = 3965
+ER_MISSING_JSON_VALUE = 3966
+ER_MULTIPLE_JSON_VALUES = 3967
+ER_HOSTNAME_TOO_LONG = 3968
+ER_WARN_CLIENT_DEPRECATED_PARTITION_PREFIX_KEY = 3969
+ER_GROUP_REPLICATION_USER_EMPTY_MSG = 3970
+ER_GROUP_REPLICATION_USER_MANDATORY_MSG = 3971
+ER_GROUP_REPLICATION_PASSWORD_LENGTH = 3972
+ER_SUBQUERY_TRANSFORM_REJECTED = 3973
+ER_DA_GRP_RPL_RECOVERY_ENDPOINT_FORMAT = 3974
+ER_DA_GRP_RPL_RECOVERY_ENDPOINT_INVALID = 3975
+ER_WRONG_VALUE_FOR_VAR_PLUS_ACTIONABLE_PART = 3976
+ER_STATEMENT_NOT_ALLOWED_AFTER_START_TRANSACTION = 3977
+ER_FOREIGN_KEY_WITH_ATOMIC_CREATE_SELECT = 3978
+ER_NOT_ALLOWED_WITH_START_TRANSACTION = 3979
+ER_INVALID_JSON_ATTRIBUTE = 3980
+ER_ENGINE_ATTRIBUTE_NOT_SUPPORTED = 3981
+ER_INVALID_USER_ATTRIBUTE_JSON = 3982
+ER_INNODB_REDO_DISABLED = 3983
+ER_INNODB_REDO_ARCHIVING_ENABLED = 3984
+ER_MDL_OUT_OF_RESOURCES = 3985
+ER_IMPLICIT_COMPARISON_FOR_JSON = 3986
+ER_FUNCTION_DOES_NOT_SUPPORT_CHARACTER_SET = 3987
+ER_IMPOSSIBLE_STRING_CONVERSION = 3988
+ER_SCHEMA_READ_ONLY = 3989
+ER_RPL_ASYNC_RECONNECT_GTID_MODE_OFF = 3990
+ER_RPL_ASYNC_RECONNECT_AUTO_POSITION_OFF = 3991
+ER_DISABLE_GTID_MODE_REQUIRES_ASYNC_RECONNECT_OFF = 3992
+ER_DISABLE_AUTO_POSITION_REQUIRES_ASYNC_RECONNECT_OFF = 3993
+ER_INVALID_PARAMETER_USE = 3994
+ER_CHARACTER_SET_MISMATCH = 3995
+ER_WARN_VAR_VALUE_CHANGE_NOT_SUPPORTED = 3996
+ER_INVALID_TIME_ZONE_INTERVAL = 3997
+ER_INVALID_CAST = 3998
+ER_HYPERGRAPH_NOT_SUPPORTED_YET = 3999
+ER_WARN_HYPERGRAPH_EXPERIMENTAL = 4000
+ER_DA_NO_ERROR_LOG_PARSER_CONFIGURED = 4001
+ER_DA_ERROR_LOG_TABLE_DISABLED = 4002
+ER_DA_ERROR_LOG_MULTIPLE_FILTERS = 4003
+ER_DA_CANT_OPEN_ERROR_LOG = 4004
+ER_USER_REFERENCED_AS_DEFINER = 4005
+ER_CANNOT_USER_REFERENCED_AS_DEFINER = 4006
+ER_REGEX_NUMBER_TOO_BIG = 4007
+ER_SPVAR_NONINTEGER_TYPE = 4008
+WARN_UNSUPPORTED_ACL_TABLES_READ = 4009
+ER_BINLOG_UNSAFE_ACL_TABLE_READ_IN_DML_DDL = 4010
+ER_STOP_REPLICA_MONITOR_IO_THREAD_TIMEOUT = 4011
+ER_STARTING_REPLICA_MONITOR_IO_THREAD = 4012
+ER_CANT_USE_ANONYMOUS_TO_GTID_WITH_GTID_MODE_NOT_ON = 4013
+ER_CANT_COMBINE_ANONYMOUS_TO_GTID_AND_AUTOPOSITION = 4014
+ER_ASSIGN_GTIDS_TO_ANONYMOUS_TRANSACTIONS_REQUIRES_GTID_MODE_ON = 4015
+ER_SQL_REPLICA_SKIP_COUNTER_USED_WITH_GTID_MODE_ON = 4016
+ER_USING_ASSIGN_GTIDS_TO_ANONYMOUS_TRANSACTIONS_AS_LOCAL_OR_UUID = 4017
+ER_CANT_SET_ANONYMOUS_TO_GTID_AND_WAIT_UNTIL_SQL_THD_AFTER_GTIDS = 4018
+ER_CANT_SET_SQL_AFTER_OR_BEFORE_GTIDS_WITH_ANONYMOUS_TO_GTID = 4019
+ER_ANONYMOUS_TO_GTID_UUID_SAME_AS_GROUP_NAME = 4020
+ER_CANT_USE_SAME_UUID_AS_GROUP_NAME = 4021
+ER_GRP_RPL_RECOVERY_CHANNEL_STILL_RUNNING = 4022
+ER_INNODB_INVALID_AUTOEXTEND_SIZE_VALUE = 4023
+ER_INNODB_INCOMPATIBLE_WITH_TABLESPACE = 4024
+ER_INNODB_AUTOEXTEND_SIZE_OUT_OF_RANGE = 4025
+ER_CANNOT_USE_AUTOEXTEND_SIZE_CLAUSE = 4026
+ER_ROLE_GRANTED_TO_ITSELF = 4027
+ER_TABLE_MUST_HAVE_A_VISIBLE_COLUMN = 4028
+ER_INNODB_COMPRESSION_FAILURE = 4029
+ER_WARN_ASYNC_CONN_FAILOVER_NETWORK_NAMESPACE = 4030
+ER_CLIENT_INTERACTION_TIMEOUT = 4031
+ER_INVALID_CAST_TO_GEOMETRY = 4032
+ER_INVALID_CAST_POLYGON_RING_DIRECTION = 4033
+ER_GIS_DIFFERENT_SRIDS_AGGREGATION = 4034
+ER_RELOAD_KEYRING_FAILURE = 4035
+ER_SDI_GET_KEYS_INVALID_TABLESPACE = 4036
+ER_CHANGE_RPL_SRC_WRONG_COMPRESSION_ALGORITHM_SIZE = 4037
+ER_WARN_DEPRECATED_TLS_VERSION_FOR_CHANNEL_CLI = 4038
+ER_CANT_USE_SAME_UUID_AS_VIEW_CHANGE_UUID = 4039
+ER_ANONYMOUS_TO_GTID_UUID_SAME_AS_VIEW_CHANGE_UUID = 4040
+ER_GRP_RPL_VIEW_CHANGE_UUID_FAIL_GET_VARIABLE = 4041
+ER_WARN_ADUIT_LOG_MAX_SIZE_AND_PRUNE_SECONDS = 4042
+ER_WARN_ADUIT_LOG_MAX_SIZE_CLOSE_TO_ROTATE_ON_SIZE = 4043
+ER_KERBEROS_CREATE_USER = 4044
+ER_INSTALL_PLUGIN_CONFLICT_CLIENT = 4045
+ER_DA_ERROR_LOG_COMPONENT_FLUSH_FAILED = 4046
+ER_WARN_SQL_AFTER_MTS_GAPS_GAP_NOT_CALCULATED = 4047
+ER_INVALID_ASSIGNMENT_TARGET = 4048
+ER_OPERATION_NOT_ALLOWED_ON_GR_SECONDARY = 4049
+ER_GRP_RPL_FAILOVER_CHANNEL_STATUS_PROPAGATION = 4050
+ER_WARN_AUDIT_LOG_FORMAT_UNIX_TIMESTAMP_ONLY_WHEN_JSON = 4051
+ER_INVALID_MFA_PLUGIN_SPECIFIED = 4052
+ER_IDENTIFIED_BY_UNSUPPORTED = 4053
+ER_INVALID_PLUGIN_FOR_REGISTRATION = 4054
+ER_PLUGIN_REQUIRES_REGISTRATION = 4055
+ER_MFA_METHOD_EXISTS = 4056
+ER_MFA_METHOD_NOT_EXISTS = 4057
+ER_AUTHENTICATION_POLICY_MISMATCH = 4058
+ER_PLUGIN_REGISTRATION_DONE = 4059
+ER_INVALID_USER_FOR_REGISTRATION = 4060
+ER_USER_REGISTRATION_FAILED = 4061
+ER_MFA_METHODS_INVALID_ORDER = 4062
+ER_MFA_METHODS_IDENTICAL = 4063
+ER_INVALID_MFA_OPERATIONS_FOR_PASSWORDLESS_USER = 4064
+ER_CHANGE_REPLICATION_SOURCE_NO_OPTIONS_FOR_GTID_ONLY = 4065
+ER_CHANGE_REP_SOURCE_CANT_DISABLE_REQ_ROW_FORMAT_WITH_GTID_ONLY = 4066
+ER_CHANGE_REP_SOURCE_CANT_DISABLE_AUTO_POSITION_WITH_GTID_ONLY = 4067
+ER_CHANGE_REP_SOURCE_CANT_DISABLE_GTID_ONLY_WITHOUT_POSITIONS = 4068
+ER_CHANGE_REP_SOURCE_CANT_DISABLE_AUTO_POS_WITHOUT_POSITIONS = 4069
+ER_CHANGE_REP_SOURCE_GR_CHANNEL_WITH_GTID_MODE_NOT_ON = 4070
+ER_CANT_USE_GTID_ONLY_WITH_GTID_MODE_NOT_ON = 4071
+ER_WARN_C_DISABLE_GTID_ONLY_WITH_SOURCE_AUTO_POS_INVALID_POS = 4072
+ER_DA_SSL_FIPS_MODE_ERROR = 4073
+CR_UNKNOWN_ERROR = 2000
+CR_SOCKET_CREATE_ERROR = 2001
+CR_CONNECTION_ERROR = 2002
+CR_CONN_HOST_ERROR = 2003
+CR_IPSOCK_ERROR = 2004
+CR_UNKNOWN_HOST = 2005
+CR_SERVER_GONE_ERROR = 2006
+CR_VERSION_ERROR = 2007
+CR_OUT_OF_MEMORY = 2008
+CR_WRONG_HOST_INFO = 2009
+CR_LOCALHOST_CONNECTION = 2010
+CR_TCP_CONNECTION = 2011
+CR_SERVER_HANDSHAKE_ERR = 2012
+CR_SERVER_LOST = 2013
+CR_COMMANDS_OUT_OF_SYNC = 2014
+CR_NAMEDPIPE_CONNECTION = 2015
+CR_NAMEDPIPEWAIT_ERROR = 2016
+CR_NAMEDPIPEOPEN_ERROR = 2017
+CR_NAMEDPIPESETSTATE_ERROR = 2018
+CR_CANT_READ_CHARSET = 2019
+CR_NET_PACKET_TOO_LARGE = 2020
+CR_EMBEDDED_CONNECTION = 2021
+CR_PROBE_SLAVE_STATUS = 2022
+CR_PROBE_SLAVE_HOSTS = 2023
+CR_PROBE_SLAVE_CONNECT = 2024
+CR_PROBE_MASTER_CONNECT = 2025
+CR_SSL_CONNECTION_ERROR = 2026
+CR_MALFORMED_PACKET = 2027
+CR_WRONG_LICENSE = 2028
+CR_NULL_POINTER = 2029
+CR_NO_PREPARE_STMT = 2030
+CR_PARAMS_NOT_BOUND = 2031
+CR_DATA_TRUNCATED = 2032
+CR_NO_PARAMETERS_EXISTS = 2033
+CR_INVALID_PARAMETER_NO = 2034
+CR_INVALID_BUFFER_USE = 2035
+CR_UNSUPPORTED_PARAM_TYPE = 2036
+CR_SHARED_MEMORY_CONNECTION = 2037
+CR_SHARED_MEMORY_CONNECT_REQUEST_ERROR = 2038
+CR_SHARED_MEMORY_CONNECT_ANSWER_ERROR = 2039
+CR_SHARED_MEMORY_CONNECT_FILE_MAP_ERROR = 2040
+CR_SHARED_MEMORY_CONNECT_MAP_ERROR = 2041
+CR_SHARED_MEMORY_FILE_MAP_ERROR = 2042
+CR_SHARED_MEMORY_MAP_ERROR = 2043
+CR_SHARED_MEMORY_EVENT_ERROR = 2044
+CR_SHARED_MEMORY_CONNECT_ABANDONED_ERROR = 2045
+CR_SHARED_MEMORY_CONNECT_SET_ERROR = 2046
+CR_CONN_UNKNOW_PROTOCOL = 2047
+CR_INVALID_CONN_HANDLE = 2048
+CR_UNUSED_1 = 2049
+CR_FETCH_CANCELED = 2050
+CR_NO_DATA = 2051
+CR_NO_STMT_METADATA = 2052
+CR_NO_RESULT_SET = 2053
+CR_NOT_IMPLEMENTED = 2054
+CR_SERVER_LOST_EXTENDED = 2055
+CR_STMT_CLOSED = 2056
+CR_NEW_STMT_METADATA = 2057
+CR_ALREADY_CONNECTED = 2058
+CR_AUTH_PLUGIN_CANNOT_LOAD = 2059
+CR_DUPLICATE_CONNECTION_ATTR = 2060
+CR_AUTH_PLUGIN_ERR = 2061
+CR_INSECURE_API_ERR = 2062
+CR_FILE_NAME_TOO_LONG = 2063
+CR_SSL_FIPS_MODE_ERR = 2064
+CR_DEPRECATED_COMPRESSION_NOT_SUPPORTED = 2065
+CR_COMPRESSION_WRONGLY_CONFIGURED = 2066
+CR_KERBEROS_USER_NOT_FOUND = 2067
+CR_LOAD_DATA_LOCAL_INFILE_REJECTED = 2068
+CR_LOAD_DATA_LOCAL_INFILE_REALPATH_FAIL = 2069
+CR_DNS_SRV_LOOKUP_FAILED = 2070
+CR_MANDATORY_TRACKER_NOT_FOUND = 2071
+CR_INVALID_FACTOR_NO = 2072
+# End MySQL Errors
+
+# Start X Plugin Errors
+ER_X_BAD_MESSAGE = 5000
+ER_X_CAPABILITIES_PREPARE_FAILED = 5001
+ER_X_CAPABILITY_NOT_FOUND = 5002
+ER_X_INVALID_PROTOCOL_DATA = 5003
+ER_X_BAD_CONNECTION_SESSION_ATTRIBUTE_VALUE_LENGTH = 5004
+ER_X_BAD_CONNECTION_SESSION_ATTRIBUTE_KEY_LENGTH = 5005
+ER_X_BAD_CONNECTION_SESSION_ATTRIBUTE_EMPTY_KEY = 5006
+ER_X_BAD_CONNECTION_SESSION_ATTRIBUTE_LENGTH = 5007
+ER_X_BAD_CONNECTION_SESSION_ATTRIBUTE_TYPE = 5008
+ER_X_CAPABILITY_SET_NOT_ALLOWED = 5009
+ER_X_SERVICE_ERROR = 5010
+ER_X_SESSION = 5011
+ER_X_INVALID_ARGUMENT = 5012
+ER_X_MISSING_ARGUMENT = 5013
+ER_X_BAD_INSERT_DATA = 5014
+ER_X_CMD_NUM_ARGUMENTS = 5015
+ER_X_CMD_ARGUMENT_TYPE = 5016
+ER_X_CMD_ARGUMENT_VALUE = 5017
+ER_X_BAD_UPSERT_DATA = 5018
+ER_X_DUPLICATED_CAPABILITIES = 5019
+ER_X_CMD_ARGUMENT_OBJECT_EMPTY = 5020
+ER_X_CMD_INVALID_ARGUMENT = 5021
+ER_X_BAD_UPDATE_DATA = 5050
+ER_X_BAD_TYPE_OF_UPDATE = 5051
+ER_X_BAD_COLUMN_TO_UPDATE = 5052
+ER_X_BAD_MEMBER_TO_UPDATE = 5053
+ER_X_BAD_STATEMENT_ID = 5110
+ER_X_BAD_CURSOR_ID = 5111
+ER_X_BAD_SCHEMA = 5112
+ER_X_BAD_TABLE = 5113
+ER_X_BAD_PROJECTION = 5114
+ER_X_DOC_ID_MISSING = 5115
+ER_X_DUPLICATE_ENTRY = 5116
+ER_X_DOC_REQUIRED_FIELD_MISSING = 5117
+ER_X_PROJ_BAD_KEY_NAME = 5120
+ER_X_BAD_DOC_PATH = 5121
+ER_X_CURSOR_EXISTS = 5122
+ER_X_CURSOR_REACHED_EOF = 5123
+ER_X_PREPARED_STATMENT_CAN_HAVE_ONE_CURSOR = 5131
+ER_X_PREPARED_EXECUTE_ARGUMENT_NOT_SUPPORTED = 5133
+ER_X_PREPARED_EXECUTE_ARGUMENT_CONSISTENCY = 5134
+ER_X_EXPR_BAD_OPERATOR = 5150
+ER_X_EXPR_BAD_NUM_ARGS = 5151
+ER_X_EXPR_MISSING_ARG = 5152
+ER_X_EXPR_BAD_TYPE_VALUE = 5153
+ER_X_EXPR_BAD_VALUE = 5154
+ER_X_INVALID_COLLECTION = 5156
+ER_X_INVALID_ADMIN_COMMAND = 5157
+ER_X_EXPECT_NOT_OPEN = 5158
+ER_X_EXPECT_NO_ERROR_FAILED = 5159
+ER_X_EXPECT_BAD_CONDITION = 5160
+ER_X_EXPECT_BAD_CONDITION_VALUE = 5161
+ER_X_INVALID_NAMESPACE = 5162
+ER_X_BAD_NOTICE = 5163
+ER_X_CANNOT_DISABLE_NOTICE = 5164
+ER_X_BAD_CONFIGURATION = 5165
+ER_X_MYSQLX_ACCOUNT_MISSING_PERMISSIONS = 5167
+ER_X_EXPECT_FIELD_EXISTS_FAILED = 5168
+ER_X_BAD_LOCKING = 5169
+ER_X_FRAME_COMPRESSION_DISABLED = 5170
+ER_X_DECOMPRESSION_FAILED = 5171
+ER_X_BAD_COMPRESSED_FRAME = 5174
+ER_X_CAPABILITY_COMPRESSION_INVALID_ALGORITHM = 5175
+ER_X_CAPABILITY_COMPRESSION_INVALID_SERVER_STYLE = 5176
+ER_X_CAPABILITY_COMPRESSION_INVALID_CLIENT_STYLE = 5177
+ER_X_CAPABILITY_COMPRESSION_INVALID_OPTION = 5178
+ER_X_CAPABILITY_COMPRESSION_MISSING_REQUIRED_FIELDS = 5179
+ER_X_DOCUMENT_DOESNT_MATCH_EXPECTED_SCHEMA = 5180
+ER_X_COLLECTION_OPTION_DOESNT_EXISTS = 5181
+ER_X_INVALID_VALIDATION_SCHEMA = 5182
+# End X Plugin Errors
\ No newline at end of file
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/errors.py b/monEnvTP/lib/python3.8/site-packages/mysql/connector/errors.py
new file mode 100644
index 0000000000000000000000000000000000000000..699ad4afc1ec4932ea7a9cab00fe875f2ed630ef
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql/connector/errors.py
@@ -0,0 +1,306 @@
+# Copyright (c) 2009, 2020, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""Python exceptions
+"""
+
+from . import utils
+from .locales import get_client_error
+
+# _CUSTOM_ERROR_EXCEPTIONS holds custom exceptions and is ued by the
+# function custom_error_exception. _ERROR_EXCEPTIONS (at bottom of module)
+# is similar, but hardcoded exceptions.
+_CUSTOM_ERROR_EXCEPTIONS = {}
+
+
+def custom_error_exception(error=None, exception=None):
+ """Define custom exceptions for MySQL server errors
+
+ This function defines custom exceptions for MySQL server errors and
+ returns the current set customizations.
+
+ If error is a MySQL Server error number, then you have to pass also the
+ exception class.
+
+ The error argument can also be a dictionary in which case the key is
+ the server error number, and value the exception to be raised.
+
+ If none of the arguments are given, then custom_error_exception() will
+ simply return the current set customizations.
+
+ To reset the customizations, simply supply an empty dictionary.
+
+ Examples:
+ import mysql.connector
+ from mysql.connector import errorcode
+
+ # Server error 1028 should raise a DatabaseError
+ mysql.connector.custom_error_exception(
+ 1028, mysql.connector.DatabaseError)
+
+ # Or using a dictionary:
+ mysql.connector.custom_error_exception({
+ 1028: mysql.connector.DatabaseError,
+ 1029: mysql.connector.OperationalError,
+ })
+
+ # Reset
+ mysql.connector.custom_error_exception({})
+
+ Returns a dictionary.
+ """
+ global _CUSTOM_ERROR_EXCEPTIONS # pylint: disable=W0603
+
+ if isinstance(error, dict) and not error:
+ _CUSTOM_ERROR_EXCEPTIONS = {}
+ return _CUSTOM_ERROR_EXCEPTIONS
+
+ if not error and not exception:
+ return _CUSTOM_ERROR_EXCEPTIONS
+
+ if not isinstance(error, (int, dict)):
+ raise ValueError(
+ "The error argument should be either an integer or dictionary")
+
+ if isinstance(error, int):
+ error = {error: exception}
+
+ for errno, _exception in error.items():
+ if not isinstance(errno, int):
+ raise ValueError("error number should be an integer")
+ try:
+ if not issubclass(_exception, Exception):
+ raise TypeError
+ except TypeError:
+ raise ValueError("exception should be subclass of Exception")
+ _CUSTOM_ERROR_EXCEPTIONS[errno] = _exception
+
+ return _CUSTOM_ERROR_EXCEPTIONS
+
+def get_mysql_exception(errno, msg=None, sqlstate=None):
+ """Get the exception matching the MySQL error
+
+ This function will return an exception based on the SQLState. The given
+ message will be passed on in the returned exception.
+
+ The exception returned can be customized using the
+ mysql.connector.custom_error_exception() function.
+
+ Returns an Exception
+ """
+ try:
+ return _CUSTOM_ERROR_EXCEPTIONS[errno](
+ msg=msg, errno=errno, sqlstate=sqlstate)
+ except KeyError:
+ # Error was not mapped to particular exception
+ pass
+
+ try:
+ return _ERROR_EXCEPTIONS[errno](
+ msg=msg, errno=errno, sqlstate=sqlstate)
+ except KeyError:
+ # Error was not mapped to particular exception
+ pass
+
+ if not sqlstate:
+ return DatabaseError(msg=msg, errno=errno)
+
+ try:
+ return _SQLSTATE_CLASS_EXCEPTION[sqlstate[0:2]](
+ msg=msg, errno=errno, sqlstate=sqlstate)
+ except KeyError:
+ # Return default InterfaceError
+ return DatabaseError(msg=msg, errno=errno, sqlstate=sqlstate)
+
+def get_exception(packet):
+ """Returns an exception object based on the MySQL error
+
+ Returns an exception object based on the MySQL error in the given
+ packet.
+
+ Returns an Error-Object.
+ """
+ errno = errmsg = None
+
+ try:
+ if packet[4] != 255:
+ raise ValueError("Packet is not an error packet")
+ except IndexError as err:
+ return InterfaceError("Failed getting Error information (%r)" % err)
+
+ sqlstate = None
+ try:
+ packet = packet[5:]
+ (packet, errno) = utils.read_int(packet, 2)
+ if packet[0] != 35:
+ # Error without SQLState
+ if isinstance(packet, (bytes, bytearray)):
+ errmsg = packet.decode('utf8')
+ else:
+ errmsg = packet
+ else:
+ (packet, sqlstate) = utils.read_bytes(packet[1:], 5)
+ sqlstate = sqlstate.decode('utf8')
+ errmsg = packet.decode('utf8')
+ except Exception as err: # pylint: disable=W0703
+ return InterfaceError("Failed getting Error information (%r)" % err)
+ else:
+ return get_mysql_exception(errno, errmsg, sqlstate)
+
+
+class Error(Exception):
+ """Exception that is base class for all other error exceptions"""
+ def __init__(self, msg=None, errno=None, values=None, sqlstate=None):
+ super(Error, self).__init__()
+ self.msg = msg
+ self._full_msg = self.msg
+ self.errno = errno or -1
+ self.sqlstate = sqlstate
+
+ if not self.msg and (2000 <= self.errno < 3000):
+ self.msg = get_client_error(self.errno)
+ if values is not None:
+ try:
+ self.msg = self.msg % values
+ except TypeError as err:
+ self.msg = "{0} (Warning: {1})".format(self.msg, str(err))
+ elif not self.msg:
+ self._full_msg = self.msg = 'Unknown error'
+
+ if self.msg and self.errno != -1:
+ fields = {
+ 'errno': self.errno,
+ 'msg': self.msg
+ }
+ if self.sqlstate:
+ fmt = '{errno} ({state}): {msg}'
+ fields['state'] = self.sqlstate
+ else:
+ fmt = '{errno}: {msg}'
+ self._full_msg = fmt.format(**fields)
+
+ self.args = (self.errno, self._full_msg, self.sqlstate)
+
+ def __str__(self):
+ return self._full_msg
+
+
+class Warning(Exception): # pylint: disable=W0622
+ """Exception for important warnings"""
+ pass
+
+
+class InterfaceError(Error):
+ """Exception for errors related to the interface"""
+ pass
+
+
+class DatabaseError(Error):
+ """Exception for errors related to the database"""
+ pass
+
+
+class InternalError(DatabaseError):
+ """Exception for errors internal database errors"""
+ pass
+
+
+class OperationalError(DatabaseError):
+ """Exception for errors related to the database's operation"""
+ pass
+
+
+class ProgrammingError(DatabaseError):
+ """Exception for errors programming errors"""
+ pass
+
+
+class IntegrityError(DatabaseError):
+ """Exception for errors regarding relational integrity"""
+ pass
+
+
+class DataError(DatabaseError):
+ """Exception for errors reporting problems with processed data"""
+ pass
+
+
+class NotSupportedError(DatabaseError):
+ """Exception for errors when an unsupported database feature was used"""
+ pass
+
+
+class PoolError(Error):
+ """Exception for errors relating to connection pooling"""
+ pass
+
+
+_SQLSTATE_CLASS_EXCEPTION = {
+ '02': DataError, # no data
+ '07': DatabaseError, # dynamic SQL error
+ '08': OperationalError, # connection exception
+ '0A': NotSupportedError, # feature not supported
+ '21': DataError, # cardinality violation
+ '22': DataError, # data exception
+ '23': IntegrityError, # integrity constraint violation
+ '24': ProgrammingError, # invalid cursor state
+ '25': ProgrammingError, # invalid transaction state
+ '26': ProgrammingError, # invalid SQL statement name
+ '27': ProgrammingError, # triggered data change violation
+ '28': ProgrammingError, # invalid authorization specification
+ '2A': ProgrammingError, # direct SQL syntax error or access rule violation
+ '2B': DatabaseError, # dependent privilege descriptors still exist
+ '2C': ProgrammingError, # invalid character set name
+ '2D': DatabaseError, # invalid transaction termination
+ '2E': DatabaseError, # invalid connection name
+ '33': DatabaseError, # invalid SQL descriptor name
+ '34': ProgrammingError, # invalid cursor name
+ '35': ProgrammingError, # invalid condition number
+ '37': ProgrammingError, # dynamic SQL syntax error or access rule violation
+ '3C': ProgrammingError, # ambiguous cursor name
+ '3D': ProgrammingError, # invalid catalog name
+ '3F': ProgrammingError, # invalid schema name
+ '40': InternalError, # transaction rollback
+ '42': ProgrammingError, # syntax error or access rule violation
+ '44': InternalError, # with check option violation
+ 'HZ': OperationalError, # remote database access
+ 'XA': IntegrityError,
+ '0K': OperationalError,
+ 'HY': DatabaseError, # default when no SQLState provided by MySQL server
+}
+
+_ERROR_EXCEPTIONS = {
+ 1243: ProgrammingError,
+ 1210: ProgrammingError,
+ 2002: InterfaceError,
+ 2013: OperationalError,
+ 2049: NotSupportedError,
+ 2055: OperationalError,
+ 2061: InterfaceError,
+ 2026: InterfaceError,
+}
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/locales/__init__.py b/monEnvTP/lib/python3.8/site-packages/mysql/connector/locales/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..c1a737bf812072edaac5ed24c7814c08a0d87d70
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql/connector/locales/__init__.py
@@ -0,0 +1,75 @@
+# Copyright (c) 2012, 2017, Oracle and/or its affiliates. All rights reserved.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""Translations
+"""
+
+__all__ = [
+ 'get_client_error'
+]
+
+from .. import errorcode
+
+def get_client_error(error, language='eng'):
+ """Lookup client error
+
+ This function will lookup the client error message based on the given
+ error and return the error message. If the error was not found,
+ None will be returned.
+
+ Error can be either an integer or a string. For example:
+ error: 2000
+ error: CR_UNKNOWN_ERROR
+
+ The language attribute can be used to retrieve a localized message, when
+ available.
+
+ Returns a string or None.
+ """
+ try:
+ tmp = __import__('mysql.connector.locales.{0}'.format(language),
+ globals(), locals(), ['client_error'])
+ except ImportError:
+ raise ImportError("No localization support for language '{0}'".format(
+ language))
+ client_error = tmp.client_error
+
+ if isinstance(error, int):
+ errno = error
+ for key, value in errorcode.__dict__.items():
+ if value == errno:
+ error = key
+ break
+
+ if isinstance(error, (str)):
+ try:
+ return getattr(client_error, error)
+ except AttributeError:
+ return None
+
+ raise ValueError("error argument needs to be either an integer or string")
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/locales/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/connector/locales/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..acedb82f271f07cba7ce7f97c283848e9748326b
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/connector/locales/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/locales/eng/__init__.py b/monEnvTP/lib/python3.8/site-packages/mysql/connector/locales/eng/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..2e1c02b1e14d4f421d01f64160562bfce7aa25d3
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql/connector/locales/eng/__init__.py
@@ -0,0 +1,30 @@
+# Copyright (c) 2012, 2017, Oracle and/or its affiliates. All rights reserved.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""English Content
+"""
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/locales/eng/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/connector/locales/eng/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5eed465e577b4aed66df36b3e1d149aaf505789a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/connector/locales/eng/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/locales/eng/__pycache__/client_error.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysql/connector/locales/eng/__pycache__/client_error.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1c19d1081b16ec8de174186f95f863ce91b74449
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/connector/locales/eng/__pycache__/client_error.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/locales/eng/client_error.py b/monEnvTP/lib/python3.8/site-packages/mysql/connector/locales/eng/client_error.py
new file mode 100644
index 0000000000000000000000000000000000000000..5927e950b7801b9cc69c488e76d32bf26d4fcfd2
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql/connector/locales/eng/client_error.py
@@ -0,0 +1,110 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2013, 2021, Oracle and/or its affiliates. All rights reserved.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+# This file was auto-generated.
+_GENERATED_ON = '2021-08-11'
+_MYSQL_VERSION = (8, 0, 27)
+
+# Start MySQL Error messages
+CR_UNKNOWN_ERROR = u"Unknown MySQL error"
+CR_SOCKET_CREATE_ERROR = u"Can't create UNIX socket (%s)"
+CR_CONNECTION_ERROR = u"Can't connect to local MySQL server through socket '%-.100s' (%s)"
+CR_CONN_HOST_ERROR = u"Can't connect to MySQL server on '%-.100s:%u' (%s)"
+CR_IPSOCK_ERROR = u"Can't create TCP/IP socket (%s)"
+CR_UNKNOWN_HOST = u"Unknown MySQL server host '%-.100s' (%s)"
+CR_SERVER_GONE_ERROR = u"MySQL server has gone away"
+CR_VERSION_ERROR = u"Protocol mismatch; server version = %s, client version = %s"
+CR_OUT_OF_MEMORY = u"MySQL client ran out of memory"
+CR_WRONG_HOST_INFO = u"Wrong host info"
+CR_LOCALHOST_CONNECTION = u"Localhost via UNIX socket"
+CR_TCP_CONNECTION = u"%-.100s via TCP/IP"
+CR_SERVER_HANDSHAKE_ERR = u"Error in server handshake"
+CR_SERVER_LOST = u"Lost connection to MySQL server during query"
+CR_COMMANDS_OUT_OF_SYNC = u"Commands out of sync; you can't run this command now"
+CR_NAMEDPIPE_CONNECTION = u"Named pipe: %-.32s"
+CR_NAMEDPIPEWAIT_ERROR = u"Can't wait for named pipe to host: %-.64s pipe: %-.32s (%s)"
+CR_NAMEDPIPEOPEN_ERROR = u"Can't open named pipe to host: %-.64s pipe: %-.32s (%s)"
+CR_NAMEDPIPESETSTATE_ERROR = u"Can't set state of named pipe to host: %-.64s pipe: %-.32s (%s)"
+CR_CANT_READ_CHARSET = u"Can't initialize character set %-.32s (path: %-.100s)"
+CR_NET_PACKET_TOO_LARGE = u"Got packet bigger than 'max_allowed_packet' bytes"
+CR_EMBEDDED_CONNECTION = u"Embedded server"
+CR_PROBE_SLAVE_STATUS = u"Error on SHOW SLAVE STATUS:"
+CR_PROBE_SLAVE_HOSTS = u"Error on SHOW SLAVE HOSTS:"
+CR_PROBE_SLAVE_CONNECT = u"Error connecting to slave:"
+CR_PROBE_MASTER_CONNECT = u"Error connecting to master:"
+CR_SSL_CONNECTION_ERROR = u"SSL connection error: %-.100s"
+CR_MALFORMED_PACKET = u"Malformed packet"
+CR_WRONG_LICENSE = u"This client library is licensed only for use with MySQL servers having '%s' license"
+CR_NULL_POINTER = u"Invalid use of null pointer"
+CR_NO_PREPARE_STMT = u"Statement not prepared"
+CR_PARAMS_NOT_BOUND = u"No data supplied for parameters in prepared statement"
+CR_DATA_TRUNCATED = u"Data truncated"
+CR_NO_PARAMETERS_EXISTS = u"No parameters exist in the statement"
+CR_INVALID_PARAMETER_NO = u"Invalid parameter number"
+CR_INVALID_BUFFER_USE = u"Can't send long data for non-string/non-binary data types (parameter: %s)"
+CR_UNSUPPORTED_PARAM_TYPE = u"Using unsupported buffer type: %s (parameter: %s)"
+CR_SHARED_MEMORY_CONNECTION = u"Shared memory: %-.100s"
+CR_SHARED_MEMORY_CONNECT_REQUEST_ERROR = u"Can't open shared memory; client could not create request event (%s)"
+CR_SHARED_MEMORY_CONNECT_ANSWER_ERROR = u"Can't open shared memory; no answer event received from server (%s)"
+CR_SHARED_MEMORY_CONNECT_FILE_MAP_ERROR = u"Can't open shared memory; server could not allocate file mapping (%s)"
+CR_SHARED_MEMORY_CONNECT_MAP_ERROR = u"Can't open shared memory; server could not get pointer to file mapping (%s)"
+CR_SHARED_MEMORY_FILE_MAP_ERROR = u"Can't open shared memory; client could not allocate file mapping (%s)"
+CR_SHARED_MEMORY_MAP_ERROR = u"Can't open shared memory; client could not get pointer to file mapping (%s)"
+CR_SHARED_MEMORY_EVENT_ERROR = u"Can't open shared memory; client could not create %s event (%s)"
+CR_SHARED_MEMORY_CONNECT_ABANDONED_ERROR = u"Can't open shared memory; no answer from server (%s)"
+CR_SHARED_MEMORY_CONNECT_SET_ERROR = u"Can't open shared memory; cannot send request event to server (%s)"
+CR_CONN_UNKNOW_PROTOCOL = u"Wrong or unknown protocol"
+CR_INVALID_CONN_HANDLE = u"Invalid connection handle"
+CR_UNUSED_1 = u"Connection using old (pre-4.1.1) authentication protocol refused (client option 'secure_auth' enabled)"
+CR_FETCH_CANCELED = u"Row retrieval was canceled by mysql_stmt_close() call"
+CR_NO_DATA = u"Attempt to read column without prior row fetch"
+CR_NO_STMT_METADATA = u"Prepared statement contains no metadata"
+CR_NO_RESULT_SET = u"Attempt to read a row while there is no result set associated with the statement"
+CR_NOT_IMPLEMENTED = u"This feature is not implemented yet"
+CR_SERVER_LOST_EXTENDED = u"Lost connection to MySQL server at '%s', system error: %s"
+CR_STMT_CLOSED = u"Statement closed indirectly because of a preceding %s() call"
+CR_NEW_STMT_METADATA = u"The number of columns in the result set differs from the number of bound buffers. You must reset the statement, rebind the result set columns, and execute the statement again"
+CR_ALREADY_CONNECTED = u"This handle is already connected. Use a separate handle for each connection."
+CR_AUTH_PLUGIN_CANNOT_LOAD = u"Authentication plugin '%s' cannot be loaded: %s"
+CR_DUPLICATE_CONNECTION_ATTR = u"There is an attribute with the same name already"
+CR_AUTH_PLUGIN_ERR = u"Authentication plugin '%s' reported error: %s"
+CR_INSECURE_API_ERR = u"Insecure API function call: '%s' Use instead: '%s'"
+CR_FILE_NAME_TOO_LONG = u"File name is too long"
+CR_SSL_FIPS_MODE_ERR = u"Set FIPS mode ON/STRICT failed"
+CR_DEPRECATED_COMPRESSION_NOT_SUPPORTED = u"Compression protocol not supported with asynchronous protocol"
+CR_COMPRESSION_WRONGLY_CONFIGURED = u"Connection failed due to wrongly configured compression algorithm"
+CR_KERBEROS_USER_NOT_FOUND = u"SSO user not found, Please perform SSO authentication using kerberos."
+CR_LOAD_DATA_LOCAL_INFILE_REJECTED = u"LOAD DATA LOCAL INFILE file request rejected due to restrictions on access."
+CR_LOAD_DATA_LOCAL_INFILE_REALPATH_FAIL = u"Determining the real path for '%s' failed with error (%s): %s"
+CR_DNS_SRV_LOOKUP_FAILED = u"DNS SRV lookup failed with error : %s"
+CR_MANDATORY_TRACKER_NOT_FOUND = u"Client does not recognise tracker type %s marked as mandatory by server."
+CR_INVALID_FACTOR_NO = u"Invalid first argument for MYSQL_OPT_USER_PASSWORD option. Valid value should be between 1 and 3 inclusive."
+# End MySQL Error messages
+
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/network.py b/monEnvTP/lib/python3.8/site-packages/mysql/connector/network.py
new file mode 100644
index 0000000000000000000000000000000000000000..6ac083f79a81d106eeb79b3ebb7e8b56365b6083
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql/connector/network.py
@@ -0,0 +1,584 @@
+# Copyright (c) 2012, 2020, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""Module implementing low-level socket communication with MySQL servers.
+"""
+
+from collections import deque
+import os
+import socket
+import struct
+import sys
+import zlib
+
+try:
+ import ssl
+ TLS_VERSIONS = {
+ "TLSv1": ssl.PROTOCOL_TLSv1,
+ "TLSv1.1": ssl.PROTOCOL_TLSv1_1,
+ "TLSv1.2": ssl.PROTOCOL_TLSv1_2}
+ # TLSv1.3 included in PROTOCOL_TLS, but PROTOCOL_TLS is not included on 3.4
+ if hasattr(ssl, "PROTOCOL_TLS"):
+ TLS_VERSIONS["TLSv1.3"] = ssl.PROTOCOL_TLS # pylint: disable=E1101
+ else:
+ TLS_VERSIONS["TLSv1.3"] = ssl.PROTOCOL_SSLv23 # Alias of PROTOCOL_TLS
+ if hasattr(ssl, "HAS_TLSv1_3") and ssl.HAS_TLSv1_3:
+ TLS_V1_3_SUPPORTED = True
+ else:
+ TLS_V1_3_SUPPORTED = False
+except:
+ # If import fails, we don't have SSL support.
+ TLS_V1_3_SUPPORTED = False
+ pass
+
+from . import constants, errors
+from .errors import InterfaceError
+from .utils import init_bytearray
+
+
+def _strioerror(err):
+ """Reformat the IOError error message
+
+ This function reformats the IOError error message.
+ """
+ if not err.errno:
+ return str(err)
+ return '{errno} {strerr}'.format(errno=err.errno, strerr=err.strerror)
+
+
+def _prepare_packets(buf, pktnr):
+ """Prepare a packet for sending to the MySQL server"""
+ pkts = []
+ pllen = len(buf)
+ maxpktlen = constants.MAX_PACKET_LENGTH
+ while pllen > maxpktlen:
+ pkts.append(b'\xff\xff\xff' + struct.pack('<B', pktnr)
+ + buf[:maxpktlen])
+ buf = buf[maxpktlen:]
+ pllen = len(buf)
+ pktnr = pktnr + 1
+ pkts.append(struct.pack('<I', pllen)[0:3]
+ + struct.pack('<B', pktnr) + buf)
+ return pkts
+
+
+class BaseMySQLSocket(object):
+ """Base class for MySQL socket communication
+
+ This class should not be used directly but overloaded, changing the
+ at least the open_connection()-method. Examples of subclasses are
+ mysql.connector.network.MySQLTCPSocket
+ mysql.connector.network.MySQLUnixSocket
+ """
+
+ def __init__(self):
+ self.sock = None # holds the socket connection
+ self._connection_timeout = None
+ self._packet_number = -1
+ self._compressed_packet_number = -1
+ self._packet_queue = deque()
+ self.recvsize = 8192
+
+ @property
+ def next_packet_number(self):
+ """Increments the packet number"""
+ self._packet_number = self._packet_number + 1
+ if self._packet_number > 255:
+ self._packet_number = 0
+ return self._packet_number
+
+ @property
+ def next_compressed_packet_number(self):
+ """Increments the compressed packet number"""
+ self._compressed_packet_number = self._compressed_packet_number + 1
+ if self._compressed_packet_number > 255:
+ self._compressed_packet_number = 0
+ return self._compressed_packet_number
+
+ def open_connection(self):
+ """Open the socket"""
+ raise NotImplementedError
+
+ def get_address(self):
+ """Get the location of the socket"""
+ raise NotImplementedError
+
+ def shutdown(self):
+ """Shut down the socket before closing it"""
+ try:
+ self.sock.shutdown(socket.SHUT_RDWR)
+ self.sock.close()
+ del self._packet_queue
+ except (socket.error, AttributeError):
+ pass
+
+ def close_connection(self):
+ """Close the socket"""
+ try:
+ self.sock.close()
+ del self._packet_queue
+ except (socket.error, AttributeError):
+ pass
+
+ def __del__(self):
+ self.shutdown()
+
+ def send_plain(self, buf, packet_number=None,
+ compressed_packet_number=None):
+ """Send packets to the MySQL server"""
+ if packet_number is None:
+ self.next_packet_number # pylint: disable=W0104
+ else:
+ self._packet_number = packet_number
+ packets = _prepare_packets(buf, self._packet_number)
+ for packet in packets:
+ try:
+ self.sock.sendall(packet)
+ except IOError as err:
+ raise errors.OperationalError(
+ errno=2055, values=(self.get_address(), _strioerror(err)))
+ except AttributeError:
+ raise errors.OperationalError(errno=2006)
+
+ send = send_plain
+
+ def send_compressed(self, buf, packet_number=None,
+ compressed_packet_number=None):
+ """Send compressed packets to the MySQL server"""
+ if packet_number is None:
+ self.next_packet_number # pylint: disable=W0104
+ else:
+ self._packet_number = packet_number
+ if compressed_packet_number is None:
+ self.next_compressed_packet_number # pylint: disable=W0104
+ else:
+ self._compressed_packet_number = compressed_packet_number
+
+ pktnr = self._packet_number
+ pllen = len(buf)
+ zpkts = []
+ maxpktlen = constants.MAX_PACKET_LENGTH
+ if pllen > maxpktlen:
+ pkts = _prepare_packets(buf, pktnr)
+ tmpbuf = b''.join(pkts)
+ del pkts
+ zbuf = zlib.compress(tmpbuf[:16384])
+ header = (struct.pack('<I', len(zbuf))[0:3]
+ + struct.pack('<B', self._compressed_packet_number)
+ + b'\x00\x40\x00')
+ zpkts.append(header + zbuf)
+ tmpbuf = tmpbuf[16384:]
+ pllen = len(tmpbuf)
+ self.next_compressed_packet_number # pylint: disable=W0104
+ while pllen > maxpktlen:
+ zbuf = zlib.compress(tmpbuf[:maxpktlen])
+ header = (struct.pack('<I', len(zbuf))[0:3]
+ + struct.pack('<B', self._compressed_packet_number)
+ + b'\xff\xff\xff')
+ zpkts.append(header + zbuf)
+ tmpbuf = tmpbuf[maxpktlen:]
+ pllen = len(tmpbuf)
+ self.next_compressed_packet_number # pylint: disable=W0104
+ if tmpbuf:
+ zbuf = zlib.compress(tmpbuf)
+ header = (struct.pack('<I', len(zbuf))[0:3]
+ + struct.pack('<B', self._compressed_packet_number)
+ + struct.pack('<I', pllen)[0:3])
+ zpkts.append(header + zbuf)
+ del tmpbuf
+ else:
+ pkt = (struct.pack('<I', pllen)[0:3] +
+ struct.pack('<B', pktnr) + buf)
+ pllen = len(pkt)
+ if pllen > 50:
+ zbuf = zlib.compress(pkt)
+ zpkts.append(struct.pack('<I', len(zbuf))[0:3]
+ + struct.pack('<B', self._compressed_packet_number)
+ + struct.pack('<I', pllen)[0:3]
+ + zbuf)
+ else:
+ header = (struct.pack('<I', pllen)[0:3]
+ + struct.pack('<B', self._compressed_packet_number)
+ + struct.pack('<I', 0)[0:3])
+ zpkts.append(header + pkt)
+
+ for zip_packet in zpkts:
+ try:
+ self.sock.sendall(zip_packet)
+ except IOError as err:
+ raise errors.OperationalError(
+ errno=2055, values=(self.get_address(), _strioerror(err)))
+ except AttributeError:
+ raise errors.OperationalError(errno=2006)
+
+ def recv_plain(self):
+ """Receive packets from the MySQL server"""
+ try:
+ # Read the header of the MySQL packet, 4 bytes
+ packet = bytearray(b'')
+ packet_len = 0
+ while packet_len < 4:
+ chunk = self.sock.recv(4 - packet_len)
+ if not chunk:
+ raise errors.InterfaceError(errno=2013)
+ packet += chunk
+ packet_len = len(packet)
+
+ # Save the packet number and payload length
+ self._packet_number = packet[3]
+ payload_len = struct.unpack("<I", packet[0:3] + b'\x00')[0]
+
+ # Read the payload
+ rest = payload_len
+ packet.extend(bytearray(payload_len))
+ packet_view = memoryview(packet) # pylint: disable=E0602
+ packet_view = packet_view[4:]
+ while rest:
+ read = self.sock.recv_into(packet_view, rest)
+ if read == 0 and rest > 0:
+ raise errors.InterfaceError(errno=2013)
+ packet_view = packet_view[read:]
+ rest -= read
+ return packet
+ except IOError as err:
+ raise errors.OperationalError(
+ errno=2055, values=(self.get_address(), _strioerror(err)))
+
    def recv_py26_plain(self):
        """Receive packets from the MySQL server"""
        # Python v2.6 fallback: builds the payload by repeated bytearray
        # concatenation because recv_into()/memoryview are not usable there.
        try:
            # Read the header of the MySQL packet, 4 bytes
            header = bytearray(b'')
            header_len = 0
            while header_len < 4:
                chunk = self.sock.recv(4 - header_len)
                if not chunk:
                    # Connection closed mid-header (CR_SERVER_LOST)
                    raise errors.InterfaceError(errno=2013)
                header += chunk
                header_len = len(header)

            # Save the packet number and payload length
            self._packet_number = header[3]
            payload_len = struct.unpack("<I", header[0:3] + b'\x00')[0]

            # Read the payload
            rest = payload_len
            payload = init_bytearray(b'')
            while rest > 0:
                chunk = self.sock.recv(rest)
                if not chunk:
                    raise errors.InterfaceError(errno=2013)
                payload += chunk
                rest = payload_len - len(payload)
            return header + payload
        except IOError as err:
            # Socket-level failures map to CR_CONN_HOST_ERROR (2055).
            raise errors.OperationalError(
                errno=2055, values=(self.get_address(), _strioerror(err)))
+
    # On Python v2.6 the memoryview/recv_into based recv_plain is not
    # usable, so both aliases fall back to the copy-based implementation.
    if sys.version_info[0:2] == (2, 6):
        recv = recv_py26_plain
        recv_plain = recv_py26_plain
    else:
        recv = recv_plain
+
+ def _split_zipped_payload(self, packet_bunch):
+ """Split compressed payload"""
+ while packet_bunch:
+ payload_length = struct.unpack("<I", packet_bunch[0:3] + b'\x00')[0]
+ self._packet_queue.append(packet_bunch[0:payload_length + 4])
+ packet_bunch = packet_bunch[payload_length + 4:]
+
    def recv_compressed(self):
        """Receive compressed packets from the MySQL server"""
        # Serve a previously decompressed MySQL packet from the queue first.
        try:
            pkt = self._packet_queue.popleft()
            self._packet_number = pkt[3]
            return pkt
        except IndexError:
            pass

        # Compressed-protocol header is 7 bytes:
        #   3 bytes compressed length, 1 byte sequence number,
        #   3 bytes uncompressed length (0 means payload is NOT compressed).
        header = bytearray(b'')
        packets = []
        try:
            abyte = self.sock.recv(1)
            while abyte and len(header) < 7:
                header += abyte
                abyte = self.sock.recv(1)
            while header:
                if len(header) < 7:
                    # Connection closed mid-header (CR_SERVER_LOST)
                    raise errors.InterfaceError(errno=2013)

                # Get length of compressed packet
                zip_payload_length = struct.unpack("<I",
                                                   header[0:3] + b'\x00')[0]
                self._compressed_packet_number = header[3]

                # Get payload length before compression
                payload_length = struct.unpack("<I", header[4:7] + b'\x00')[0]

                # 'abyte' already holds the first payload byte consumed by
                # the header loop above, so seed the buffer with it.
                zip_payload = init_bytearray(abyte)
                while len(zip_payload) < zip_payload_length:
                    chunk = self.sock.recv(zip_payload_length
                                           - len(zip_payload))
                    if not chunk:
                        raise errors.InterfaceError(errno=2013)
                    zip_payload = zip_payload + chunk

                # Payload was not compressed
                if payload_length == 0:
                    self._split_zipped_payload(zip_payload)
                    pkt = self._packet_queue.popleft()
                    self._packet_number = pkt[3]
                    return pkt

                packets.append((payload_length, zip_payload))

                if zip_payload_length <= 16384:
                    # We received the full compressed packet
                    break

                # Get next compressed packet
                header = init_bytearray(b'')
                abyte = self.sock.recv(1)
                while abyte and len(header) < 7:
                    header += abyte
                    abyte = self.sock.recv(1)

        except IOError as err:
            raise errors.OperationalError(
                errno=2055, values=(self.get_address(), _strioerror(err)))

        # Compressed packet can contain more than 1 MySQL packets
        # We decompress and make one so we can split it up
        tmp = init_bytearray(b'')
        for payload_length, payload in packets:
            # payload_length can not be 0; this was previously handled
            tmp += zlib.decompress(payload)
        self._split_zipped_payload(tmp)
        del tmp

        # NOTE(review): if the queue is still empty here this implicitly
        # returns None — callers appear to tolerate that; confirm.
        try:
            pkt = self._packet_queue.popleft()
            self._packet_number = pkt[3]
            return pkt
        except IndexError:
            pass
+
+ def set_connection_timeout(self, timeout):
+ """Set the connection timeout"""
+ self._connection_timeout = timeout
+ if self.sock:
+ self.sock.settimeout(timeout)
+
    # pylint: disable=C0103,E1101
    def switch_to_ssl(self, ca, cert, key, verify_cert=False,
                      verify_identity=False, cipher_suites=None,
                      tls_versions=None):
        """Switch the socket to use SSL"""
        if not self.sock:
            # Nothing to wrap: no socket has been opened yet.
            raise errors.InterfaceError(errno=2048)

        try:
            # Map the boolean flags onto the ssl module's verify modes.
            if verify_cert:
                cert_reqs = ssl.CERT_REQUIRED
            elif verify_identity:
                cert_reqs = ssl.CERT_OPTIONAL
            else:
                cert_reqs = ssl.CERT_NONE

            if tls_versions is None or not tls_versions:
                # No explicit TLS versions requested: use Python's secure
                # defaults.
                context = ssl.create_default_context()
                if not verify_identity:
                    context.check_hostname = False
            else:
                # Pick the highest requested version; fall back to the next
                # one when TLSv1.3 was requested but is unsupported.
                tls_versions.sort(reverse=True)

                tls_version = tls_versions[0]
                if not TLS_V1_3_SUPPORTED and \
                   tls_version == "TLSv1.3" and len(tls_versions) > 1:
                    tls_version = tls_versions[1]
                ssl_protocol = TLS_VERSIONS[tls_version]
                context = ssl.SSLContext(ssl_protocol)

                if tls_version == "TLSv1.3":
                    # Disable any lower versions not requested alongside
                    # TLSv1.3.
                    if "TLSv1.2" not in tls_versions:
                        context.options |= ssl.OP_NO_TLSv1_2
                    if "TLSv1.1" not in tls_versions:
                        context.options |= ssl.OP_NO_TLSv1_1
                    if "TLSv1" not in tls_versions:
                        context.options |= ssl.OP_NO_TLSv1

            # Hostname verification is performed manually below (after the
            # handshake), so it is disabled on the context itself; it must
            # also be off before CERT_NONE can be assigned to verify_mode.
            context.check_hostname = False
            context.verify_mode = cert_reqs
            context.load_default_certs()

            if ca:
                try:
                    context.load_verify_locations(ca)
                except (IOError, ssl.SSLError) as err:
                    self.sock.close()
                    raise InterfaceError(
                        "Invalid CA Certificate: {}".format(err))
            if cert:
                try:
                    context.load_cert_chain(cert, key)
                except (IOError, ssl.SSLError) as err:
                    self.sock.close()
                    raise InterfaceError(
                        "Invalid Certificate/Key: {}".format(err))
            if cipher_suites:
                context.set_ciphers(cipher_suites)

            # TCP sockets carry server_host; pass it as SNI server_hostname.
            if hasattr(self, "server_host"):
                self.sock = context.wrap_socket(
                    self.sock, server_hostname=self.server_host)
            else:
                self.sock = context.wrap_socket(self.sock)

            if verify_identity:
                context.check_hostname = True
                hostnames = [self.server_host]
                if os.name == 'nt' and self.server_host == 'localhost':
                    hostnames = ['localhost', '127.0.0.1']
                    aliases = socket.gethostbyaddr(self.server_host)
                    hostnames.extend([aliases[0]] + aliases[1])
                # Accept the certificate if it matches ANY candidate name.
                # NOTE(review): ssl.match_hostname is deprecated since
                # Python 3.7 and removed in 3.12 — confirm target runtime.
                match_found = False
                errs = []
                for hostname in hostnames:
                    try:
                        ssl.match_hostname(self.sock.getpeercert(), hostname)
                    except ssl.CertificateError as err:
                        errs.append(str(err))
                    else:
                        match_found = True
                        break
                if not match_found:
                    self.sock.close()
                    raise InterfaceError("Unable to verify server identity: {}"
                                         "".format(", ".join(errs)))
        except NameError:
            # 'ssl' was never imported: build lacks SSL support.
            raise errors.NotSupportedError(
                "Python installation has no SSL support")
        except (ssl.SSLError, IOError) as err:
            raise errors.InterfaceError(
                errno=2055, values=(self.get_address(), _strioerror(err)))
        except ssl.CertificateError as err:
            raise errors.InterfaceError(str(err))
        except NotImplementedError as err:
            raise errors.InterfaceError(str(err))
+
+
+# pylint: enable=C0103,E1101
+
+
class MySQLUnixSocket(BaseMySQLSocket):
    """MySQL socket class using UNIX sockets

    Opens a connection through the UNIX socket of the MySQL Server.
    """

    def __init__(self, unix_socket='/tmp/mysql.sock'):
        # unix_socket: filesystem path of the server's UNIX-domain socket.
        super(MySQLUnixSocket, self).__init__()
        self.unix_socket = unix_socket

    def get_address(self):
        # The address used in error messages is the socket path itself.
        return self.unix_socket

    def open_connection(self):
        # Create, configure and connect the UNIX-domain socket; socket
        # errors are mapped to MySQL error 2002 (connection error).
        try:
            self.sock = socket.socket(socket.AF_UNIX,  # pylint: disable=E1101
                                      socket.SOCK_STREAM)
            self.sock.settimeout(self._connection_timeout)
            self.sock.connect(self.unix_socket)
        except IOError as err:
            raise errors.InterfaceError(
                errno=2002, values=(self.get_address(), _strioerror(err)))
        except Exception as err:
            raise errors.InterfaceError(str(err))
+
+
class MySQLTCPSocket(BaseMySQLSocket):
    """MySQL socket class using TCP/IP

    Opens a TCP/IP connection to the MySQL Server.
    """

    def __init__(self, host='127.0.0.1', port=3306, force_ipv6=False):
        # force_ipv6: when True, require an IPv6 address for the host.
        super(MySQLTCPSocket, self).__init__()
        self.server_host = host
        self.server_port = port
        self.force_ipv6 = force_ipv6
        # Address family chosen during open_connection() (0 = undecided).
        self._family = 0

    def get_address(self):
        return "{0}:{1}".format(self.server_host, self.server_port)

    def open_connection(self):
        """Open the TCP/IP connection to the MySQL server
        """
        # Get address information
        addrinfo = [None] * 5
        try:
            addrinfos = socket.getaddrinfo(self.server_host,
                                           self.server_port,
                                           0, socket.SOCK_STREAM,
                                           socket.SOL_TCP)
            # If multiple results we favor IPv4, unless IPv6 was forced.
            for info in addrinfos:
                if self.force_ipv6 and info[0] == socket.AF_INET6:
                    addrinfo = info
                    break
                elif info[0] == socket.AF_INET:
                    addrinfo = info
                    break
            if self.force_ipv6 and addrinfo[0] is None:
                raise errors.InterfaceError(
                    "No IPv6 address found for {0}".format(self.server_host))
            if addrinfo[0] is None:
                # Neither preferred family matched; take the first result.
                addrinfo = addrinfos[0]
        except IOError as err:
            raise errors.InterfaceError(
                errno=2003, values=(self.get_address(), _strioerror(err)))
        else:
            (self._family, socktype, proto, _, sockaddr) = addrinfo

        # Instantiate the socket and connect
        try:
            self.sock = socket.socket(self._family, socktype, proto)
            self.sock.settimeout(self._connection_timeout)
            self.sock.connect(sockaddr)
        except IOError as err:
            raise errors.InterfaceError(
                errno=2003, values=(
                    self.server_host,
                    self.server_port,
                    _strioerror(err),
                )
            )
        except Exception as err:
            raise errors.OperationalError(str(err))
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/optionfiles.py b/monEnvTP/lib/python3.8/site-packages/mysql/connector/optionfiles.py
new file mode 100644
index 0000000000000000000000000000000000000000..e77a49a0bc0d0fb38391c221e272009dd2d46641
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql/connector/optionfiles.py
@@ -0,0 +1,345 @@
+# Copyright (c) 2014, 2021, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""Implements parser to parse MySQL option files.
+"""
+
+import codecs
+import io
+import os
+import re
+
+from configparser import (
+ ConfigParser as SafeConfigParser,
+ MissingSectionHeaderError
+)
+
+from .constants import DEFAULT_CONFIGURATION, CNX_POOL_ARGS
+
+
# Option-file extensions scanned when processing a "!includedir"
# directive, keyed by os.name ('nt' = Windows, 'posix' = everything else).
DEFAULT_EXTENSIONS = {
    'nt': ('ini', 'cnf'),
    'posix': ('cnf',)
}
+
+
def read_option_files(**config):
    """
    Read option files for connection parameters.

    Checks if connection arguments contain option file arguments, and then
    reads option files accordingly.
    """
    if 'option_files' in config:
        try:
            if isinstance(config['option_groups'], str):
                config['option_groups'] = [config['option_groups']]
            groups = config['option_groups']
            del config['option_groups']
        except KeyError:
            # Default groups read when the caller does not name any.
            groups = ['client', 'connector_python']

        if isinstance(config['option_files'], str):
            config['option_files'] = [config['option_files']]
        option_parser = MySQLOptionsParser(list(config['option_files']),
                                           keep_dashes=False)
        del config['option_files']

        config_from_file = option_parser.get_groups_as_dict_with_priority(
            *groups)
        config_options = {}
        for group in groups:
            try:
                for option, value in config_from_file[group].items():
                    try:
                        if option == 'socket':
                            option = 'unix_socket'

                        if (option not in CNX_POOL_ARGS and
                                option != 'failover'):
                            # Raises KeyError for options the connector does
                            # not recognize.
                            # pylint: disable=W0104
                            DEFAULT_CONFIGURATION[option]
                            # pylint: enable=W0104

                        # Keep the value from the higher-priority file.
                        if (option not in config_options or
                                config_options[option][1] <= value[1]):
                            config_options[option] = value
                    except KeyError:
                        if group == 'connector_python':
                            raise AttributeError("Unsupported argument "
                                                 "'{0}'".format(option))
            except KeyError:
                continue

        not_evaluate = ('password', 'passwd')
        for option, value in config_options.items():
            if option not in config:
                try:
                    if option in not_evaluate:
                        config[option] = value[0]
                    else:
                        # SECURITY NOTE(review): eval() of option-file values
                        # executes arbitrary Python expressions; option files
                        # must come from a trusted source.
                        config[option] = eval(value[0])  # pylint: disable=W0123
                except (NameError, SyntaxError):
                    # Not a Python literal/expression: keep the raw string.
                    config[option] = value[0]

    return config
+
+
class MySQLOptionsParser(SafeConfigParser):  # pylint: disable=R0901
    """This class implements methods to parse MySQL option files"""

    def __init__(self, files=None, keep_dashes=True):  # pylint: disable=W0231
        """Initialize

        If defaults is True, default option files are read first

        Raises ValueError if defaults is set to True but defaults files
        cannot be found.
        """

        # Regular expression to allow options with no value(For Python v2.6)
        self.OPTCRE = re.compile(  # pylint: disable=C0103
            r'(?P<option>[^:=\s][^:=]*)'
            r'\s*(?:'
            r'(?P<vi>[:=])\s*'
            r'(?P<value>.*))?$'
        )

        # Maps group name -> {option: (value, priority)} where priority is
        # the index of the file the value came from.
        self._options_dict = {}

        SafeConfigParser.__init__(self, strict=False)

        self.default_extension = DEFAULT_EXTENSIONS[os.name]
        self.keep_dashes = keep_dashes

        if not files:
            raise ValueError('files argument should be given')
        if isinstance(files, str):
            self.files = [files]
        else:
            self.files = files

        self._parse_options(list(self.files))
        self._sections = self.get_groups_as_dict()

    def optionxform(self, optionstr):
        """Converts option strings

        Converts option strings to lower case and replaces dashes(-) with
        underscores(_) unless the keep_dashes variable is set.
        """
        if not self.keep_dashes:
            optionstr = optionstr.replace('-', '_')
        return optionstr.lower()

    def _parse_options(self, files):
        """Parse options from files given as arguments.
        This method checks for !include or !includedir directives and if there
        is any, those files included by these directives are also parsed
        for options.

        Raises ValueError if any of the included or file given in arguments
        is not readable.
        """
        initial_files = files[:]
        files = []
        index = 0
        err_msg = "Option file '{0}' being included again in file '{1}'"

        for file_ in initial_files:
            try:
                if file_ in initial_files[index+1:]:
                    raise ValueError("Same option file '{0}' occurring more "
                                     "than once in the list".format(file_))
                with open(file_, 'r') as op_file:
                    for line in op_file.readlines():
                        if line.startswith('!includedir'):
                            # Include every option file found in the named
                            # directory (filtered by platform extension).
                            _, dir_path = line.split(None, 1)
                            dir_path = dir_path.strip()
                            for entry in os.listdir(dir_path):
                                entry = os.path.join(dir_path, entry)
                                if entry in files:
                                    raise ValueError(err_msg.format(
                                        entry, file_))
                                if (os.path.isfile(entry) and
                                        entry.endswith(self.default_extension)):
                                    files.append(entry)

                        elif line.startswith('!include'):
                            _, filename = line.split(None, 1)
                            filename = filename.strip()
                            if filename in files:
                                raise ValueError(err_msg.format(
                                    filename, file_))
                            files.append(filename)

                index += 1
                # Included files come before the file that included them,
                # so the including file wins on conflicting options.
                files.append(file_)
            except (IOError, OSError) as exc:
                raise ValueError("Failed reading file '{0}': {1}".format(
                    file_, str(exc)))

        read_files = self.read(files)
        not_read_files = set(files) - set(read_files)
        if not_read_files:
            raise ValueError("File(s) {0} could not be read.".format(
                ', '.join(not_read_files)))

    def read(self, filenames):  # pylint: disable=W0221
        """Read and parse a filename or a list of filenames.

        Overridden from ConfigParser and modified so as to allow options
        which are not inside any section header

        Return list of successfully read files.
        """
        if isinstance(filenames, str):
            filenames = [filenames]
        read_ok = []
        # Files later in the list get a higher priority value.
        for priority, filename in enumerate(filenames):
            try:
                out_file = io.StringIO()
                for line in codecs.open(filename, encoding='utf-8'):
                    line = line.strip()
                    # Skip lines that begin with "!includedir" or "!include"
                    if line.startswith('!include'):
                        continue

                    match_obj = self.OPTCRE.match(line)
                    if not self.SECTCRE.match(line) and match_obj:
                        optname, delimiter, optval = match_obj.group('option',
                                                                     'vi',
                                                                     'value')
                        if optname and not optval and not delimiter:
                            # Valueless option: append '=' so the base
                            # parser accepts it.
                            out_file.write(line + "=\n")
                        else:
                            out_file.write(line + '\n')
                    else:
                        out_file.write(line + '\n')
                out_file.seek(0)
            except IOError:
                continue
            try:
                self._read(out_file, filename)
                for group in self._sections.keys():
                    try:
                        self._options_dict[group]
                    except KeyError:
                        self._options_dict[group] = {}
                    for option, value in self._sections[group].items():
                        self._options_dict[group][option] = (value, priority)

                # Reset parsed sections so the next file starts clean.
                self._sections = self._dict()

            except MissingSectionHeaderError:
                self._read(out_file, filename)
            out_file.close()
            read_ok.append(filename)
        return read_ok

    def get_groups(self, *args):
        """Returns options as a dictionary.

        Returns options from all the groups specified as arguments, returns
        the options from all groups if no argument provided. Options are
        overridden when they are found in the next group.

        Returns a dictionary
        """
        if not args:
            args = self._options_dict.keys()

        options = {}
        priority = {}
        for group in args:
            try:
                for option, value in [(key, value,) for key, value in
                                      self._options_dict[group].items() if
                                      key != "__name__" and
                                      not key.startswith("!")]:
                    # Higher-priority (later file) values win.
                    if option not in options or priority[option] <= value[1]:
                        priority[option] = value[1]
                        options[option] = value[0]
            except KeyError:
                pass

        return options

    def get_groups_as_dict_with_priority(self, *args):  # pylint: disable=C0103
        """Returns options as dictionary of dictionaries.

        Returns options from all the groups specified as arguments. For each
        group the option are contained in a dictionary. The order in which
        the groups are specified is unimportant. Also options are not
        overridden in between the groups.

        The value is a tuple with two elements, first being the actual value
        and second is the priority of the value which is higher for a value
        read from a higher priority file.

        Returns a dictionary of dictionaries
        """
        if not args:
            args = self._options_dict.keys()

        options = dict()
        for group in args:
            try:
                options[group] = dict((key, value,) for key, value in
                                      self._options_dict[group].items() if
                                      key != "__name__" and
                                      not key.startswith("!"))
            except KeyError:
                pass

        return options

    def get_groups_as_dict(self, *args):
        """Returns options as dictionary of dictionaries.

        Returns options from all the groups specified as arguments. For each
        group the option are contained in a dictionary. The order in which
        the groups are specified is unimportant. Also options are not
        overridden in between the groups.

        Returns a dictionary of dictionaries
        """
        if not args:
            args = self._options_dict.keys()

        options = dict()
        for group in args:
            try:
                options[group] = dict((key, value[0],) for key, value in
                                      self._options_dict[group].items() if
                                      key != "__name__" and
                                      not key.startswith("!"))
            except KeyError:
                pass

        return options
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/pooling.py b/monEnvTP/lib/python3.8/site-packages/mysql/connector/pooling.py
new file mode 100644
index 0000000000000000000000000000000000000000..30358ec95d7b137baa4a4d0af99e6d5e8817f37a
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql/connector/pooling.py
@@ -0,0 +1,373 @@
+# Copyright (c) 2013, 2021, Oracle and/or its affiliates. All rights reserved.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""Implementing pooling of connections to MySQL servers.
+"""
+
+import re
+from uuid import uuid4
+# pylint: disable=F0401
+try:
+ import queue
+except ImportError:
+ # Python v2
+ import Queue as queue
+# pylint: enable=F0401
+import threading
+
+try:
+ from mysql.connector.connection_cext import (CMySQLConnection)
+except ImportError:
+ CMySQLConnection = None
+
+from . import errors
+from . import Connect
+from .connection import MySQLConnection
+
# Module-wide lock serializing pool creation and (re)configuration.
CONNECTION_POOL_LOCK = threading.RLock()
# Upper bounds for pool size and pool-name length.
CNX_POOL_MAXSIZE = 32
CNX_POOL_MAXNAMESIZE = 64
# Matches characters that are NOT allowed in a pool name.
CNX_POOL_NAMEREGEX = re.compile(r'[^a-zA-Z0-9._:\-*$#]')
# Connection classes accepted by the pool; the C extension class is optional.
MYSQL_CNX_CLASS = ((MySQLConnection) if CMySQLConnection is None else
                   (MySQLConnection, CMySQLConnection))
+
+
def generate_pool_name(**kwargs):
    """Generate a pool name

    This function takes keyword arguments, usually the connection
    arguments for MySQLConnection, and tries to generate a name for
    a pool.

    Raises PoolError when no name can be generated.

    Returns a string.
    """
    # Build the name from whichever identifying arguments are present,
    # always in this fixed order.
    components = [str(kwargs[key])
                  for key in ('host', 'port', 'user', 'database')
                  if key in kwargs]

    if not components:
        raise errors.PoolError(
            "Failed generating pool name; specify pool_name")

    return '_'.join(components)
+
+
class PooledMySQLConnection(object):
    """Class holding a MySQL Connection in a pool

    PooledMySQLConnection is used by MySQLConnectionPool to return an
    instance holding a MySQL connection. It works like a MySQLConnection
    except for methods like close() and config().

    The close()-method will add the connection back to the pool rather
    than disconnecting from the MySQL server.

    Configuring the connection has to be done through the MySQLConnectionPool
    method set_config(). Using config() on a pooled connection will raise a
    PoolError.
    """
    def __init__(self, pool, cnx):
        """Initialize

        The pool argument must be an instance of MySQLConnectionPool. cnx
        is an instance of MySQLConnection.
        """
        if not isinstance(pool, MySQLConnectionPool):
            raise AttributeError(
                "pool should be a MySQLConnectionPool")
        if not isinstance(cnx, MYSQL_CNX_CLASS):
            raise AttributeError(
                "cnx should be a MySQLConnection")
        self._cnx_pool = pool
        self._cnx = cnx

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Context-manager exit returns the connection to the pool.
        self.close()

    def __getattr__(self, attr):
        """Calls attributes of the MySQLConnection instance"""
        # Everything not defined here is delegated to the wrapped connection.
        return getattr(self._cnx, attr)

    def close(self):
        """Do not close, but add connection back to pool

        The close() method does not close the connection with the
        MySQL server. The connection is added back to the pool so it
        can be reused.

        When the pool is configured to reset the session, the session
        state will be cleared by re-authenticating the user.
        """
        try:
            cnx = self._cnx
            if self._cnx_pool.reset_session:
                cnx.reset_session()
        finally:
            # The connection goes back to the pool even if reset fails.
            self._cnx_pool.add_connection(cnx)
            self._cnx = None

    def config(self, **kwargs):
        """Configuration is done through the pool"""
        raise errors.PoolError(
            "Configuration for pooled connections should "
            "be done through the pool itself."
        )

    @property
    def pool_name(self):
        """Return the name of the connection pool"""
        return self._cnx_pool.pool_name
+
+
class MySQLConnectionPool(object):
    """Class defining a pool of MySQL connections"""
    def __init__(self, pool_size=5, pool_name=None, pool_reset_session=True,
                 **kwargs):
        """Initialize

        Initialize a MySQL connection pool with a maximum number of
        connections set to pool_size. The rest of the keywords
        arguments, kwargs, are configuration arguments for MySQLConnection
        instances.
        """
        self._pool_size = None
        self._pool_name = None
        self._reset_session = pool_reset_session
        self._set_pool_size(pool_size)
        self._set_pool_name(pool_name or generate_pool_name(**kwargs))
        self._cnx_config = {}
        self._cnx_queue = queue.Queue(self._pool_size)
        # Token regenerated by set_config(); connections carrying an older
        # token are reconfigured when checked out.
        self._config_version = uuid4()

        if kwargs:
            self.set_config(**kwargs)
            # Pre-fill the pool only when connection arguments were given.
            cnt = 0
            while cnt < self._pool_size:
                self.add_connection()
                cnt += 1

    @property
    def pool_name(self):
        """Return the name of the connection pool"""
        return self._pool_name

    @property
    def pool_size(self):
        """Return number of connections managed by the pool"""
        return self._pool_size

    @property
    def reset_session(self):
        """Return whether to reset session"""
        return self._reset_session

    def set_config(self, **kwargs):
        """Set the connection configuration for MySQLConnection instances

        This method sets the configuration used for creating MySQLConnection
        instances. See MySQLConnection for valid connection arguments.

        Raises PoolError when a connection argument is not valid, missing
        or not supported by MySQLConnection.
        """
        if not kwargs:
            return

        with CONNECTION_POOL_LOCK:
            try:
                # Validate the arguments against a throwaway connection
                # object before accepting them.
                test_cnx = Connect()
                test_cnx.config(**kwargs)
                self._cnx_config = kwargs
                self._config_version = uuid4()
            except AttributeError as err:
                raise errors.PoolError(
                    "Connection configuration not valid: {0}".format(err))

    def _set_pool_size(self, pool_size):
        """Set the size of the pool

        This method sets the size of the pool but it will not resize the pool.

        Raises an AttributeError when the pool_size is not valid. Invalid size
        is 0, negative or higher than pooling.CNX_POOL_MAXSIZE.
        """
        if pool_size <= 0 or pool_size > CNX_POOL_MAXSIZE:
            raise AttributeError(
                "Pool size should be higher than 0 and "
                "lower or equal to {0}".format(CNX_POOL_MAXSIZE))
        self._pool_size = pool_size

    def _set_pool_name(self, pool_name):
        r"""Set the name of the pool

        This method checks the validity and sets the name of the pool.

        Raises an AttributeError when pool_name contains illegal characters
        ([^a-zA-Z0-9._\-*$#]) or is longer than pooling.CNX_POOL_MAXNAMESIZE.
        """
        if CNX_POOL_NAMEREGEX.search(pool_name):
            raise AttributeError(
                "Pool name '{0}' contains illegal characters".format(pool_name))
        if len(pool_name) > CNX_POOL_MAXNAMESIZE:
            raise AttributeError(
                "Pool name '{0}' is too long".format(pool_name))
        self._pool_name = pool_name

    def _queue_connection(self, cnx):
        """Put connection back in the queue

        This method is putting a connection back in the queue. It will not
        acquire a lock as the methods using _queue_connection() will have it
        set.

        Raises PoolError on errors.
        """
        if not isinstance(cnx, MYSQL_CNX_CLASS):
            raise errors.PoolError(
                "Connection instance not subclass of MySQLConnection.")

        try:
            self._cnx_queue.put(cnx, block=False)
        except queue.Full:
            raise errors.PoolError("Failed adding connection; queue is full")

    def add_connection(self, cnx=None):
        """Add a connection to the pool

        This method instantiates a MySQLConnection using the configuration
        passed when initializing the MySQLConnectionPool instance or using
        the set_config() method.
        If cnx is a MySQLConnection instance, it will be added to the
        queue.

        Raises PoolError when no configuration is set, when no more
        connection can be added (maximum reached) or when the connection
        can not be instantiated.
        """
        with CONNECTION_POOL_LOCK:
            if not self._cnx_config:
                raise errors.PoolError(
                    "Connection configuration not available")

            if self._cnx_queue.full():
                raise errors.PoolError(
                    "Failed adding connection; queue is full")

            if not cnx:
                cnx = Connect(**self._cnx_config)
                try:
                    # KeyError when 'compress' was not configured at all.
                    if (self._reset_session and self._cnx_config['compress']
                            and cnx.get_server_version() < (5, 7, 3)):
                        raise errors.NotSupportedError("Pool reset session is "
                                                       "not supported with "
                                                       "compression for MySQL "
                                                       "server version 5.7.2 "
                                                       "or earlier.")
                except KeyError:
                    pass

                # pylint: disable=W0201,W0212
                cnx._pool_config_version = self._config_version
                # pylint: enable=W0201,W0212
            else:
                if not isinstance(cnx, MYSQL_CNX_CLASS):
                    raise errors.PoolError(
                        "Connection instance not subclass of MySQLConnection.")

            self._queue_connection(cnx)

    def get_connection(self):
        """Get a connection from the pool

        This method returns a PooledMySQLConnection instance which
        has a reference to the pool that created it, and the next available
        MySQL connection.

        When the MySQL connection is not connected, a reconnect is attempted.

        Raises PoolError on errors.

        Returns a PooledMySQLConnection instance.
        """
        with CONNECTION_POOL_LOCK:
            try:
                cnx = self._cnx_queue.get(block=False)
            except queue.Empty:
                raise errors.PoolError(
                    "Failed getting connection; pool exhausted")

            # Reconfigure/reconnect stale or dead connections on checkout.
            # pylint: disable=W0201,W0212
            if not cnx.is_connected() \
                    or self._config_version != cnx._pool_config_version:
                cnx.config(**self._cnx_config)
                try:
                    cnx.reconnect()
                except errors.InterfaceError:
                    # Failed to reconnect, give connection back to pool
                    self._queue_connection(cnx)
                    raise
                cnx._pool_config_version = self._config_version
            # pylint: enable=W0201,W0212

            return PooledMySQLConnection(self, cnx)

    def _remove_connections(self):
        """Close all connections

        This method closes all connections. It returns the number
        of connections it closed.

        Used mostly for tests.

        Returns int.
        """
        with CONNECTION_POOL_LOCK:
            cnt = 0
            cnxq = self._cnx_queue
            while cnxq.qsize():
                try:
                    cnx = cnxq.get(block=False)
                    cnx.disconnect()
                    cnt += 1
                except queue.Empty:
                    return cnt
                except errors.PoolError:
                    raise
                except errors.Error:
                    # Any other error when closing means connection is closed
                    pass

            return cnt
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/protocol.py b/monEnvTP/lib/python3.8/site-packages/mysql/connector/protocol.py
new file mode 100644
index 0000000000000000000000000000000000000000..ba6081f231b535652cf1243bf90c09dd5c13d45b
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql/connector/protocol.py
@@ -0,0 +1,818 @@
+# Copyright (c) 2009, 2021, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""Implements the MySQL Client/Server protocol
+"""
+
+import struct
+import datetime
+
+from decimal import Decimal
+
+from .constants import (
+ FieldFlag, ServerCmd, FieldType, ClientFlag, PARAMETER_COUNT_AVAILABLE)
+from . import errors, utils
+from .authentication import get_auth_plugin
+from .errors import DatabaseError, get_exception
+
+PROTOCOL_VERSION = 10
+
+
+class MySQLProtocol(object):
+ """Implements MySQL client/server protocol
+
+ Creates and parses MySQL packets.
+ """
+
+ def _connect_with_db(self, client_flags, database):
+ """Prepare database string for handshake response"""
+ if client_flags & ClientFlag.CONNECT_WITH_DB and database:
+ return database.encode('utf8') + b'\x00'
+ return b'\x00'
+
+ def _auth_response(self, client_flags, username, password, database,
+ auth_plugin, auth_data, ssl_enabled):
+ """Prepare the authentication response"""
+ if not password:
+ return b'\x00'
+
+ try:
+ auth = get_auth_plugin(auth_plugin)(
+ auth_data,
+ username=username, password=password, database=database,
+ ssl_enabled=ssl_enabled)
+ plugin_auth_response = auth.auth_response()
+ except (TypeError, errors.InterfaceError) as exc:
+ raise errors.InterfaceError(
+ "Failed authentication: {0}".format(str(exc)))
+
+ if client_flags & ClientFlag.SECURE_CONNECTION:
+ resplen = len(plugin_auth_response)
+ auth_response = struct.pack('<B', resplen) + plugin_auth_response
+ else:
+ auth_response = plugin_auth_response + b'\x00'
+ return auth_response
+
+ def make_auth(self, handshake, username=None, password=None, database=None,
+ charset=45, client_flags=0,
+ max_allowed_packet=1073741824, ssl_enabled=False,
+ auth_plugin=None, conn_attrs=None):
+ """Make a MySQL Authentication packet"""
+
+ try:
+ auth_data = handshake['auth_data']
+ auth_plugin = auth_plugin or handshake['auth_plugin']
+ except (TypeError, KeyError) as exc:
+ raise errors.ProgrammingError(
+ "Handshake misses authentication info ({0})".format(exc))
+
+ if not username:
+ username = b''
+ try:
+ username_bytes = username.encode('utf8') # pylint: disable=E1103
+ except AttributeError:
+ # Username is already bytes
+ username_bytes = username
+ packet = struct.pack('<IIH{filler}{usrlen}sx'.format(
+ filler='x' * 22, usrlen=len(username_bytes)),
+ client_flags, max_allowed_packet, charset,
+ username_bytes)
+
+ packet += self._auth_response(client_flags, username, password,
+ database,
+ auth_plugin,
+ auth_data, ssl_enabled)
+
+ packet += self._connect_with_db(client_flags, database)
+
+ if client_flags & ClientFlag.PLUGIN_AUTH:
+ packet += auth_plugin.encode('utf8') + b'\x00'
+
+ if (client_flags & ClientFlag.CONNECT_ARGS) and conn_attrs is not None:
+ packet += self.make_conn_attrs(conn_attrs)
+
+ return packet
+
+ def make_conn_attrs(self, conn_attrs):
+ """Encode the connection attributes"""
+ for attr_name in conn_attrs:
+ if conn_attrs[attr_name] is None:
+ conn_attrs[attr_name] = ""
+ conn_attrs_len = (
+ sum([len(x) + len(conn_attrs[x]) for x in conn_attrs]) +
+ len(conn_attrs.keys()) + len(conn_attrs.values()))
+
+ conn_attrs_packet = struct.pack('<B', conn_attrs_len)
+ for attr_name in conn_attrs:
+ conn_attrs_packet += struct.pack('<B', len(attr_name))
+ conn_attrs_packet += attr_name.encode('utf8')
+ conn_attrs_packet += struct.pack('<B', len(conn_attrs[attr_name]))
+ conn_attrs_packet += conn_attrs[attr_name].encode('utf8')
+ return conn_attrs_packet
+
+ def make_auth_ssl(self, charset=45, client_flags=0,
+ max_allowed_packet=1073741824):
+ """Make a SSL authentication packet"""
+ return utils.int4store(client_flags) + \
+ utils.int4store(max_allowed_packet) + \
+ utils.int2store(charset) + \
+ b'\x00' * 22
+
+ def make_command(self, command, argument=None):
+ """Make a MySQL packet containing a command"""
+ data = utils.int1store(command)
+ if argument is not None:
+ data += argument
+ return data
+
+ def make_stmt_fetch(self, statement_id, rows=1):
+ """Make a MySQL packet with Fetch Statement command"""
+ return utils.int4store(statement_id) + utils.int4store(rows)
+
+ def make_change_user(self, handshake, username=None, password=None,
+ database=None, charset=45, client_flags=0,
+ ssl_enabled=False, auth_plugin=None, conn_attrs=None):
+ """Make a MySQL packet with the Change User command"""
+
+ try:
+ auth_data = handshake['auth_data']
+ auth_plugin = auth_plugin or handshake['auth_plugin']
+ except (TypeError, KeyError) as exc:
+ raise errors.ProgrammingError(
+ "Handshake misses authentication info ({0})".format(exc))
+
+ if not username:
+ username = b''
+ try:
+ username_bytes = username.encode('utf8') # pylint: disable=E1103
+ except AttributeError:
+ # Username is already bytes
+ username_bytes = username
+ packet = struct.pack('<B{usrlen}sx'.format(usrlen=len(username_bytes)),
+ ServerCmd.CHANGE_USER, username_bytes)
+
+ packet += self._auth_response(client_flags, username, password,
+ database,
+ auth_plugin,
+ auth_data, ssl_enabled)
+
+ packet += self._connect_with_db(client_flags, database)
+
+ packet += struct.pack('<H', charset)
+
+ if client_flags & ClientFlag.PLUGIN_AUTH:
+ packet += auth_plugin.encode('utf8') + b'\x00'
+
+ if (client_flags & ClientFlag.CONNECT_ARGS) and conn_attrs is not None:
+ packet += self.make_conn_attrs(conn_attrs)
+
+ return packet
+
+ def parse_handshake(self, packet):
+ """Parse a MySQL Handshake-packet"""
+ res = {}
+ res['protocol'] = struct.unpack('<xxxxB', packet[0:5])[0]
+ if res["protocol"] != PROTOCOL_VERSION:
+ raise DatabaseError("Protocol mismatch; server version = {}, "
+ "client version = {}".format(res["protocol"],
+ PROTOCOL_VERSION))
+ (packet, res['server_version_original']) = utils.read_string(
+ packet[5:], end=b'\x00')
+
+ (res['server_threadid'],
+ auth_data1,
+ capabilities1,
+ res['charset'],
+ res['server_status'],
+ capabilities2,
+ auth_data_length
+ ) = struct.unpack('<I8sx2sBH2sBxxxxxxxxxx', packet[0:31])
+ res['server_version_original'] = res['server_version_original'].decode()
+
+ packet = packet[31:]
+
+ capabilities = utils.intread(capabilities1 + capabilities2)
+ auth_data2 = b''
+ if capabilities & ClientFlag.SECURE_CONNECTION:
+ size = min(13, auth_data_length - 8) if auth_data_length else 13
+ auth_data2 = packet[0:size]
+ packet = packet[size:]
+ if auth_data2[-1] == 0:
+ auth_data2 = auth_data2[:-1]
+
+ if capabilities & ClientFlag.PLUGIN_AUTH:
+ if (b'\x00' not in packet
+ and res['server_version_original'].startswith("5.5.8")):
+ # MySQL server 5.5.8 has a bug where end byte is not send
+ (packet, res['auth_plugin']) = (b'', packet)
+ else:
+ (packet, res['auth_plugin']) = utils.read_string(
+ packet, end=b'\x00')
+ res['auth_plugin'] = res['auth_plugin'].decode('utf-8')
+ else:
+ res['auth_plugin'] = 'mysql_native_password'
+
+ res['auth_data'] = auth_data1 + auth_data2
+ res['capabilities'] = capabilities
+ return res
+
+ def parse_auth_next_factor(self, packet):
+ """Parse a MySQL AuthNextFactor packet."""
+ packet, status = utils.read_int(packet, 1)
+ if not status == 2:
+ raise errors.InterfaceError(
+ "Failed parsing AuthNextFactor packet (invalid)"
+ )
+ packet, auth_plugin = utils.read_string(packet, end=b"\x00")
+ return packet, auth_plugin.decode("utf-8")
+
+
+ def parse_ok(self, packet):
+ """Parse a MySQL OK-packet"""
+ if not packet[4] == 0:
+ raise errors.InterfaceError("Failed parsing OK packet (invalid).")
+
+ ok_packet = {}
+ try:
+ ok_packet['field_count'] = struct.unpack('<xxxxB', packet[0:5])[0]
+ (packet, ok_packet['affected_rows']) = utils.read_lc_int(packet[5:])
+ (packet, ok_packet['insert_id']) = utils.read_lc_int(packet)
+ (ok_packet['status_flag'],
+ ok_packet['warning_count']) = struct.unpack('<HH', packet[0:4])
+ packet = packet[4:]
+ if packet:
+ (packet, ok_packet['info_msg']) = utils.read_lc_string(packet)
+ ok_packet['info_msg'] = ok_packet['info_msg'].decode('utf-8')
+ except ValueError:
+ raise errors.InterfaceError("Failed parsing OK packet.")
+ return ok_packet
+
+ def parse_column_count(self, packet):
+ """Parse a MySQL packet with the number of columns in result set"""
+ try:
+ count = utils.read_lc_int(packet[4:])[1]
+ return count
+ except (struct.error, ValueError):
+ raise errors.InterfaceError("Failed parsing column count")
+
+ def parse_column(self, packet, encoding='utf-8'):
+ """Parse a MySQL column-packet"""
+ (packet, _) = utils.read_lc_string(packet[4:]) # catalog
+ (packet, _) = utils.read_lc_string(packet) # db
+ (packet, _) = utils.read_lc_string(packet) # table
+ (packet, _) = utils.read_lc_string(packet) # org_table
+ (packet, name) = utils.read_lc_string(packet) # name
+ (packet, _) = utils.read_lc_string(packet) # org_name
+
+ try:
+ (
+ charset,
+ _,
+ column_type,
+ flags,
+ _,
+ ) = struct.unpack('<xHIBHBxx', packet)
+ except struct.error:
+ raise errors.InterfaceError("Failed parsing column information")
+
+ return (
+ name.decode(encoding),
+ column_type,
+ None, # display_size
+ None, # internal_size
+ None, # precision
+ None, # scale
+ ~flags & FieldFlag.NOT_NULL, # null_ok
+ flags, # MySQL specific
+ charset,
+ )
+
+ def parse_eof(self, packet):
+ """Parse a MySQL EOF-packet"""
+ if packet[4] == 0:
+ # EOF packet deprecation
+ return self.parse_ok(packet)
+
+ err_msg = "Failed parsing EOF packet."
+ res = {}
+ try:
+ unpacked = struct.unpack('<xxxBBHH', packet)
+ except struct.error:
+ raise errors.InterfaceError(err_msg)
+
+ if not (unpacked[1] == 254 and len(packet) <= 9):
+ raise errors.InterfaceError(err_msg)
+
+ res['warning_count'] = unpacked[2]
+ res['status_flag'] = unpacked[3]
+ return res
+
+ def parse_statistics(self, packet, with_header=True):
+ """Parse the statistics packet"""
+ errmsg = "Failed getting COM_STATISTICS information"
+ res = {}
+ # Information is separated by 2 spaces
+ if with_header:
+ pairs = packet[4:].split(b'\x20\x20')
+ else:
+ pairs = packet.split(b'\x20\x20')
+ for pair in pairs:
+ try:
+ (lbl, val) = [v.strip() for v in pair.split(b':', 2)]
+ except:
+ raise errors.InterfaceError(errmsg)
+
+ # It's either an integer or a decimal
+ lbl = lbl.decode('utf-8')
+ try:
+ res[lbl] = int(val)
+ except:
+ try:
+ res[lbl] = Decimal(val.decode('utf-8'))
+ except:
+ raise errors.InterfaceError(
+ "{0} ({1}:{2}).".format(errmsg, lbl, val))
+ return res
+
+ def read_text_result(self, sock, version, count=1):
+ """Read MySQL text result
+
+ Reads all or given number of rows from the socket.
+
+ Returns a tuple with 2 elements: a list with all rows and
+ the EOF packet.
+ """
+ rows = []
+ eof = None
+ rowdata = None
+ i = 0
+ while True:
+ if eof or i == count:
+ break
+ packet = sock.recv()
+ if packet.startswith(b'\xff\xff\xff'):
+ datas = [packet[4:]]
+ packet = sock.recv()
+ while packet.startswith(b'\xff\xff\xff'):
+ datas.append(packet[4:])
+ packet = sock.recv()
+ datas.append(packet[4:])
+ rowdata = utils.read_lc_string_list(bytearray(b'').join(datas))
+ elif packet[4] == 254 and packet[0] < 7:
+ eof = self.parse_eof(packet)
+ rowdata = None
+ else:
+ eof = None
+ rowdata = utils.read_lc_string_list(packet[4:])
+ if eof is None and rowdata is not None:
+ rows.append(rowdata)
+ elif eof is None and rowdata is None:
+ raise get_exception(packet)
+ i += 1
+ return rows, eof
+
+ def _parse_binary_integer(self, packet, field):
+ """Parse an integer from a binary packet"""
+ if field[1] == FieldType.TINY:
+ format_ = '<b'
+ length = 1
+ elif field[1] == FieldType.SHORT:
+ format_ = '<h'
+ length = 2
+ elif field[1] in (FieldType.INT24, FieldType.LONG):
+ format_ = '<i'
+ length = 4
+ elif field[1] == FieldType.LONGLONG:
+ format_ = '<q'
+ length = 8
+
+ if field[7] & FieldFlag.UNSIGNED:
+ format_ = format_.upper()
+
+ return (packet[length:], struct.unpack(format_, packet[0:length])[0])
+
+ def _parse_binary_float(self, packet, field):
+ """Parse a float/double from a binary packet"""
+ if field[1] == FieldType.DOUBLE:
+ length = 8
+ format_ = '<d'
+ else:
+ length = 4
+ format_ = '<f'
+
+ return (packet[length:], struct.unpack(format_, packet[0:length])[0])
+
+ def _parse_binary_new_decimal(self, packet, charset='utf8'):
+ """Parse a New Decimal from a binary packet"""
+ (packet, value) = utils.read_lc_string(packet)
+ return (packet, Decimal(value.decode(charset)))
+
+ def _parse_binary_timestamp(self, packet, field):
+ """Parse a timestamp from a binary packet"""
+ length = packet[0]
+ value = None
+ if length == 4:
+ value = datetime.date(
+ year=struct.unpack('<H', packet[1:3])[0],
+ month=packet[3],
+ day=packet[4])
+ elif length >= 7:
+ mcs = 0
+ if length == 11:
+ mcs = struct.unpack('<I', packet[8:length + 1])[0]
+ value = datetime.datetime(
+ year=struct.unpack('<H', packet[1:3])[0],
+ month=packet[3],
+ day=packet[4],
+ hour=packet[5],
+ minute=packet[6],
+ second=packet[7],
+ microsecond=mcs)
+
+ return (packet[length + 1:], value)
+
+ def _parse_binary_time(self, packet, field):
+ """Parse a time value from a binary packet"""
+ length = packet[0]
+ data = packet[1:length + 1]
+ mcs = 0
+ if length > 8:
+ mcs = struct.unpack('<I', data[8:])[0]
+ days = struct.unpack('<I', data[1:5])[0]
+ if data[0] == 1:
+ days *= -1
+ tmp = datetime.timedelta(days=days,
+ seconds=data[7],
+ microseconds=mcs,
+ minutes=data[6],
+ hours=data[5])
+
+ return (packet[length + 1:], tmp)
+
+ def _parse_binary_values(self, fields, packet, charset='utf-8'):
+ """Parse values from a binary result packet"""
+ null_bitmap_length = (len(fields) + 7 + 2) // 8
+ null_bitmap = [int(i) for i in packet[0:null_bitmap_length]]
+ packet = packet[null_bitmap_length:]
+
+ values = []
+ for pos, field in enumerate(fields):
+ if null_bitmap[int((pos+2)/8)] & (1 << (pos + 2) % 8):
+ values.append(None)
+ continue
+ elif field[1] in (FieldType.TINY, FieldType.SHORT,
+ FieldType.INT24,
+ FieldType.LONG, FieldType.LONGLONG):
+ (packet, value) = self._parse_binary_integer(packet, field)
+ values.append(value)
+ elif field[1] in (FieldType.DOUBLE, FieldType.FLOAT):
+ (packet, value) = self._parse_binary_float(packet, field)
+ values.append(value)
+ elif field[1] == FieldType.NEWDECIMAL:
+ (packet, value) = self._parse_binary_new_decimal(packet, charset)
+ values.append(value)
+ elif field[1] in (FieldType.DATETIME, FieldType.DATE,
+ FieldType.TIMESTAMP):
+ (packet, value) = self._parse_binary_timestamp(packet, field)
+ values.append(value)
+ elif field[1] == FieldType.TIME:
+ (packet, value) = self._parse_binary_time(packet, field)
+ values.append(value)
+ else:
+ (packet, value) = utils.read_lc_string(packet)
+ values.append(value.decode(charset))
+
+ return tuple(values)
+
+ def read_binary_result(self, sock, columns, count=1, charset='utf-8'):
+ """Read MySQL binary protocol result
+
+ Reads all or given number of binary resultset rows from the socket.
+ """
+ rows = []
+ eof = None
+ values = None
+ i = 0
+ while True:
+ if eof is not None:
+ break
+ if i == count:
+ break
+ packet = sock.recv()
+ if packet[4] == 254:
+ eof = self.parse_eof(packet)
+ values = None
+ elif packet[4] == 0:
+ eof = None
+ values = self._parse_binary_values(columns, packet[5:], charset)
+ if eof is None and values is not None:
+ rows.append(values)
+ elif eof is None and values is None:
+ raise get_exception(packet)
+ i += 1
+ return (rows, eof)
+
+ def parse_binary_prepare_ok(self, packet):
+ """Parse a MySQL Binary Protocol OK packet"""
+ if not packet[4] == 0:
+ raise errors.InterfaceError("Failed parsing Binary OK packet")
+
+ ok_pkt = {}
+ try:
+ (packet, ok_pkt['statement_id']) = utils.read_int(packet[5:], 4)
+ (packet, ok_pkt['num_columns']) = utils.read_int(packet, 2)
+ (packet, ok_pkt['num_params']) = utils.read_int(packet, 2)
+ packet = packet[1:] # Filler 1 * \x00
+ (packet, ok_pkt['warning_count']) = utils.read_int(packet, 2)
+ except ValueError:
+ raise errors.InterfaceError("Failed parsing Binary OK packet")
+
+ return ok_pkt
+
+ def _prepare_binary_integer(self, value):
+ """Prepare an integer for the MySQL binary protocol"""
+ field_type = None
+ flags = 0
+ if value < 0:
+ if value >= -128:
+ format_ = '<b'
+ field_type = FieldType.TINY
+ elif value >= -32768:
+ format_ = '<h'
+ field_type = FieldType.SHORT
+ elif value >= -2147483648:
+ format_ = '<i'
+ field_type = FieldType.LONG
+ else:
+ format_ = '<q'
+ field_type = FieldType.LONGLONG
+ else:
+ flags = 128
+ if value <= 255:
+ format_ = '<B'
+ field_type = FieldType.TINY
+ elif value <= 65535:
+ format_ = '<H'
+ field_type = FieldType.SHORT
+ elif value <= 4294967295:
+ format_ = '<I'
+ field_type = FieldType.LONG
+ else:
+ field_type = FieldType.LONGLONG
+ format_ = '<Q'
+ return (struct.pack(format_, value), field_type, flags)
+
+ def _prepare_binary_timestamp(self, value):
+ """Prepare a timestamp object for the MySQL binary protocol
+
+ This method prepares a timestamp of type datetime.datetime or
+ datetime.date for sending over the MySQL binary protocol.
+ A tuple is returned with the prepared value and field type
+ as elements.
+
+ Raises ValueError when the argument value is of invalid type.
+
+ Returns a tuple.
+ """
+ if isinstance(value, datetime.datetime):
+ field_type = FieldType.DATETIME
+ elif isinstance(value, datetime.date):
+ field_type = FieldType.DATE
+ else:
+ raise ValueError(
+ "Argument must a datetime.datetime or datetime.date")
+
+ packed = (utils.int2store(value.year) +
+ utils.int1store(value.month) +
+ utils.int1store(value.day))
+
+ if isinstance(value, datetime.datetime):
+ packed = (packed + utils.int1store(value.hour) +
+ utils.int1store(value.minute) +
+ utils.int1store(value.second))
+ if value.microsecond > 0:
+ packed += utils.int4store(value.microsecond)
+
+ packed = utils.int1store(len(packed)) + packed
+ return (packed, field_type)
+
+ def _prepare_binary_time(self, value):
+ """Prepare a time object for the MySQL binary protocol
+
+ This method prepares a time object of type datetime.timedelta or
+ datetime.time for sending over the MySQL binary protocol.
+ A tuple is returned with the prepared value and field type
+ as elements.
+
+ Raises ValueError when the argument value is of invalid type.
+
+ Returns a tuple.
+ """
+ if not isinstance(value, (datetime.timedelta, datetime.time)):
+ raise ValueError(
+ "Argument must a datetime.timedelta or datetime.time")
+
+ field_type = FieldType.TIME
+ negative = 0
+ mcs = None
+ packed = b''
+
+ if isinstance(value, datetime.timedelta):
+ if value.days < 0:
+ negative = 1
+ (hours, remainder) = divmod(value.seconds, 3600)
+ (mins, secs) = divmod(remainder, 60)
+ packed += (utils.int4store(abs(value.days)) +
+ utils.int1store(hours) +
+ utils.int1store(mins) +
+ utils.int1store(secs))
+ mcs = value.microseconds
+ else:
+ packed += (utils.int4store(0) +
+ utils.int1store(value.hour) +
+ utils.int1store(value.minute) +
+ utils.int1store(value.second))
+ mcs = value.microsecond
+ if mcs:
+ packed += utils.int4store(mcs)
+
+ packed = utils.int1store(negative) + packed
+ packed = utils.int1store(len(packed)) + packed
+
+ return (packed, field_type)
+
+ def _prepare_stmt_send_long_data(self, statement, param, data):
+ """Prepare long data for prepared statements
+
+ Returns a string.
+ """
+ packet = (
+ utils.int4store(statement) +
+ utils.int2store(param) +
+ data)
+ return packet
+
+ def make_stmt_execute(self, statement_id, data=(), parameters=(),
+ flags=0, long_data_used=None, charset='utf8',
+ query_attrs=None, converter_str_fallback=False):
+ """Make a MySQL packet with the Statement Execute command"""
+ iteration_count = 1
+ null_bitmap = [0] * ((len(data) + 7) // 8)
+ values = []
+ types = []
+ packed = b''
+ data_len = len(data)
+ query_attr_names = []
+ flags = flags if not query_attrs else flags + PARAMETER_COUNT_AVAILABLE
+ if charset == 'utf8mb4':
+ charset = 'utf8'
+ if long_data_used is None:
+ long_data_used = {}
+ if query_attrs:
+ data = list(data)
+ for _, attr_val in query_attrs:
+ data.append(attr_val)
+ null_bitmap = [0] * ((len(data) + 7) // 8)
+ if parameters or data:
+ if data_len != len(parameters):
+ raise errors.InterfaceError(
+ "Failed executing prepared statement: data values does not"
+ " match number of parameters")
+ for pos, _ in enumerate(data):
+ value = data[pos]
+ _flags = 0
+ if value is None:
+ null_bitmap[(pos // 8)] |= 1 << (pos % 8)
+ types.append(utils.int1store(FieldType.NULL) +
+ utils.int1store(_flags))
+ continue
+ elif pos in long_data_used:
+ if long_data_used[pos][0]:
+ # We suppose binary data
+ field_type = FieldType.BLOB
+ else:
+ # We suppose text data
+ field_type = FieldType.STRING
+ elif isinstance(value, int):
+ (packed, field_type,
+ _flags) = self._prepare_binary_integer(value)
+ values.append(packed)
+ elif isinstance(value, str):
+ value = value.encode(charset)
+ values.append(utils.lc_int(len(value)) + value)
+ field_type = FieldType.VARCHAR
+ elif isinstance(value, bytes):
+ values.append(utils.lc_int(len(value)) + value)
+ field_type = FieldType.BLOB
+ elif isinstance(value, Decimal):
+ values.append(
+ utils.lc_int(len(str(value).encode(
+ charset))) + str(value).encode(charset))
+ field_type = FieldType.DECIMAL
+ elif isinstance(value, float):
+ values.append(struct.pack('<d', value))
+ field_type = FieldType.DOUBLE
+ elif isinstance(value, (datetime.datetime, datetime.date)):
+ (packed, field_type) = self._prepare_binary_timestamp(
+ value)
+ values.append(packed)
+ elif isinstance(value, (datetime.timedelta, datetime.time)):
+ (packed, field_type) = self._prepare_binary_time(value)
+ values.append(packed)
+ elif converter_str_fallback:
+ value = str(value).encode(charset)
+ values.append(utils.lc_int(len(value)) + value)
+ field_type = FieldType.STRING
+ else:
+ raise errors.ProgrammingError(
+ "MySQL binary protocol can not handle "
+ "'{classname}' objects".format(
+ classname=value.__class__.__name__))
+ types.append(utils.int1store(field_type) +
+ utils.int1store(_flags))
+ if query_attrs and pos+1 > data_len:
+ name = query_attrs[pos - data_len][0].encode(charset)
+ query_attr_names.append(
+ utils.lc_int(len(name)) + name)
+ packet = (
+ utils.int4store(statement_id) +
+ utils.int1store(flags) +
+ utils.int4store(iteration_count))
+
+ # if (num_params > 0 || (CLIENT_QUERY_ATTRIBUTES \
+ # && (flags & PARAMETER_COUNT_AVAILABLE)) {
+ if query_attrs is not None:
+ parameter_count = data_len + len(query_attrs)
+ else:
+ parameter_count = data_len
+ if parameter_count:
+ # if CLIENT_QUERY_ATTRIBUTES is on
+ if query_attrs is not None:
+ packet += utils.lc_int(parameter_count)
+
+ packet += (
+ b''.join([struct.pack('B', bit) for bit in null_bitmap]) +
+ utils.int1store(1))
+ count = 0
+ for a_type in types:
+ packet += a_type
+ # if CLIENT_QUERY_ATTRIBUTES is on {
+ # string<lenenc> parameter_name Name of the parameter
+ # or empty if not present
+ # } if CLIENT_QUERY_ATTRIBUTES is on
+ if query_attrs is not None:
+ if count+1 > data_len:
+ packet += query_attr_names[count - data_len]
+ else:
+ packet += b'\x00'
+ count+=1
+
+ for a_value in values:
+ packet += a_value
+
+ return packet
+
+ def parse_auth_switch_request(self, packet):
+ """Parse a MySQL AuthSwitchRequest-packet"""
+ if not packet[4] == 254:
+ raise errors.InterfaceError(
+ "Failed parsing AuthSwitchRequest packet")
+
+ (packet, plugin_name) = utils.read_string(packet[5:], end=b'\x00')
+ if packet and packet[-1] == 0:
+ packet = packet[:-1]
+
+ return plugin_name.decode('utf8'), packet
+
+ def parse_auth_more_data(self, packet):
+ """Parse a MySQL AuthMoreData-packet"""
+ if not packet[4] == 1:
+ raise errors.InterfaceError(
+ "Failed parsing AuthMoreData packet")
+
+ return packet[5:]
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/utils.py b/monEnvTP/lib/python3.8/site-packages/mysql/connector/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..fa3ae22474552ddd9562a8287cb4cb858942113b
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql/connector/utils.py
@@ -0,0 +1,634 @@
+# Copyright (c) 2009, 2021, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""Utilities
+"""
+
+import os
+import subprocess
+from stringprep import (in_table_a1, in_table_b1, in_table_c11, in_table_c12,
+ in_table_c21_c22, in_table_c3, in_table_c4, in_table_c5,
+ in_table_c6, in_table_c7, in_table_c8, in_table_c9,
+ in_table_c12, in_table_d1, in_table_d2)
+import platform
+import struct
+import sys
+import unicodedata
+
+from decimal import Decimal
+from functools import lru_cache
+
+from .custom_types import HexLiteral
+
+
+__MYSQL_DEBUG__ = False
+
+NUMERIC_TYPES = (int, float, Decimal, HexLiteral)
+
+
+def intread(buf):
+ """Unpacks the given buffer to an integer"""
+ try:
+ if isinstance(buf, int):
+ return buf
+ length = len(buf)
+ if length == 1:
+ return buf[0]
+ elif length <= 4:
+ tmp = buf + b'\x00'*(4-length)
+ return struct.unpack('<I', tmp)[0]
+ tmp = buf + b'\x00'*(8-length)
+ return struct.unpack('<Q', tmp)[0]
+ except:
+ raise
+
+
+def int1store(i):
+ """
+ Takes an unsigned byte (1 byte) and packs it as a bytes-object.
+
+ Returns string.
+ """
+ if i < 0 or i > 255:
+ raise ValueError('int1store requires 0 <= i <= 255')
+ else:
+ return bytearray(struct.pack('<B', i))
+
+
+def int2store(i):
+ """
+ Takes an unsigned short (2 bytes) and packs it as a bytes-object.
+
+ Returns string.
+ """
+ if i < 0 or i > 65535:
+ raise ValueError('int2store requires 0 <= i <= 65535')
+ else:
+ return bytearray(struct.pack('<H', i))
+
+
+def int3store(i):
+ """
+ Takes an unsigned integer (3 bytes) and packs it as a bytes-object.
+
+ Returns string.
+ """
+ if i < 0 or i > 16777215:
+ raise ValueError('int3store requires 0 <= i <= 16777215')
+ else:
+ return bytearray(struct.pack('<I', i)[0:3])
+
+
+def int4store(i):
+ """
+ Takes an unsigned integer (4 bytes) and packs it as a bytes-object.
+
+ Returns string.
+ """
+ if i < 0 or i > 4294967295:
+ raise ValueError('int4store requires 0 <= i <= 4294967295')
+ else:
+ return bytearray(struct.pack('<I', i))
+
+
+def int8store(i):
+ """
+ Takes an unsigned integer (8 bytes) and packs it as string.
+
+ Returns string.
+ """
+ if i < 0 or i > 18446744073709551616:
+ raise ValueError('int8store requires 0 <= i <= 2^64')
+ else:
+ return bytearray(struct.pack('<Q', i))
+
+
+def intstore(i):
+ """
+ Takes an unsigned integer and packs it as a bytes-object.
+
+ This function uses int1store, int2store, int3store,
+ int4store or int8store depending on the integer value.
+
+ returns string.
+ """
+ if i < 0 or i > 18446744073709551616:
+ raise ValueError('intstore requires 0 <= i <= 2^64')
+
+ if i <= 255:
+ formed_string = int1store
+ elif i <= 65535:
+ formed_string = int2store
+ elif i <= 16777215:
+ formed_string = int3store
+ elif i <= 4294967295:
+ formed_string = int4store
+ else:
+ formed_string = int8store
+
+ return formed_string(i)
+
+
+def lc_int(i):
+ """
+ Takes an unsigned integer and packs it as bytes,
+ with the information of how much bytes the encoded int takes.
+ """
+ if i < 0 or i > 18446744073709551616:
+ raise ValueError('Requires 0 <= i <= 2^64')
+
+ if i < 251:
+ return bytearray(struct.pack('<B', i))
+ elif i <= 65535:
+ return b'\xfc' + bytearray(struct.pack('<H', i))
+ elif i <= 16777215:
+ return b'\xfd' + bytearray(struct.pack('<I', i)[0:3])
+
+ return b'\xfe' + bytearray(struct.pack('<Q', i))
+
+
+def read_bytes(buf, size):
+ """
+ Reads bytes from a buffer.
+
+ Returns a tuple with buffer less the read bytes, and the bytes.
+ """
+ res = buf[0:size]
+ return (buf[size:], res)
+
+
+def read_lc_string(buf):
+ """
+ Takes a buffer and reads a length coded string from the start.
+
+ This is how Length coded strings work
+
+ If the string is 250 bytes long or smaller, then it looks like this:
+
+ <-- 1b -->
+ +----------+-------------------------
+ | length | a string goes here
+ +----------+-------------------------
+
+ If the string is bigger than 250, then it looks like this:
+
+ <- 1b -><- 2/3/8 ->
+ +------+-----------+-------------------------
+ | type | length | a string goes here
+ +------+-----------+-------------------------
+
+ if type == \xfc:
+ length is code in next 2 bytes
+ elif type == \xfd:
+ length is code in next 3 bytes
+ elif type == \xfe:
+ length is code in next 8 bytes
+
+ NULL has a special value. If the buffer starts with \xfb then
+ it's a NULL and we return None as value.
+
+ Returns a tuple (truncated buffer, bytes).
+ """
+ if buf[0] == 251: # \xfb
+ # NULL value
+ return (buf[1:], None)
+
+ length = lsize = 0
+ fst = buf[0]
+
+ if fst <= 250: # \xFA
+ length = fst
+ return (buf[1 + length:], buf[1:length + 1])
+ elif fst == 252:
+ lsize = 2
+ elif fst == 253:
+ lsize = 3
+ if fst == 254:
+ lsize = 8
+
+ length = intread(buf[1:lsize + 1])
+ return (buf[lsize + length + 1:], buf[lsize + 1:length + lsize + 1])
+
+
+def read_lc_string_list(buf):
+    """Reads all length encoded strings from the given buffer
+
+    Each element uses the MySQL length-coded-string wire format (see
+    read_lc_string). A 0xfb prefix byte means SQL NULL and yields None;
+    a 0xff prefix byte means the buffer is a MySQL error packet (e.g.
+    error 1317) and the whole function returns None.
+
+    Returns a list of bytes
+    (NOTE(review): actually a tuple of bytes, or None on error.)
+    """
+    byteslst = []
+
+    # Prefix byte -> number of following bytes that hold the length.
+    sizes = {252: 2, 253: 3, 254: 8}
+
+    buf_len = len(buf)
+    pos = 0
+
+    while pos < buf_len:
+        first = buf[pos]
+        if first == 255:
+            # Special case when MySQL error 1317 is returned by MySQL.
+            # We simply return None.
+            return None
+        if first == 251:
+            # NULL value
+            byteslst.append(None)
+            pos += 1
+        else:
+            if first <= 250:
+                # Short form: the prefix byte is the length itself.
+                length = first
+                byteslst.append(buf[(pos + 1):length + (pos + 1)])
+                pos += 1 + length
+            else:
+                lsize = 0
+                try:
+                    lsize = sizes[first]
+                except KeyError:
+                    # Unknown prefix byte: malformed buffer.
+                    return None
+                length = intread(buf[(pos + 1):lsize + (pos + 1)])
+                byteslst.append(
+                    buf[pos + 1 + lsize:length + lsize + (pos + 1)])
+                pos += 1 + lsize + length
+
+    return tuple(byteslst)
+
+
+def read_string(buf, end=None, size=None):
+    """
+    Reads a string up until a character or for a given size.
+
+    Exactly one of 'end' (a terminator byte value, not included in the
+    result) or 'size' (number of bytes to read) should be given; 'end'
+    takes precedence when both are set. Raises ValueError when neither
+    is given or when 'end' is not present in the buffer.
+
+    Returns a tuple (truncated buffer, string).
+    """
+    if end is None and size is None:
+        raise ValueError('read_string() needs either end or size')
+
+    if end is not None:
+        try:
+            idx = buf.index(end)
+        except ValueError:
+            raise ValueError("end byte not present in buffer")
+        # Skip the terminator byte in the returned remainder.
+        return (buf[idx + 1:], buf[0:idx])
+    elif size is not None:
+        return read_bytes(buf, size)
+
+    # Defensive: unreachable given the guard above.
+    raise ValueError('read_string() needs either end or size (weird)')
+
+
+def read_int(buf, size):
+ """Read an integer from buffer
+
+ Returns a tuple (truncated buffer, int)
+ """
+
+ try:
+ res = intread(buf[0:size])
+ except:
+ raise
+
+ return (buf[size:], res)
+
+
+def read_lc_int(buf):
+    """
+    Takes a buffer and reads a length encoded integer from the start.
+
+    The first byte selects the encoding: a value < 251 is the integer
+    itself, 0xfb (251) is NULL, 0xfc a 2-byte integer, 0xfd a 3-byte
+    integer and 0xfe an 8-byte integer (all little-endian).
+
+    Returns a tuple with buffer less the integer and the integer read.
+    """
+    if not buf:
+        raise ValueError("Empty buffer.")
+
+    lcbyte = buf[0]
+    if lcbyte == 251:
+        # \xfb marks a NULL value.
+        return (buf[1:], None)
+    elif lcbyte < 251:
+        return (buf[1:], int(lcbyte))
+    elif lcbyte == 252:
+        # '<xH': skip the prefix byte, read a 2-byte unsigned short.
+        return (buf[3:], struct.unpack('<xH', buf[0:3])[0])
+    elif lcbyte == 253:
+        # 3-byte integer: zero-pad to 4 bytes and read as unsigned int.
+        return (buf[4:], struct.unpack('<I', buf[1:4] + b'\x00')[0])
+    elif lcbyte == 254:
+        # '<xQ': skip the prefix byte, read an 8-byte unsigned long long.
+        return (buf[9:], struct.unpack('<xQ', buf[0:9])[0])
+    else:
+        # 255 (\xff) is not a valid length encoded integer prefix.
+        raise ValueError("Failed reading length encoded integer")
+
+
+#
+# For debugging
+#
+def _digest_buffer(buf):
+    """Debug function for showing buffers as a '\\x..' hex-escape string"""
+    if not isinstance(buf, str):
+        # bytes/bytearray: iterating yields ints directly.
+        return ''.join(["\\x%02x" % c for c in buf])
+    # str: convert each character to its code point first.
+    return ''.join(["\\x%02x" % ord(c) for c in buf])
+
+
+def print_buffer(abuffer, prefix=None, limit=30):
+    """Debug function printing output of _digest_buffer()
+
+    When 'prefix' is given it is prepended to the dump and 'limit' caps
+    the number of bytes shown (falsy or non-positive disables the cap).
+    """
+    if prefix:
+        if limit and limit > 0:
+            digest = _digest_buffer(abuffer[0:limit])
+        else:
+            digest = _digest_buffer(abuffer)
+        print(prefix + ': ' + digest)
+    else:
+        # NOTE(review): 'limit' is ignored when no prefix is given —
+        # confirm whether that is intended.
+        print(_digest_buffer(abuffer))
+
+
+def _parse_os_release():
+    """Parse the contents of /etc/os-release file.
+
+    Lines are expected as KEY=VALUE; lines without exactly one '=' are
+    skipped. Keys are lower-cased and surrounding double quotes are
+    stripped from values.
+
+    Returns:
+        A dictionary containing release information.
+    """
+    distro = {}
+    os_release_file = os.path.join("/etc", "os-release")
+    if not os.path.exists(os_release_file):
+        # File absent (non-systemd distro or not Linux): empty result.
+        return distro
+    with open(os_release_file) as file_obj:
+        for line in file_obj:
+            key_value = line.split("=")
+            if len(key_value) != 2:
+                # Skip blanks, comments and values containing '='.
+                continue
+            key = key_value[0].lower()
+            value = key_value[1].rstrip("\n").strip('"')
+            distro[key] = value
+    return distro
+
+
+def _parse_lsb_release():
+    """Parse the contents of /etc/lsb-release file.
+
+    Lines are expected as KEY=VALUE; lines without exactly one '=' are
+    skipped. Keys are lower-cased and surrounding double quotes are
+    stripped from values.
+
+    Returns:
+        A dictionary containing release information (empty when the
+        file does not exist).
+    """
+    distro = {}
+    lsb_release_file = os.path.join("/etc", "lsb-release")
+    if os.path.exists(lsb_release_file):
+        with open(lsb_release_file) as file_obj:
+            for line in file_obj:
+                key_value = line.split("=")
+                if len(key_value) != 2:
+                    # Skip blanks, comments and values containing '='.
+                    continue
+                key = key_value[0].lower()
+                value = key_value[1].rstrip("\n").strip('"')
+                distro[key] = value
+    return distro
+
+
+def _parse_lsb_release_command():
+    """Parse the output of the lsb_release command.
+
+    Returns:
+        A dictionary containing release information, or None when the
+        lsb_release binary cannot be executed.
+    """
+    distro = {}
+    with open(os.devnull, "w") as devnull:
+        try:
+            stdout = subprocess.check_output(
+                ("lsb_release", "-a"), stderr=devnull)
+        except OSError:
+            # Command not installed or not executable.
+            # NOTE(review): subprocess.CalledProcessError (non-zero exit)
+            # is not caught here and would propagate — confirm intended.
+            return None
+        lines = stdout.decode(sys.getfilesystemencoding()).splitlines()
+        for line in lines:
+            key_value = line.split(":")
+            if len(key_value) != 2:
+                # Skip headers/blanks and values containing ':'.
+                continue
+            key = key_value[0].replace(" ", "_").lower()
+            value = key_value[1].strip("\t")
+            distro[key] = value
+    return distro
+
+
+def linux_distribution():
+    """Tries to determine the name of the Linux OS distribution name.
+
+    First tries to get information from the ``/etc/lsb-release`` file.
+    If that fails, tries the ``lsb_release`` command, and finally the
+    ``/etc/os-release`` file.
+
+    (The previous docstring claimed os-release was consulted first; the
+    code below checks lsb-release first.)
+
+    Returns:
+        A tuple with (`name`, `version`, `codename`)
+    """
+    distro = _parse_lsb_release()
+    if distro:
+        return (distro.get("distrib_id", ""),
+                distro.get("distrib_release", ""),
+                distro.get("distrib_codename", ""))
+
+    distro = _parse_lsb_release_command()
+    if distro:
+        return (distro.get("distributor_id", ""),
+                distro.get("release", ""),
+                distro.get("codename", ""))
+
+    distro = _parse_os_release()
+    if distro:
+        return (distro.get("name", ""),
+                distro.get("version_id", ""),
+                distro.get("version_codename", ""))
+
+    # Detection failed everywhere (e.g. non-Linux platform).
+    return ("", "", "")
+
+
+def _get_unicode_read_direction(unicode_str):
+    """Get the read direction of the unicode string.
+
+    We assume that the direction is "L-to-R" if the first character does not
+    indicate the direction is "R-to-L" or an "AL" (Arabic Letter).
+
+    Returns "R-to-L" or "L-to-R".
+    """
+    # "R" and "AL" are the Unicode bidirectional classes for
+    # right-to-left and Arabic-letter characters respectively.
+    if unicode_str and unicodedata.bidirectional(unicode_str[0]) in ("R", "AL"):
+        return "R-to-L"
+    return "L-to-R"
+
+
+def _get_unicode_direction_rule(unicode_str):
+ """
+ 1) The characters in section 5.8 MUST be prohibited.
+
+ 2) If a string contains any RandALCat character, the string MUST NOT
+ contain any LCat character.
+
+ 3) If a string contains any RandALCat character, a RandALCat
+ character MUST be the first character of the string, and a
+ RandALCat character MUST be the last character of the string.
+ """
+ read_dir = _get_unicode_read_direction(unicode_str)
+
+ # point 3)
+ if read_dir == "R-to-L":
+ if not (in_table_d1(unicode_str[0]) and in_table_d1(unicode_str[-1])):
+ raise ValueError("Invalid unicode Bidirectional sequence, if the "
+ "first character is RandALCat, the final character"
+ "must be RandALCat too.")
+ # characters from in_table_d2 are prohibited.
+ return {"Bidirectional Characters requirement 2 [StringPrep, d2]":
+ in_table_d2}
+
+ # characters from in_table_d1 are prohibited.
+ return {"Bidirectional Characters requirement 2 [StringPrep, d2]":
+ in_table_d1}
+
+
+def validate_normalized_unicode_string(normalized_str):
+    """Check for Prohibited Output according to rfc4013 profile.
+
+    This profile specifies the following characters as prohibited input:
+
+    - Non-ASCII space characters [StringPrep, C.1.2]
+    - ASCII control characters [StringPrep, C.2.1]
+    - Non-ASCII control characters [StringPrep, C.2.2]
+    - Private Use characters [StringPrep, C.3]
+    - Non-character code points [StringPrep, C.4]
+    - Surrogate code points [StringPrep, C.5]
+    - Inappropriate for plain text characters [StringPrep, C.6]
+    - Inappropriate for canonical representation characters [StringPrep, C.7]
+    - Change display properties or deprecated characters [StringPrep, C.8]
+    - Tagging characters [StringPrep, C.9]
+
+    In addition of checking of Bidirectional Characters [StringPrep, Section 6]
+    and the Unassigned Code Points [StringPrep, A.1].
+
+    Returns:
+        A tuple ("prohibited character", "broken rule") when a rule is
+        violated, otherwise None.
+    """
+    # Rule description -> stringprep table membership predicate.
+    rules = {
+        "Space characters that contains the ASCII code points": in_table_c11,
+        "Space characters non-ASCII code points": in_table_c12,
+        "Unassigned Code Points [StringPrep, A.1]": in_table_a1,
+        "Non-ASCII space characters [StringPrep, C.1.2]": in_table_c12,
+        "ASCII control characters [StringPrep, C.2.1]": in_table_c21_c22,
+        "Private Use characters [StringPrep, C.3]": in_table_c3,
+        "Non-character code points [StringPrep, C.4]": in_table_c4,
+        "Surrogate code points [StringPrep, C.5]": in_table_c5,
+        "Inappropriate for plain text characters [StringPrep, C.6]": in_table_c6,
+        "Inappropriate for canonical representation characters [StringPrep, C.7]": in_table_c7,
+        "Change display properties or deprecated characters [StringPrep, C.8]": in_table_c8,
+        "Tagging characters [StringPrep, C.9]": in_table_c9
+    }
+
+    try:
+        rules.update(_get_unicode_direction_rule(normalized_str))
+    except ValueError as err:
+        # Bidirectional requirement 3 violated: report the whole string.
+        return normalized_str, str(err)
+
+    for char in normalized_str:
+        for rule in rules:
+            # U+0020 (space) is explicitly allowed by the SASLprep profile.
+            if rules[rule](char) and char != u' ':
+                return char, rule
+
+    return None
+
+
+def normalize_unicode_string(a_string):
+    """normalizes a unicode string according to rfc4013
+
+    Normalization of a unicode string according to rfc4013: The SASLprep profile
+    of the "stringprep" algorithm.
+
+    Normalization Unicode equivalence is the specification by the Unicode
+    character encoding standard that some sequences of code points represent
+    essentially the same character.
+
+    This method normalizes using the Normalization Form Compatibility
+    Composition (NFKC), as described in rfc4013 2.2.
+
+    Returns:
+        Normalized unicode string according to rfc4013.
+    """
+    # Per rfc4013 2.1. Mapping
+    # non-ASCII space characters [StringPrep, C.1.2] are mapped to ' ' (U+0020)
+    # "commonly mapped to nothing" characters [StringPrep, B.1] are mapped to ''
+    nstr_list = [
+        u' ' if in_table_c12(char) else u'' if in_table_b1(char) else char
+        for char in a_string]
+
+    nstr = u''.join(nstr_list)
+
+    # Per rfc4013 2.2. Use NFKC Normalization Form Compatibility Composition
+    # Characters are decomposed by compatibility, then recomposed by canonical
+    # equivalence.
+    nstr = unicodedata.normalize('NFKC', nstr)
+    if not nstr:
+        # Normalization results in empty string.
+        return u''
+
+    return nstr
+
+
+def make_abc(base_class):
+    """Decorator used to create a abstract base class.
+
+    We use this decorator to create abstract base classes instead of
+    using the abc-module. The decorator makes it possible to do the
+    same in both Python v2 and v3 code.
+
+    'base_class' is the metaclass (e.g. abc.ABCMeta) used to rebuild
+    the decorated class.
+    """
+    def wrapper(class_):
+        """Rebuild class_ through base_class, preserving its attributes."""
+        attrs = class_.__dict__.copy()
+        for attr in '__dict__', '__weakref__':
+            attrs.pop(attr, None)  # ignore missing attributes
+
+        bases = class_.__bases__
+        # Keep the original class first in the bases so isinstance
+        # checks against it still succeed.
+        bases = (class_,) + bases
+        return base_class(class_.__name__, bases, attrs)
+    return wrapper
+
+
+def init_bytearray(payload=b'', encoding='utf-8'):
+    """Initialize a bytearray from the payload.
+
+    Accepts a bytearray (returned as-is), an int (yields a zero-filled
+    bytearray of that length), bytes, or a str (encoded with
+    'encoding'). Raises ValueError for any other type.
+    """
+    if isinstance(payload, bytearray):
+        return payload
+    if isinstance(payload, int):
+        # bytearray(n) creates n zero bytes.
+        return bytearray(payload)
+    if not isinstance(payload, bytes):
+        try:
+            return bytearray(payload.encode(encoding=encoding))
+        except AttributeError:
+            # No .encode attribute: not a str (nor bytes/bytearray/int).
+            raise ValueError("payload must be a str or bytes")
+
+    return bytearray(payload)
+
+
+@lru_cache()
+def get_platform():
+    """Return a dict with the platform arch and OS version.
+
+    Cached: computed once per process via lru_cache.
+
+    Returns:
+        dict with keys "arch" (e.g. "x86_64" or "i386") and "version"
+        (e.g. "Windows-<build>", "macOS-<ver>" or "<distro>-<release>").
+    """
+    plat = {"arch": None, "version": None}
+    if os.name == "nt":
+        if "64" in platform.architecture()[0]:
+            plat["arch"] = "x86_64"
+        elif "32" in platform.architecture()[0]:
+            plat["arch"] = "i386"
+        else:
+            # Fallback: store the raw architecture tuple as-is.
+            plat["arch"] = platform.architecture()
+        plat["version"] = "Windows-{}".format(platform.win32_ver()[1])
+    else:
+        plat["arch"] = platform.machine()
+        if platform.system() == "Darwin":
+            plat["version"] = "{}-{}".format("macOS", platform.mac_ver()[0])
+        else:
+            # e.g. "Ubuntu-20.04"; "-" joined empty strings when the
+            # distribution could not be detected.
+            plat["version"] = "-".join(linux_distribution()[0:2])
+
+    return plat
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/connector/version.py b/monEnvTP/lib/python3.8/site-packages/mysql/connector/version.py
new file mode 100644
index 0000000000000000000000000000000000000000..95e518ac5a185791e754e7ed5d3c44445bd0c1e6
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql/connector/version.py
@@ -0,0 +1,44 @@
+# Copyright (c) 2012, 2021, Oracle and/or its affiliates. All rights reserved.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""MySQL Connector/Python version information
+
+The file version.py gets installed and is available after installation
+as mysql.connector.version.
+"""
+
+# Version tuple: (major, minor, patch, pre-release tag, pre-release number).
+VERSION = (8, 0, 28, '', 1)
+
+# Include the pre-release suffix only when both tag and number are set;
+# here the tag is empty, so VERSION_TEXT is plain "8.0.28".
+if VERSION[3] and VERSION[4]:
+    VERSION_TEXT = '{0}.{1}.{2}{3}{4}'.format(*VERSION)
+else:
+    VERSION_TEXT = '{0}.{1}.{2}'.format(*VERSION[0:3])
+
+VERSION_EXTRA = ''
+LICENSE = 'GPLv2 with FOSS License Exception'
+EDITION = ''  # Added in package names, after the version
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/libcrypto.so.1.1 b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/libcrypto.so.1.1
new file mode 100755
index 0000000000000000000000000000000000000000..4e4586c27b984f50f3c5aba503234c813289920f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/libcrypto.so.1.1 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/libssl.so.1.1 b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/libssl.so.1.1
new file mode 100755
index 0000000000000000000000000000000000000000..7ddf353fe22bf3698efa281fd55294c0df057299
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/libssl.so.1.1 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/plugin/authentication_kerberos_client.so b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/plugin/authentication_kerberos_client.so
new file mode 100755
index 0000000000000000000000000000000000000000..840f866d3489208c87606c126f779784558b28bc
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/plugin/authentication_kerberos_client.so differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/plugin/authentication_ldap_sasl_client.so b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/plugin/authentication_ldap_sasl_client.so
new file mode 100755
index 0000000000000000000000000000000000000000..08d45ab330eb41dc57b2d1fb37ed1ea3113f7c03
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/plugin/authentication_ldap_sasl_client.so differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/plugin/authentication_oci_client.so b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/plugin/authentication_oci_client.so
new file mode 100755
index 0000000000000000000000000000000000000000..e623ae3c4b364bfcae2e972725aaa676d92c9d17
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/plugin/authentication_oci_client.so differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libcom_err.so.3 b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libcom_err.so.3
new file mode 100755
index 0000000000000000000000000000000000000000..ce6b46ceacc2bbb9debda975243167c90edc174f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libcom_err.so.3 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libcom_err.so.3.0 b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libcom_err.so.3.0
new file mode 100755
index 0000000000000000000000000000000000000000..ce6b46ceacc2bbb9debda975243167c90edc174f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libcom_err.so.3.0 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libcrypto.so.1.1 b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libcrypto.so.1.1
new file mode 100755
index 0000000000000000000000000000000000000000..4e4586c27b984f50f3c5aba503234c813289920f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libcrypto.so.1.1 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libgssapi_krb5.so.2 b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libgssapi_krb5.so.2
new file mode 100755
index 0000000000000000000000000000000000000000..f2ea6b91deddd6f5adf0bd13a9d653e6e6b8ee53
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libgssapi_krb5.so.2 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libgssapi_krb5.so.2.2 b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libgssapi_krb5.so.2.2
new file mode 100755
index 0000000000000000000000000000000000000000..f2ea6b91deddd6f5adf0bd13a9d653e6e6b8ee53
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libgssapi_krb5.so.2.2 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libk5crypto.so.3 b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libk5crypto.so.3
new file mode 100755
index 0000000000000000000000000000000000000000..6d04211ca938127d55e512896e7a3e7d108388e3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libk5crypto.so.3 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libk5crypto.so.3.1 b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libk5crypto.so.3.1
new file mode 100755
index 0000000000000000000000000000000000000000..6d04211ca938127d55e512896e7a3e7d108388e3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libk5crypto.so.3.1 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libkrb5.so.3 b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libkrb5.so.3
new file mode 100755
index 0000000000000000000000000000000000000000..d5977f92686f2eab79cdbb8fe500e9f02057b643
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libkrb5.so.3 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libkrb5.so.3.3 b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libkrb5.so.3.3
new file mode 100755
index 0000000000000000000000000000000000000000..d5977f92686f2eab79cdbb8fe500e9f02057b643
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libkrb5.so.3.3 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libkrb5support.so.0 b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libkrb5support.so.0
new file mode 100755
index 0000000000000000000000000000000000000000..17f7ec893f0f3a3a7c9ec8f990aeea24d33b90ab
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libkrb5support.so.0 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libkrb5support.so.0.1 b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libkrb5support.so.0.1
new file mode 100755
index 0000000000000000000000000000000000000000..17f7ec893f0f3a3a7c9ec8f990aeea24d33b90ab
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libkrb5support.so.0.1 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libsasl2.so.3 b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libsasl2.so.3
new file mode 100755
index 0000000000000000000000000000000000000000..4c015c5034afc20ddbf5229f4b47e2032246b682
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libsasl2.so.3 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libsasl2.so.3.0.0 b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libsasl2.so.3.0.0
new file mode 100755
index 0000000000000000000000000000000000000000..4c015c5034afc20ddbf5229f4b47e2032246b682
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libsasl2.so.3.0.0 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libssl.so.1.1 b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libssl.so.1.1
new file mode 100755
index 0000000000000000000000000000000000000000..7ddf353fe22bf3698efa281fd55294c0df057299
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/libssl.so.1.1 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libanonymous.so b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libanonymous.so
new file mode 100755
index 0000000000000000000000000000000000000000..3297921b3cf8d2e3e3b2893fe2978e2e45f061bd
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libanonymous.so differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libanonymous.so.3 b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libanonymous.so.3
new file mode 100755
index 0000000000000000000000000000000000000000..3297921b3cf8d2e3e3b2893fe2978e2e45f061bd
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libanonymous.so.3 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libanonymous.so.3.0.0 b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libanonymous.so.3.0.0
new file mode 100755
index 0000000000000000000000000000000000000000..3297921b3cf8d2e3e3b2893fe2978e2e45f061bd
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libanonymous.so.3.0.0 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libcrammd5.so b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libcrammd5.so
new file mode 100755
index 0000000000000000000000000000000000000000..a53f7726c84ec5ff6677ab969ee7aae3a725c3c2
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libcrammd5.so differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libcrammd5.so.3 b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libcrammd5.so.3
new file mode 100755
index 0000000000000000000000000000000000000000..a53f7726c84ec5ff6677ab969ee7aae3a725c3c2
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libcrammd5.so.3 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libcrammd5.so.3.0.0 b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libcrammd5.so.3.0.0
new file mode 100755
index 0000000000000000000000000000000000000000..a53f7726c84ec5ff6677ab969ee7aae3a725c3c2
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libcrammd5.so.3.0.0 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libdigestmd5.so b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libdigestmd5.so
new file mode 100755
index 0000000000000000000000000000000000000000..a16278756b0158c16aaedb0416b86061ed631b83
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libdigestmd5.so differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libdigestmd5.so.3 b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libdigestmd5.so.3
new file mode 100755
index 0000000000000000000000000000000000000000..a16278756b0158c16aaedb0416b86061ed631b83
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libdigestmd5.so.3 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libdigestmd5.so.3.0.0 b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libdigestmd5.so.3.0.0
new file mode 100755
index 0000000000000000000000000000000000000000..a16278756b0158c16aaedb0416b86061ed631b83
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libdigestmd5.so.3.0.0 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libgs2.so b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libgs2.so
new file mode 100755
index 0000000000000000000000000000000000000000..b6bef09cb659c5d8b75d8f455efa4de0cabb0743
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libgs2.so differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libgs2.so.3 b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libgs2.so.3
new file mode 100755
index 0000000000000000000000000000000000000000..b6bef09cb659c5d8b75d8f455efa4de0cabb0743
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libgs2.so.3 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libgs2.so.3.0.0 b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libgs2.so.3.0.0
new file mode 100755
index 0000000000000000000000000000000000000000..b6bef09cb659c5d8b75d8f455efa4de0cabb0743
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libgs2.so.3.0.0 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libgssapiv2.so b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libgssapiv2.so
new file mode 100755
index 0000000000000000000000000000000000000000..4edf4ea8146985e931a55c12897257d00ba79945
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libgssapiv2.so differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libgssapiv2.so.3 b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libgssapiv2.so.3
new file mode 100755
index 0000000000000000000000000000000000000000..4edf4ea8146985e931a55c12897257d00ba79945
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libgssapiv2.so.3 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libgssapiv2.so.3.0.0 b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libgssapiv2.so.3.0.0
new file mode 100755
index 0000000000000000000000000000000000000000..4edf4ea8146985e931a55c12897257d00ba79945
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libgssapiv2.so.3.0.0 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libplain.so b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libplain.so
new file mode 100755
index 0000000000000000000000000000000000000000..2097b802b25bd834d7004b4daa8c9a2959eaba39
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libplain.so differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libplain.so.3 b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libplain.so.3
new file mode 100755
index 0000000000000000000000000000000000000000..2097b802b25bd834d7004b4daa8c9a2959eaba39
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libplain.so.3 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libplain.so.3.0.0 b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libplain.so.3.0.0
new file mode 100755
index 0000000000000000000000000000000000000000..2097b802b25bd834d7004b4daa8c9a2959eaba39
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libplain.so.3.0.0 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libscram.so b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libscram.so
new file mode 100755
index 0000000000000000000000000000000000000000..a51c376937586bb86ec3fe777bf2a3d49da5c0cd
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libscram.so differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libscram.so.3 b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libscram.so.3
new file mode 100755
index 0000000000000000000000000000000000000000..a51c376937586bb86ec3fe777bf2a3d49da5c0cd
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libscram.so.3 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libscram.so.3.0.0 b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libscram.so.3.0.0
new file mode 100755
index 0000000000000000000000000000000000000000..a51c376937586bb86ec3fe777bf2a3d49da5c0cd
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysql/vendor/private/sasl2/libscram.so.3.0.0 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql_connector_python-8.0.28.dist-info/INSTALLER b/monEnvTP/lib/python3.8/site-packages/mysql_connector_python-8.0.28.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql_connector_python-8.0.28.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql_connector_python-8.0.28.dist-info/LICENSE.txt b/monEnvTP/lib/python3.8/site-packages/mysql_connector_python-8.0.28.dist-info/LICENSE.txt
new file mode 100644
index 0000000000000000000000000000000000000000..e9f9c3049968dc64ecc16d41554c8708ddc188be
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql_connector_python-8.0.28.dist-info/LICENSE.txt
@@ -0,0 +1,2945 @@
+Licensing Information User Manual
+
+MySQL Connector/Python 8.0
+ __________________________________________________________________
+
+Introduction
+
+ This License Information User Manual contains Oracle's product license
+ and other licensing information, including licensing information for
+ third-party software which may be included in this distribution of
+ MySQL Connector/Python 8.0.
+
+ Last updated: October 2021
+
+Licensing Information
+
+ This is a release of MySQL Connector/Python 8.0, brought to you by the
+ MySQL team at Oracle. This software is released under version 2 of the
+ GNU General Public License (GPLv2), as set forth below, with the
+ following additional permissions:
+
+ This distribution of MySQL Connector/Python 8.0 is distributed with
+ certain software (including but not limited to OpenSSL) that is
+ licensed under separate terms, as designated in a particular file or
+ component or in the license documentation. Without limiting your rights
+ under the GPLv2, the authors of MySQL hereby grant you an additional
+ permission to link the program and your derivative works with the
+ separately licensed software that they have included with the program.
+
+ Without limiting the foregoing grant of rights under the GPLv2 and
+ additional permission as to separately licensed software, this
+ Connector is also subject to the Universal FOSS Exception, version 1.0,
+ a copy of which is reproduced below and can also be found along with
+ its FAQ at http://oss.oracle.com/licenses/universal-foss-exception.
+
+ Copyright (c) 2012, 2021, Oracle and/or its affiliates.
+
+Election of GPLv2
+
+ For the avoidance of doubt, except that if any license choice other
+ than GPL or LGPL is available it will apply instead, Oracle elects to
+ use only the General Public License version 2 (GPLv2) at this time for
+ any software where a choice of GPL license versions is made available
+ with the language indicating that GPLv2 or any later version may be
+ used, or where a choice of which version of the GPL is applied is
+ otherwise unspecified.
+
+GNU General Public License Version 2.0, June 1991
+
+The following applies to all products licensed under the GNU General
+Public License, Version 2.0: You may not use the identified files
+except in compliance with the GNU General Public License, Version
+2.0 (the "License.") You may obtain a copy of the License at
+http://www.gnu.org/licenses/gpl-2.0.txt. A copy of the license is
+also reproduced below. Unless required by applicable law or agreed
+to in writing, software distributed under the License is distributed
+on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+either express or implied. See the License for the specific language
+governing permissions and limitations under the License.
+
+
+ ======================================================================
+ ======================================================================
+
+
+GNU GENERAL PUBLIC LICENSE
+Version 2, June 1991
+
+Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+Everyone is permitted to copy and distribute verbatim
+copies of this license document, but changing it is not
+allowed.
+
+ Preamble
+
+ The licenses for most software are designed to take away your
+freedom to share and change it. By contrast, the GNU General Public
+License is intended to guarantee your freedom to share and change free
+software--to make sure the software is free for all its users. This
+General Public License applies to most of the Free Software
+Foundation's software and to any other program whose authors commit to
+using it. (Some other Free Software Foundation software is covered by
+the GNU Lesser General Public License instead.) You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+this service if you wish), that you receive source code or can get it
+if you want it, that you can change the software or use pieces of it
+in new free programs; and that you know you can do these things.
+
+ To protect your rights, we need to make restrictions that forbid
+anyone to deny you these rights or to ask you to surrender the rights.
+These restrictions translate to certain responsibilities for you if you
+distribute copies of the software, or if you modify it.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must give the recipients all the rights that
+you have. You must make sure that they, too, receive or can get the
+source code. And you must show them these terms so they know their
+rights.
+
+ We protect your rights with two steps: (1) copyright the software,
+and (2) offer you this license which gives you legal permission to
+copy, distribute and/or modify the software.
+
+ Also, for each author's protection and ours, we want to make certain
+that everyone understands that there is no warranty for this free
+software. If the software is modified by someone else and passed on,
+we want its recipients to know that what they have is not the original,
+so that any problems introduced by others will not reflect on the
+original authors' reputations.
+
+ Finally, any free program is threatened constantly by software
+patents. We wish to avoid the danger that redistributors of a free
+program will individually obtain patent licenses, in effect making the
+program proprietary. To prevent this, we have made it clear that any
+patent must be licensed for everyone's free use or not licensed at all.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ GNU GENERAL PUBLIC LICENSE
+ TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+ 0. This License applies to any program or other work which contains
+a notice placed by the copyright holder saying it may be distributed
+under the terms of this General Public License. The "Program", below,
+refers to any such program or work, and a "work based on the Program"
+means either the Program or any derivative work under copyright law:
+that is to say, a work containing the Program or a portion of it,
+either verbatim or with modifications and/or translated into another
+language. (Hereinafter, translation is included without limitation in
+the term "modification".) Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope. The act of
+running the Program is not restricted, and the output from the Program
+is covered only if its contents constitute a work based on the
+Program (independent of having been made by running the Program).
+Whether that is true depends on what the Program does.
+
+ 1. You may copy and distribute verbatim copies of the Program's
+source code as you receive it, in any medium, provided that you
+conspicuously and appropriately publish on each copy an appropriate
+copyright notice and disclaimer of warranty; keep intact all the
+notices that refer to this License and to the absence of any warranty;
+and give any other recipients of the Program a copy of this License
+along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and
+you may at your option offer warranty protection in exchange for a fee.
+
+ 2. You may modify your copy or copies of the Program or any portion
+of it, thus forming a work based on the Program, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+ a) You must cause the modified files to carry prominent notices
+ stating that you changed the files and the date of any change.
+
+ b) You must cause any work that you distribute or publish, that in
+ whole or in part contains or is derived from the Program or any
+ part thereof, to be licensed as a whole at no charge to all third
+ parties under the terms of this License.
+
+ c) If the modified program normally reads commands interactively
+ when run, you must cause it, when started running for such
+ interactive use in the most ordinary way, to print or display an
+ announcement including an appropriate copyright notice and a
+ notice that there is no warranty (or else, saying that you provide
+ a warranty) and that users may redistribute the program under
+ these conditions, and telling the user how to view a copy of this
+ License. (Exception: if the Program itself is interactive but
+ does not normally print such an announcement, your work based on
+ the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole. If
+identifiable sections of that work are not derived from the Program,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works. But when you
+distribute the same sections as part of a whole which is a work based
+on the Program, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program
+with the Program (or with a work based on the Program) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+ 3. You may copy and distribute the Program (or a work based on it,
+under Section 2) in object code or executable form under the terms of
+Sections 1 and 2 above provided that you also do one of the following:
+
+ a) Accompany it with the complete corresponding machine-readable
+ source code, which must be distributed under the terms of Sections
+ 1 and 2 above on a medium customarily used for software
+ interchange; or,
+
+ b) Accompany it with a written offer, valid for at least three
+ years, to give any third party, for a charge no more than your
+ cost of physically performing source distribution, a complete
+ machine-readable copy of the corresponding source code, to be
+ distributed under the terms of Sections 1 and 2 above on a medium
+ customarily used for software interchange; or,
+
+ c) Accompany it with the information you received as to the offer
+ to distribute corresponding source code. (This alternative is
+ allowed only for noncommercial distribution and only if you
+ received the program in object code or executable form with such
+ an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for
+making modifications to it. For an executable work, complete source
+code means all the source code for all modules it contains, plus any
+associated interface definition files, plus the scripts used to
+control compilation and installation of the executable. However, as
+a special exception, the source code distributed need not include
+anything that is normally distributed (in either source or binary
+form) with the major components (compiler, kernel, and so on) of the
+operating system on which the executable runs, unless that component
+itself accompanies the executable.
+
+If distribution of executable or object code is made by offering
+access to copy from a designated place, then offering equivalent
+access to copy the source code from the same place counts as
+distribution of the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+ 4. You may not copy, modify, sublicense, or distribute the Program
+except as expressly provided under this License. Any attempt
+otherwise to copy, modify, sublicense or distribute the Program is
+void, and will automatically terminate your rights under this License.
+However, parties who have received copies, or rights, from you under
+this License will not have their licenses terminated so long as such
+parties remain in full compliance.
+
+ 5. You are not required to accept this License, since you have not
+signed it. However, nothing else grants you permission to modify or
+distribute the Program or its derivative works. These actions are
+prohibited by law if you do not accept this License. Therefore, by
+modifying or distributing the Program (or any work based on the
+Program), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Program or works based on it.
+
+ 6. Each time you redistribute the Program (or any work based on the
+Program), the recipient automatically receives a license from the
+original licensor to copy, distribute or modify the Program subject to
+these terms and conditions. You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties to
+this License.
+
+ 7. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Program at all. For example, if a patent
+license would not permit royalty-free redistribution of the Program by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under
+any particular circumstance, the balance of the section is intended to
+apply and the section as a whole is intended to apply in other
+circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system, which is
+implemented by public license practices. Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+ 8. If the distribution and/or use of the Program is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Program under this License
+may add an explicit geographical distribution limitation excluding
+those countries, so that distribution is permitted only in or among
+countries not thus excluded. In such case, this License incorporates
+the limitation as if written in the body of this License.
+
+ 9. The Free Software Foundation may publish revised and/or new
+versions of the General Public License from time to time. Such new
+versions will be similar in spirit to the present version, but may
+differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Program
+specifies a version number of this License which applies to it and
+"any later version", you have the option of following the terms and
+conditions either of that version or of any later version published by
+the Free Software Foundation. If the Program does not specify a
+version number of this License, you may choose any version ever
+published by the Free Software Foundation.
+
+ 10. If you wish to incorporate parts of the Program into other free
+programs whose distribution conditions are different, write to the
+author to ask for permission. For software which is copyrighted by the
+Free Software Foundation, write to the Free Software Foundation; we
+sometimes make exceptions for this. Our decision will be guided by the
+two goals of preserving the free status of all derivatives of our free
+software and of promoting the sharing and reuse of software generally.
+
+ NO WARRANTY
+
+ 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO
+WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
+EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
+OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND,
+EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS
+WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
+WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
+AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU
+FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
+CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
+PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
+RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
+FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF
+SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+DAMAGES.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+ <one line to give the program's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This program is free software; you can redistribute it and/or
+ modify it under the terms of the GNU General Public License as
+ published by the Free Software Foundation; either version 2 of
+
+ the License, or (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
+ 02110-1301 USA.
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this
+when it starts in an interactive mode:
+
+ Gnomovision version 69, Copyright (C) year name of author
+ Gnomovision comes with ABSOLUTELY NO WARRANTY; for details
+ type 'show w'. This is free software, and you are welcome
+ to redistribute it under certain conditions; type 'show c'
+ for details.
+
+The hypothetical commands 'show w' and 'show c' should show the
+appropriate parts of the General Public License. Of course, the
+commands you use may be called something other than 'show w' and
+'show c'; they could even be mouse-clicks or menu items--whatever
+suits your program.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the program, if
+necessary. Here is a sample; alter the names:
+
+ Yoyodyne, Inc., hereby disclaims all copyright interest in the
+ program 'Gnomovision' (which makes passes at compilers) written
+ by James Hacker.
+
+ <signature of Ty Coon>, 1 April 1989
+ Ty Coon, President of Vice
+
+This General Public License does not permit incorporating your program
+into proprietary programs. If your program is a subroutine library,
+you may consider it more useful to permit linking proprietary
+applications with the library. If this is what you want to do, use
+the GNU Lesser General Public License instead of this License.
+
+ ======================================================================
+ ======================================================================
+
+The Universal FOSS Exception, Version 1.0
+
+ In addition to the rights set forth in the other license(s) included in
+ the distribution for this software, data, and/or documentation
+ (collectively the "Software", and such licenses collectively with this
+ additional permission the "Software License"), the copyright holders
+ wish to facilitate interoperability with other software, data, and/or
+ documentation distributed with complete corresponding source under a
+ license that is OSI-approved and/or categorized by the FSF as free
+ (collectively "Other FOSS"). We therefore hereby grant the following
+ additional permission with respect to the use and distribution of the
+ Software with Other FOSS, and the constants, function signatures, data
+ structures and other invocation methods used to run or interact with
+ each of them (as to each, such software's "Interfaces"):
+
+ i. The Software's Interfaces may, to the extent permitted by the
+ license of the Other FOSS, be copied into, used and distributed in
+ the Other FOSS in order to enable interoperability, without
+ requiring a change to the license of the Other FOSS other than as
+ to any Interfaces of the Software embedded therein. The Software's
+ Interfaces remain at all times under the Software License,
+ including without limitation as used in the Other FOSS (which upon
+ any such use also then contains a portion of the Software under the
+ Software License).
+
+ ii. The Other FOSS's Interfaces may, to the extent permitted by the
+ license of the Other FOSS, be copied into, used and distributed in
+ the Software in order to enable interoperability, without requiring
+ that such Interfaces be licensed under the terms of the Software
+ License or otherwise altering their original terms, if this does
+ not require any portion of the Software other than such Interfaces
+ to be licensed under the terms other than the Software License.
+
+ iii. If only Interfaces and no other code is copied between the
+ Software and the Other FOSS in either direction, the use and/or
+ distribution of the Software with the Other FOSS shall not be
+ deemed to require that the Other FOSS be licensed under the license
+ of the Software, other than as to any Interfaces of the Software
+ copied into the Other FOSS. This includes, by way of example and
+ without limitation, statically or dynamically linking the Software
+ together with Other FOSS after enabling interoperability using the
+ Interfaces of one or both, and distributing the resulting
+ combination under different licenses for the respective portions
+ thereof. For avoidance of doubt, a license which is OSI-approved or
+ categorized by the FSF as free, includes, for the purpose of this
+ permission, such licenses with additional permissions, and any
+ license that has previously been so approved or categorized as
+ free, even if now deprecated or otherwise no longer recognized as
+ approved or free. Nothing in this additional permission grants any
+ right to distribute any portion of the Software on terms other than
+ those of the Software License or grants any additional permission
+ of any kind for use or distribution of the Software in conjunction
+ with software other than Other FOSS.
+
+ ======================================================================
+ ======================================================================
+
+Licenses for Third-Party Components
+
+ The following sections contain licensing information for libraries that
+ may be included with this product. We are thankful to all individuals
+ that have created these. Standard licenses referenced herein are
+ detailed in the Standard Licenses section.
+
+Cyrus SASL
+
+ Cyrus SASL
+
+* Copyright (c) 1994-2012 Carnegie Mellon University. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions
+* are met:
+*
+* 1. Redistributions of source code must retain the above copyright
+* notice, this list of conditions and the following disclaimer.
+*
+* 2. Redistributions in binary form must reproduce the above copyright
+* notice, this list of conditions and the following disclaimer in
+* the documentation and/or other materials provided with the
+* distribution.
+*
+* 3. The name "Carnegie Mellon University" must not be used to
+* endorse or promote products derived from this software without
+* prior written permission. For permission or any legal
+* details, please contact
+* Office of Technology Transfer
+* Carnegie Mellon University
+* 5000 Forbes Avenue
+* Pittsburgh, PA 15213-3890
+* (412) 268-4387, fax: (412) 268-7395
+* tech-transfer@andrew.cmu.edu
+*
+* 4. Redistributions of any form whatsoever must retain the following
+* acknowledgment:
+* "This product includes software developed by Computing Services
+* at Carnegie Mellon University (http://www.cmu.edu/computing/)."
+*
+* CARNEGIE MELLON UNIVERSITY DISCLAIMS ALL WARRANTIES WITH REGARD TO
+* THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+* AND FITNESS, IN NO EVENT SHALL CARNEGIE MELLON UNIVERSITY BE LIABLE
+* FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN
+* AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING
+* OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+ ======================================================================
+ ======================================================================
+
+Django 2.12 and 3.0
+
+Copyright (c) Django Software Foundation and individual contributors.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification,
+are permitted provided that the following conditions are met:
+
+ 1. Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+ 3. Neither the name of Django nor the names of its contributors may be
+ used to endorse or promote products derived from this software without
+ specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+***************
+Django is licensed under the three-clause BSD license; see the file
+LICENSE for details.
+
+Django includes code from the Python standard library, which is licensed
+under the Python license, a permissive open source license. The copyright and
+license is included below for compliance with Python's terms.
+
+----------------------------------------------------------------------
+
+Copyright (c) 2001-present Python Software Foundation; All Rights Reserved
+
+A. HISTORY OF THE SOFTWARE
+==========================
+
+Python was created in the early 1990s by Guido van Rossum at Stichting
+Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
+as a successor of a language called ABC. Guido remains Python's
+principal author, although it includes many contributions from others.
+
+In 1995, Guido continued his work on Python at the Corporation for
+National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
+in Reston, Virginia where he released several versions of the
+software.
+
+In May 2000, Guido and the Python core development team moved to
+BeOpen.com to form the BeOpen PythonLabs team. In October of the same
+year, the PythonLabs team moved to Digital Creations (now Zope
+Corporation, see http://www.zope.com). In 2001, the Python Software
+Foundation (PSF, see http://www.python.org/psf/) was formed, a
+non-profit organization created specifically to own Python-related
+Intellectual Property. Zope Corporation is a sponsoring member of
+the PSF.
+
+All Python releases are Open Source (see http://www.opensource.org for
+the Open Source Definition). Historically, most, but not all, Python
+releases have also been GPL-compatible; the table below summarizes
+the various releases.
+
+ Release Derived Year Owner GPL-
+ from compatible? (1)
+
+ 0.9.0 thru 1.2 1991-1995 CWI yes
+ 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes
+ 1.6 1.5.2 2000 CNRI no
+ 2.0 1.6 2000 BeOpen.com no
+ 1.6.1 1.6 2001 CNRI yes (2)
+ 2.1 2.0+1.6.1 2001 PSF no
+ 2.0.1 2.0+1.6.1 2001 PSF yes
+ 2.1.1 2.1+2.0.1 2001 PSF yes
+ 2.1.2 2.1.1 2002 PSF yes
+ 2.1.3 2.1.2 2002 PSF yes
+ 2.2 and above 2.1.1 2001-now PSF yes
+
+Footnotes:
+
+(1) GPL-compatible doesn't mean that we're distributing Python under
+ the GPL. All Python licenses, unlike the GPL, let you distribute
+ a modified version without making your changes open source. The
+ GPL-compatible licenses make it possible to combine Python with
+ other software that is released under the GPL; the others don't.
+
+(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
+ because its license has a choice of law clause. According to
+ CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
+ is "not incompatible" with the GPL.
+
+Thanks to the many outside volunteers who have worked under Guido's
+direction to make these releases possible.
+
+B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
+===============================================================
+
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+analyze, test, perform and/or display publicly, prepare derivative works,
+distribute, and otherwise use Python alone or in any derivative version,
+provided, however, that PSF's License Agreement and PSF's notice of copyright,
+i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
+2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018 Python Software Foundation;
+All Rights Reserved" are retained in Python alone or in any derivative version
+prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee. This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
+-------------------------------------------
+
+BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
+
+1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
+office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
+Individual or Organization ("Licensee") accessing and otherwise using
+this software in source or binary form and its associated
+documentation ("the Software").
+
+2. Subject to the terms and conditions of this BeOpen Python License
+Agreement, BeOpen hereby grants Licensee a non-exclusive,
+royalty-free, world-wide license to reproduce, analyze, test, perform
+and/or display publicly, prepare derivative works, distribute, and
+otherwise use the Software alone or in any derivative version,
+provided, however, that the BeOpen Python License is retained in the
+Software, alone or in any derivative version prepared by Licensee.
+
+3. BeOpen is making the Software available to Licensee on an "AS IS"
+basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
+SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
+AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
+DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+5. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+6. This License Agreement shall be governed by and interpreted in all
+respects by the law of the State of California, excluding conflict of
+law provisions. Nothing in this License Agreement shall be deemed to
+create any relationship of agency, partnership, or joint venture
+between BeOpen and Licensee. This License Agreement does not grant
+permission to use BeOpen trademarks or trade names in a trademark
+sense to endorse or promote products or services of Licensee, or any
+third party. As an exception, the "BeOpen Python" logos available at
+http://www.pythonlabs.com/logos.html may be used according to the
+permissions granted on that web page.
+
+7. By copying, installing or otherwise using the software, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
+---------------------------------------
+
+1. This LICENSE AGREEMENT is between the Corporation for National
+Research Initiatives, having an office at 1895 Preston White Drive,
+Reston, VA 20191 ("CNRI"), and the Individual or Organization
+("Licensee") accessing and otherwise using Python 1.6.1 software in
+source or binary form and its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, CNRI
+hereby grants Licensee a nonexclusive, royalty-free, world-wide
+license to reproduce, analyze, test, perform and/or display publicly,
+prepare derivative works, distribute, and otherwise use Python 1.6.1
+alone or in any derivative version, provided, however, that CNRI's
+License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
+1995-2001 Corporation for National Research Initiatives; All Rights
+Reserved" are retained in Python 1.6.1 alone or in any derivative
+version prepared by Licensee. Alternately, in lieu of CNRI's License
+Agreement, Licensee may substitute the following text (omitting the
+quotes): "Python 1.6.1 is made available subject to the terms and
+conditions in CNRI's License Agreement. This Agreement together with
+Python 1.6.1 may be located on the Internet using the following
+unique, persistent identifier (known as a handle): 1895.22/1013. This
+Agreement may also be obtained from a proxy server on the Internet
+using the following URL: http://hdl.handle.net/1895.22/1013".
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python 1.6.1 or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python 1.6.1.
+
+4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
+basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. This License Agreement shall be governed by the federal
+intellectual property law of the United States, including without
+limitation the federal copyright law, and, to the extent such
+U.S. federal law does not apply, by the law of the Commonwealth of
+Virginia, excluding Virginia's conflict of law provisions.
+Notwithstanding the foregoing, with regard to derivative works based
+on Python 1.6.1 that incorporate non-separable material that was
+previously distributed under the GNU General Public License (GPL), the
+law of the Commonwealth of Virginia shall govern this License
+Agreement only as to issues arising under or with respect to
+Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
+License Agreement shall be deemed to create any relationship of
+agency, partnership, or joint venture between CNRI and Licensee. This
+License Agreement does not grant permission to use CNRI trademarks or
+trade name in a trademark sense to endorse or promote products or
+services of Licensee, or any third party.
+
+8. By clicking on the "ACCEPT" button where indicated, or by copying,
+installing or otherwise using Python 1.6.1, Licensee agrees to be
+bound by the terms and conditions of this License Agreement.
+
+ ACCEPT
+
+CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
+--------------------------------------------------
+
+Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
+The Netherlands. All rights reserved.
+
+Permission to use, copy, modify, and distribute this software and its
+documentation for any purpose and without fee is hereby granted,
+provided that the above copyright notice appear in all copies and that
+both that copyright notice and this permission notice appear in
+supporting documentation, and that the name of Stichting Mathematisch
+Centrum or CWI not be used in advertising or publicity pertaining to
+distribution of the software without specific, written prior
+permission.
+
+STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
+THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
+FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+ ======================================================================
+ ======================================================================
+
+DNSPython
+
+DNSPython License
+
+Copyright (C) Dnspython Contributors
+
+Permission to use, copy, modify, and/or distribute this software for
+any purpose with or without fee is hereby granted, provided that the
+above copyright notice and this permission notice appear in all
+copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL
+WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
+AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL
+DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR
+PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
+
+Copyright (C) 2001-2017 Nominum, Inc.
+Copyright (C) Google Inc.
+
+Permission to use, copy, modify, and distribute this software and its
+documentation for any purpose with or without fee is hereby granted,
+provided that the above copyright notice and this permission notice
+appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+ ======================================================================
+ ======================================================================
+
+Google Protocol Buffers
+
+Copyright 2008 Google Inc. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+Code generated by the Protocol Buffer compiler is owned by the owner
+of the input file used when generating it. This code is not
+standalone and requires a support library to be linked with it. This
+support library is itself covered by the above license.
+
+ ======================================================================
+ ======================================================================
+
+GSSAPI
+
+ GSSAPI
+
+Copyright (c) 2014, The Python GSSAPI Team
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL
+WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES
+OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE
+FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY
+DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
+IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING
+OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+ ======================================================================
+ ======================================================================
+
+Kerberos5
+
+Kerberos5
+
+Copyright (C) 1985-2019 by the Massachusetts Institute of Technology.
+
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+* Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+Downloading of this software may constitute an export of cryptographic
+software from the United States of America that is subject to the
+United States Export Administration Regulations (EAR), 15 CFR 730-774.
+Additional laws or regulations may apply. It is the responsibility of
+the person or entity contemplating export to comply with all
+applicable export laws and regulations, including obtaining any
+required license from the U.S. government.
+
+The U.S. government prohibits export of encryption source code to
+certain countries and individuals, including, but not limited to, the
+countries of Cuba, Iran, North Korea, Sudan, Syria, and residents and
+nationals of those countries.
+
+Documentation components of this software distribution are licensed
+under a Creative Commons Attribution-ShareAlike 3.0 Unported License.
+(http://creativecommons.org/licenses/by-sa/3.0/)
+
+Individual source code files are copyright MIT, Cygnus Support,
+Novell, OpenVision Technologies, Oracle, Red Hat, Sun Microsystems,
+FundsXpress, and others.
+
+Project Athena, Athena, Athena MUSE, Discuss, Hesiod, Kerberos, Moira,
+and Zephyr are trademarks of the Massachusetts Institute of Technology
+(MIT). No commercial use of these trademarks may be made without
+prior written permission of MIT.
+
+"Commercial use" means use of a name in a product or other for-profit
+manner. It does NOT prevent a commercial firm from referring to the
+MIT trademarks in order to convey information (although in doing so,
+recognition of their trademark status should be given).
+
+======================================================================
+
+The following copyright and permission notice applies to the
+OpenVision Kerberos Administration system located in "kadmin/create",
+"kadmin/dbutil", "kadmin/passwd", "kadmin/server", "lib/kadm5", and
+portions of "lib/rpc":
+
+ Copyright, OpenVision Technologies, Inc., 1993-1996, All Rights
+ Reserved
+
+ WARNING: Retrieving the OpenVision Kerberos Administration system
+ source code, as described below, indicates your acceptance of the
+ following terms. If you do not agree to the following terms, do
+ not retrieve the OpenVision Kerberos administration system.
+
+ You may freely use and distribute the Source Code and Object Code
+ compiled from it, with or without modification, but this Source
+ Code is provided to you "AS IS" EXCLUSIVE OF ANY WARRANTY,
+ INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY OR
+ FITNESS FOR A PARTICULAR PURPOSE, OR ANY OTHER WARRANTY, WHETHER
+ EXPRESS OR IMPLIED. IN NO EVENT WILL OPENVISION HAVE ANY LIABILITY
+ FOR ANY LOST PROFITS, LOSS OF DATA OR COSTS OF PROCUREMENT OF
+ SUBSTITUTE GOODS OR SERVICES, OR FOR ANY SPECIAL, INDIRECT, OR
+ CONSEQUENTIAL DAMAGES ARISING OUT OF THIS AGREEMENT, INCLUDING,
+ WITHOUT LIMITATION, THOSE RESULTING FROM THE USE OF THE SOURCE
+ CODE, OR THE FAILURE OF THE SOURCE CODE TO PERFORM, OR FOR ANY
+ OTHER REASON.
+
+ OpenVision retains all copyrights in the donated Source Code.
+ OpenVision also retains copyright to derivative works of the Source
+ Code, whether created by OpenVision or by a third party. The
+ OpenVision copyright notice must be preserved if derivative works
+ are made based on the donated Source Code.
+
+ OpenVision Technologies, Inc. has donated this Kerberos
+ Administration system to MIT for inclusion in the standard Kerberos
+ 5 distribution. This donation underscores our commitment to
+ continuing Kerberos technology development and our gratitude for
+ the valuable work which has been performed by MIT and the Kerberos
+ community.
+
+======================================================================
+
+ Portions contributed by Matt Crawford "crawdad@fnal.gov" were work
+performed at Fermi National Accelerator Laboratory, which is
+ operated by Universities Research Association, Inc., under contract
+ DE-AC02-76CHO3000 with the U.S. Department of Energy.
+
+======================================================================
+
+Portions of "src/lib/crypto" have the following copyright:
+
+ Copyright (C) 1998 by the FundsXpress, INC.
+
+ All rights reserved.
+
+ Export of this software from the United States of America may
+ require a specific license from the United States Government.
+ It is the responsibility of any person or organization
+ contemplating export to obtain such a license before exporting.
+
+ WITHIN THAT CONSTRAINT, permission to use, copy, modify, and
+ distribute this software and its documentation for any purpose and
+ without fee is hereby granted, provided that the above copyright
+ notice appear in all copies and that both that copyright notice and
+ this permission notice appear in supporting documentation, and that
+ the name of FundsXpress. not be used in advertising or publicity
+ pertaining to distribution of the software without specific,
+ written prior permission. FundsXpress makes no representations
+ about the suitability of this software for any purpose. It is
+ provided "as is" without express or implied warranty.
+
+ THIS SOFTWARE IS PROVIDED "AS IS" AND WITHOUT ANY EXPRESS OR
+ IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED
+ WARRANTIES OF MERCHANTIBILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+
+======================================================================
+
+The implementation of the AES encryption algorithm in
+"src/lib/crypto/builtin/aes" has the following copyright:
+
+ Copyright (C) 2001, Dr Brian Gladman "brg@gladman.uk.net", Worcester, UK.
+ All rights reserved.
+
+ LICENSE TERMS
+
+ The free distribution and use of this software in both source and
+ binary form is allowed (with or without changes) provided that:
+
+ 1. distributions of this source code include the above copyright
+ notice, this list of conditions and the following disclaimer;
+
+ 2. distributions in binary form include the above copyright notice,
+ this list of conditions and the following disclaimer in the
+ documentation and/or other associated materials;
+
+ 3. the copyright holder's name is not used to endorse products
+ built using this software without specific written permission.
+
+ DISCLAIMER
+
+ This software is provided 'as is' with no explcit or implied
+ warranties in respect of any properties, including, but not limited
+ to, correctness and fitness for purpose.
+
+======================================================================
+
+Portions contributed by Red Hat, including the pre-authentication
+plug-in framework and the NSS crypto implementation, contain the
+following copyright:
+
+ Copyright (C) 2006 Red Hat, Inc.
+ Portions copyright (C) 2006 Massachusetts Institute of Technology
+ All Rights Reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+
+ * Neither the name of Red Hat, Inc., nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+ OF THE POSSIBILITY OF SUCH DAMAGE.
+
+======================================================================
+
+The bundled verto source code is subject to the following license:
+
+ Copyright 2011 Red Hat, Inc.
+
+ Permission is hereby granted, free of charge, to any person
+ obtaining a copy of this software and associated documentation
+ files (the "Software"), to deal in the Software without
+ restriction, including without limitation the rights to use, copy,
+ modify, merge, publish, distribute, sublicense, and/or sell copies
+ of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ DEALINGS IN THE SOFTWARE.
+
+======================================================================
+
+The MS-KKDCP client implementation has the following copyright:
+
+ Copyright 2013,2014 Red Hat, Inc.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ 1. Redistributions of source code must retain the above
+ copyright notice, this list of conditions and the following
+ disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials
+ provided with the distribution.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+ OF THE POSSIBILITY OF SUCH DAMAGE.
+
+======================================================================
+
+The implementations of GSSAPI mechglue in GSSAPI-SPNEGO in
+"src/lib/gssapi", including the following files:
+
+ lib/gssapi/generic/gssapi_err_generic.et
+ lib/gssapi/mechglue/g_accept_sec_context.c
+ lib/gssapi/mechglue/g_acquire_cred.c
+ lib/gssapi/mechglue/g_canon_name.c
+ lib/gssapi/mechglue/g_compare_name.c
+ lib/gssapi/mechglue/g_context_time.c
+ lib/gssapi/mechglue/g_delete_sec_context.c
+ lib/gssapi/mechglue/g_dsp_name.c
+ lib/gssapi/mechglue/g_dsp_status.c
+ lib/gssapi/mechglue/g_dup_name.c
+ lib/gssapi/mechglue/g_exp_sec_context.c
+ lib/gssapi/mechglue/g_export_name.c
+ lib/gssapi/mechglue/g_glue.c
+ lib/gssapi/mechglue/g_imp_name.c
+ lib/gssapi/mechglue/g_imp_sec_context.c
+ lib/gssapi/mechglue/g_init_sec_context.c
+ lib/gssapi/mechglue/g_initialize.c
+ lib/gssapi/mechglue/g_inquire_context.c
+ lib/gssapi/mechglue/g_inquire_cred.c
+ lib/gssapi/mechglue/g_inquire_names.c
+ lib/gssapi/mechglue/g_process_context.c
+ lib/gssapi/mechglue/g_rel_buffer.c
+ lib/gssapi/mechglue/g_rel_cred.c
+ lib/gssapi/mechglue/g_rel_name.c
+ lib/gssapi/mechglue/g_rel_oid_set.c
+ lib/gssapi/mechglue/g_seal.c
+ lib/gssapi/mechglue/g_sign.c
+ lib/gssapi/mechglue/g_store_cred.c
+ lib/gssapi/mechglue/g_unseal.c
+ lib/gssapi/mechglue/g_userok.c
+ lib/gssapi/mechglue/g_utils.c
+ lib/gssapi/mechglue/g_verify.c
+ lib/gssapi/mechglue/gssd_pname_to_uid.c
+ lib/gssapi/mechglue/mglueP.h
+ lib/gssapi/mechglue/oid_ops.c
+ lib/gssapi/spnego/gssapiP_spnego.h
+ lib/gssapi/spnego/spnego_mech.c
+
+and the initial implementation of incremental propagation, including
+the following new or changed files:
+
+ include/iprop_hdr.h
+ kadmin/server/ipropd_svc.c
+ lib/kdb/iprop.x
+ lib/kdb/kdb_convert.c
+ lib/kdb/kdb_log.c
+ lib/kdb/kdb_log.h
+ lib/krb5/error_tables/kdb5_err.et
+ slave/kpropd_rpc.c
+ slave/kproplog.c
+
+are subject to the following license:
+
+ Copyright (C) 2004 Sun Microsystems, Inc.
+
+ Permission is hereby granted, free of charge, to any person
+ obtaining a copy of this software and associated documentation
+ files (the "Software"), to deal in the Software without
+ restriction, including without limitation the rights to use, copy,
+ modify, merge, publish, distribute, sublicense, and/or sell copies
+ of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+ BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+ ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+ CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
+
+======================================================================
+
+Kerberos V5 includes documentation and software developed at the
+University of California at Berkeley, which includes this copyright
+notice:
+
+ Copyright (C) 1983 Regents of the University of California.
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ 1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+
+ 3. Neither the name of the University nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS "AS IS"
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
+ TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+ PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS
+ OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
+ USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+ OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ SUCH DAMAGE.
+
+======================================================================
+
+Portions contributed by Novell, Inc., including the LDAP database
+backend, are subject to the following license:
+
+ Copyright (C) 2004-2005, Novell, Inc.
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+
+ * The copyright holder's name is not used to endorse or promote
+ products derived from this software without specific prior
+ written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+ OF THE POSSIBILITY OF SUCH DAMAGE.
+
+======================================================================
+
+Portions funded by Sandia National Laboratory and developed by the
+University of Michigan's Center for Information Technology
+Integration, including the PKINIT implementation, are subject to the
+following license:
+
+ COPYRIGHT (C) 2006-2007
+ THE REGENTS OF THE UNIVERSITY OF MICHIGAN
+ ALL RIGHTS RESERVED
+
+ Permission is granted to use, copy, create derivative works and
+ redistribute this software and such derivative works for any
+ purpose, so long as the name of The University of Michigan is not
+ used in any advertising or publicity pertaining to the use of
+ distribution of this software without specific, written prior
+ authorization. If the above copyright notice or any other
+ identification of the University of Michigan is included in any
+ copy of any portion of this software, then the disclaimer below
+ must also be included.
+
+ THIS SOFTWARE IS PROVIDED AS IS, WITHOUT REPRESENTATION FROM THE
+ UNIVERSITY OF MICHIGAN AS TO ITS FITNESS FOR ANY PURPOSE, AND
+ WITHOUT WARRANTY BY THE UNIVERSITY OF MICHIGAN OF ANY KIND, EITHER
+ EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION THE IMPLIED
+ WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+ THE REGENTS OF THE UNIVERSITY OF MICHIGAN SHALL NOT BE LIABLE FOR
+ ANY DAMAGES, INCLUDING SPECIAL, INDIRECT, INCIDENTAL, OR
+ CONSEQUENTIAL DAMAGES, WITH RESPECT TO ANY CLAIM ARISING OUT OF OR
+ IN CONNECTION WITH THE USE OF THE SOFTWARE, EVEN IF IT HAS BEEN OR
+ IS HEREAFTER ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+
+======================================================================
+
+The pkcs11.h file included in the PKINIT code has the following
+license:
+
+ Copyright 2006 g10 Code GmbH
+ Copyright 2006 Andreas Jellinghaus
+
+ This file is free software; as a special exception the author gives
+ unlimited permission to copy and/or distribute it, with or without
+ modifications, as long as this notice is preserved.
+
+ This file is distributed in the hope that it will be useful, but
+ WITHOUT ANY WARRANTY, to the extent permitted by law; without even
+ the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+ PURPOSE.
+
+======================================================================
+
+Portions contributed by Apple Inc. are subject to the following
+license:
+
+ Copyright 2004-2008 Apple Inc. All Rights Reserved.
+
+ Export of this software from the United States of America may
+ require a specific license from the United States Government.
+ It is the responsibility of any person or organization
+ contemplating export to obtain such a license before exporting.
+
+ WITHIN THAT CONSTRAINT, permission to use, copy, modify, and
+ distribute this software and its documentation for any purpose and
+ without fee is hereby granted, provided that the above copyright
+ notice appear in all copies and that both that copyright notice and
+ this permission notice appear in supporting documentation, and that
+ the name of Apple Inc. not be used in advertising or publicity
+ pertaining to distribution of the software without specific,
+ written prior permission. Apple Inc. makes no representations
+ about the suitability of this software for any purpose. It is
+ provided "as is" without express or implied warranty.
+
+ THIS SOFTWARE IS PROVIDED "AS IS" AND WITHOUT ANY EXPRESS OR
+ IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED
+ WARRANTIES OF MERCHANTIBILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+
+======================================================================
+
+The implementations of UTF-8 string handling in src/util/support and
+src/lib/krb5/unicode are subject to the following copyright and
+permission notice:
+
+ The OpenLDAP Public License
+ Version 2.8, 17 August 2003
+
+ Redistribution and use of this software and associated
+ documentation ("Software"), with or without modification, are
+ permitted provided that the following conditions are met:
+
+ 1. Redistributions in source form must retain copyright statements
+ and notices,
+
+ 2. Redistributions in binary form must reproduce applicable
+ copyright statements and notices, this list of conditions, and
+ the following disclaimer in the documentation and/or other
+ materials provided with the distribution, and
+
+ 3. Redistributions must contain a verbatim copy of this document.
+
+ The OpenLDAP Foundation may revise this license from time to time.
+ Each revision is distinguished by a version number. You may use
+ this Software under terms of this license revision or under the
+ terms of any subsequent revision of the license.
+
+ THIS SOFTWARE IS PROVIDED BY THE OPENLDAP FOUNDATION AND ITS
+ CONTRIBUTORS "AS IS" AND ANY EXPRESSED OR IMPLIED WARRANTIES,
+ INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ DISCLAIMED. IN NO EVENT SHALL THE OPENLDAP FOUNDATION, ITS
+ CONTRIBUTORS, OR THE AUTHOR(S) OR OWNER(S) OF THE SOFTWARE BE
+ LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
+ OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+ LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
+ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+ DAMAGE.
+
+ The names of the authors and copyright holders must not be used in
+ advertising or otherwise to promote the sale, use or other dealing
+ in this Software without specific, written prior permission. Title
+ to copyright in this Software shall at all times remain with
+ copyright holders.
+
+ OpenLDAP is a registered trademark of the OpenLDAP Foundation.
+
+ Copyright 1999-2003 The OpenLDAP Foundation, Redwood City,
+ California, USA. All Rights Reserved. Permission to copy and
+ distribute verbatim copies of this document is granted.
+
+Marked test programs in src/lib/krb5/krb have the following copyright:
+
+
+ Copyright (C) 2006 Kungliga Tekniska Högskolan
+ (Royal Institute of Technology, Stockholm, Sweden).
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ 1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+
+ 3. Neither the name of KTH nor the names of its contributors may be
+ used to endorse or promote products derived from this software
+ without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY KTH AND ITS CONTRIBUTORS "AS IS" AND
+ ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+ PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL KTH OR ITS
+ CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
+ USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+ OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ SUCH DAMAGE.
+
+======================================================================
+
+The KCM Mach RPC definition file used on macOS has the following
+copyright:
+
+ Copyright (C) 2009 Kungliga Tekniska Högskolan
+ (Royal Institute of Technology, Stockholm, Sweden).
+ All rights reserved.
+
+ Portions Copyright (C) 2009 Apple Inc. All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ 1. Redistributions of source code must retain the above
+ copyright notice, this list of conditions and the following
+ disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+
+ 3. Neither the name of the Institute nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE INSTITUTE AND CONTRIBUTORS "AS IS"
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
+ TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+ PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE INSTITUTE
+ OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
+ USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+ OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ SUCH DAMAGE.
+
+======================================================================
+
+Portions of the RPC implementation in src/lib/rpc and
+src/include/gssrpc have the following copyright and permission notice:
+
+ Copyright (C) 2010, Oracle America, Inc.
+
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ 1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+
+ 3. Neither the name of the "Oracle America, Inc." nor the names of
+ its contributors may be used to endorse or promote products
+ derived from this software without specific prior written
+ permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
+ INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+ OF THE POSSIBILITY OF SUCH DAMAGE.
+
+======================================================================
+
+ Copyright (C) 2006,2007,2009 NTT (Nippon Telegraph and Telephone
+ Corporation). All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ 1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer as
+ the first lines of this file unmodified.
+
+ 2. Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+
+ THIS SOFTWARE IS PROVIDED BY NTT "AS IS" AND ANY EXPRESS OR IMPLIED
+ WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ DISCLAIMED. IN NO EVENT SHALL NTT BE LIABLE FOR ANY DIRECT,
+ INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+ OF THE POSSIBILITY OF SUCH DAMAGE.
+
+======================================================================
+
+ Copyright 2000 by Carnegie Mellon University
+
+ All Rights Reserved
+
+ Permission to use, copy, modify, and distribute this software and
+ its documentation for any purpose and without fee is hereby
+ granted, provided that the above copyright notice appear in all
+ copies and that both that copyright notice and this permission
+ notice appear in supporting documentation, and that the name of
+ Carnegie Mellon University not be used in advertising or publicity
+ pertaining to distribution of the software without specific,
+ written prior permission.
+
+ CARNEGIE MELLON UNIVERSITY DISCLAIMS ALL WARRANTIES WITH REGARD TO
+ THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+ AND FITNESS, IN NO EVENT SHALL CARNEGIE MELLON UNIVERSITY BE LIABLE
+ FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+ WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN
+ AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING
+ OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
+ SOFTWARE.
+
+======================================================================
+
+ Copyright (C) 2002 Naval Research Laboratory (NRL/CCS)
+
+ Permission to use, copy, modify and distribute this software and
+ its documentation is hereby granted, provided that both the
+ copyright notice and this permission notice appear in all copies of
+ the software, derivative works or modified versions, and any
+ portions thereof.
+
+ NRL ALLOWS FREE USE OF THIS SOFTWARE IN ITS "AS IS" CONDITION AND
+ DISCLAIMS ANY LIABILITY OF ANY KIND FOR ANY DAMAGES WHATSOEVER
+ RESULTING FROM THE USE OF THIS SOFTWARE.
+
+======================================================================
+
+Portions extracted from Internet RFCs have the following copyright
+notice:
+
+ Copyright (C) The Internet Society (2006).
+
+ This document is subject to the rights, licenses and restrictions
+ contained in BCP 78, and except as set forth therein, the authors
+ retain all their rights.
+
+ This document and the information contained herein are provided on
+ an "AS IS" basis and THE CONTRIBUTOR, THE ORGANIZATION HE/SHE
+ REPRESENTS OR IS SPONSORED BY (IF ANY), THE INTERNET SOCIETY AND
+ THE INTERNET ENGINEERING TASK FORCE DISCLAIM ALL WARRANTIES,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTY THAT
+ THE USE OF THE INFORMATION HEREIN WILL NOT INFRINGE ANY RIGHTS OR
+ ANY IMPLIED WARRANTIES OF MERCHANTABILITY OR FITNESS FOR A
+ PARTICULAR PURPOSE.
+
+======================================================================
+
+ Copyright (C) 1991, 1992, 1994 by Cygnus Support.
+
+ Permission to use, copy, modify, and distribute this software and
+ its documentation for any purpose and without fee is hereby
+ granted, provided that the above copyright notice appear in all
+ copies and that both that copyright notice and this permission
+ notice appear in supporting documentation. Cygnus Support makes no
+ representations about the suitability of this software for any
+ purpose. It is provided "as is" without express or implied
+ warranty.
+
+======================================================================
+
+ Copyright (C) 2006 Secure Endpoints Inc.
+
+ Permission is hereby granted, free of charge, to any person
+ obtaining a copy of this software and associated documentation
+ files (the "Software"), to deal in the Software without
+ restriction, including without limitation the rights to use, copy,
+ modify, merge, publish, distribute, sublicense, and/or sell copies
+ of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+ BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+ ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+ CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
+
+======================================================================
+
+Portions of the implementation of the Fortuna-like PRNG are subject to
+the following notice:
+
+
+ Copyright (C) 2005 Marko Kreen
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ 1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+
+ THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS "AS IS"
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
+ TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+ PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR
+ CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
+ USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+ OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ SUCH DAMAGE.
+
+ Copyright (C) 1994 by the University of Southern California
+
+ EXPORT OF THIS SOFTWARE from the United States of America may
+ require a specific license from the United States Government. It
+ is the responsibility of any person or organization
+ contemplating export to obtain such a license before exporting.
+
+ WITHIN THAT CONSTRAINT, permission to copy, modify, and distribute
+ this software and its documentation in source and binary forms is
+ hereby granted, provided that any documentation or other materials
+ related to such distribution or use acknowledge that the software
+ was developed by the University of Southern California.
+
+ DISCLAIMER OF WARRANTY. THIS SOFTWARE IS PROVIDED "AS IS". The
+ University of Southern California MAKES NO REPRESENTATIONS OR
+ WARRANTIES, EXPRESS OR IMPLIED. By way of example, but not
+ limitation, the University of Southern California MAKES NO
+ REPRESENTATIONS OR WARRANTIES OF MERCHANTABILITY OR FITNESS FOR ANY
+ PARTICULAR PURPOSE. The University of Southern California shall not
+ be held liable for any liability nor for any direct, indirect, or
+ consequential damages with respect to any claim by the user or
+ distributor of the ksu software.
+
+======================================================================
+
+ Copyright (C) 1995
+ The President and Fellows of Harvard University
+
+ This code is derived from software contributed to Harvard by Jeremy
+ Rassen.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ 1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+
+ 3. All advertising materials mentioning features or use of this
+ software must display the following acknowledgement:
+
+ This product includes software developed by the University of
+ California, Berkeley and its contributors.
+
+ 4. Neither the name of the University nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS "AS IS"
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
+ TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+ PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS
+ OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
+ USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+ OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ SUCH DAMAGE.
+
+======================================================================
+
+ Copyright (C) 2008 by the Massachusetts Institute of Technology.
+ Copyright 1995 by Richard P. Basch. All Rights Reserved.
+ Copyright 1995 by Lehman Brothers, Inc. All Rights Reserved.
+
+ Export of this software from the United States of America may
+ require a specific license from the United States Government. It
+ is the responsibility of any person or organization
+ contemplating export to obtain such a license before exporting.
+
+ WITHIN THAT CONSTRAINT, permission to use, copy, modify, and
+ distribute this software and its documentation for any purpose and
+ without fee is hereby granted, provided that the above copyright
+ notice appear in all copies and that both that copyright notice and
+ this permission notice appear in supporting documentation, and that
+ the name of Richard P. Basch, Lehman Brothers and M.I.T. not be
+ used in advertising or publicity pertaining to distribution of the
+ software without specific, written prior permission. Richard P.
+ Basch, Lehman Brothers and M.I.T. make no representations about the
+ suitability of this software for any purpose. It is provided "as
+ is" without express or implied warranty.
+
+======================================================================
+
+The following notice applies to "src/lib/krb5/krb/strptime.c" and
+"src/include/k5-queue.h".
+
+ Copyright (C) 1997, 1998 The NetBSD Foundation, Inc.
+ All rights reserved.
+
+ This code was contributed to The NetBSD Foundation by Klaus Klein.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ 1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+
+ 3. All advertising materials mentioning features or use of this
+ software must display the following acknowledgement:
+
+ This product includes software developed by the NetBSD
+ Foundation, Inc. and its contributors.
+
+ 4. Neither the name of The NetBSD Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND
+ CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS BE
+ LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
+ OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+ LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
+ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+ DAMAGE.
+
+======================================================================
+
+The following notice applies to Unicode library files in
+"src/lib/krb5/unicode":
+
+ Copyright 1997, 1998, 1999 Computing Research Labs,
+ New Mexico State University
+
+ Permission is hereby granted, free of charge, to any person
+ obtaining a copy of this software and associated documentation
+ files (the "Software"), to deal in the Software without
+ restriction, including without limitation the rights to use, copy,
+ modify, merge, publish, distribute, sublicense, and/or sell copies
+ of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE COMPUTING RESEARCH LAB OR
+ NEW MEXICO STATE UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR
+ OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+ OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
+ OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+======================================================================
+
+The following notice applies to "src/util/support/strlcpy.c":
+
+ Copyright (C) 1998 Todd C. Miller "Todd.Miller@courtesan.com"
+
+ Permission to use, copy, modify, and distribute this software for
+ any purpose with or without fee is hereby granted, provided that
+ the above copyright notice and this permission notice appear in all
+ copies.
+
+ THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL
+ WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
+ WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
+ AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
+ CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+ OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
+ NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
+ CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+======================================================================
+
+The following notice applies to "src/util/profile/argv_parse.c" and
+"src/util/profile/argv_parse.h":
+
+ Copyright 1999 by Theodore Ts'o.
+
+ Permission to use, copy, modify, and distribute this software for
+ any purpose with or without fee is hereby granted, provided that
+ the above copyright notice and this permission notice appear in all
+ copies. THE SOFTWARE IS PROVIDED "AS IS" AND THEODORE TS'O (THE
+ AUTHOR) DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
+ INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN
+ NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+ INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
+ RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
+ OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+ IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. (Isn't
+ it sick that the U.S. culture of lawsuit-happy lawyers requires
+ this kind of disclaimer?)
+
+======================================================================
+
+The following notice applies to SWIG-generated code in
+"src/util/profile/profile_tcl.c":
+
+ Copyright (C) 1999-2000, The University of Chicago
+
+ This file may be freely redistributed without license or fee
+ provided this copyright message remains intact.
+
+======================================================================
+
+The following notice applies to portions of "src/lib/rpc" and
+"src/include/gssrpc":
+
+ Copyright (C) 2000 The Regents of the University of Michigan. All
+ rights reserved.
+
+ Copyright (C) 2000 Dug Song "dugsong@UMICH.EDU". All rights
+ reserved, all wrongs reversed.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ 1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+
+ 3. Neither the name of the University nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
+ FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
+ OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+ LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
+ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+ DAMAGE.
+
+Implementations of the MD4 algorithm are subject to the following
+notice:
+
+ Copyright (C) 1990, RSA Data Security, Inc. All rights reserved.
+
+ License to copy and use this software is granted provided that it
+ is identified as the "RSA Data Security, Inc. MD4 Message Digest
+ Algorithm" in all material mentioning or referencing this software
+ or this function.
+
+ License is also granted to make and use derivative works provided
+ that such works are identified as "derived from the RSA Data
+ Security, Inc. MD4 Message Digest Algorithm" in all material
+ mentioning or referencing the derived work.
+
+ RSA Data Security, Inc. makes no representations concerning either
+ the merchantability of this software or the suitability of this
+ software for any particular purpose. It is provided "as is"
+ without express or implied warranty of any kind.
+
+ These notices must be retained in any copies of any part of this
+ documentation and/or software.
+
+======================================================================
+
+Implementations of the MD5 algorithm are subject to the following
+notice:
+
+ Copyright (C) 1990, RSA Data Security, Inc. All rights reserved.
+
+ License to copy and use this software is granted provided that it
+ is identified as the "RSA Data Security, Inc. MD5 Message- Digest
+ Algorithm" in all material mentioning or referencing this software
+ or this function.
+
+ License is also granted to make and use derivative works provided
+ that such works are identified as "derived from the RSA Data
+ Security, Inc. MD5 Message-Digest Algorithm" in all material
+ mentioning or referencing the derived work.
+
+ RSA Data Security, Inc. makes no representations concerning either
+ the merchantability of this software or the suitability of this
+ software for any particular purpose. It is provided "as is"
+ without express or implied warranty of any kind.
+
+ These notices must be retained in any copies of any part of this
+ documentation and/or software.
+
+======================================================================
+
+The following notice applies to
+"src/lib/crypto/crypto_tests/t_mddriver.c":
+
+ Copyright (C) 1990-2, RSA Data Security, Inc. Created 1990. All
+ rights reserved.
+
+ RSA Data Security, Inc. makes no representations concerning either
+ the merchantability of this software or the suitability of this
+ software for any particular purpose. It is provided "as is" without
+ express or implied warranty of any kind.
+
+ These notices must be retained in any copies of any part of this
+ documentation and/or software.
+
+======================================================================
+
+Portions of "src/lib/krb5" are subject to the following notice:
+
+ Copyright (C) 1994 CyberSAFE Corporation.
+ Copyright 1990,1991,2007,2008 by the Massachusetts Institute of
+Technology.
+ All Rights Reserved.
+
+ Export of this software from the United States of America may
+ require a specific license from the United States Government. It
+ is the responsibility of any person or organization
+ contemplating export to obtain such a license before exporting.
+
+ WITHIN THAT CONSTRAINT, permission to use, copy, modify, and
+ distribute this software and its documentation for any purpose and
+ without fee is hereby granted, provided that the above copyright
+ notice appear in all copies and that both that copyright notice and
+ this permission notice appear in supporting documentation, and that
+ the name of M.I.T. not be used in advertising or publicity
+ pertaining to distribution of the software without specific,
+ written prior permission. Furthermore if you modify this software
+ you must label your software as modified software and not
+ distribute it in such a fashion that it might be confused with the
+ original M.I.T. software. Neither M.I.T., the Open Computing
+ Security Group, nor CyberSAFE Corporation make any representations
+ about the suitability of this software for any purpose. It is
+ provided "as is" without express or implied warranty.
+
+======================================================================
+
+Portions contributed by PADL Software are subject to the following
+license:
+
+ Copyright (c) 2011, PADL Software Pty Ltd. All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ 1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+
+ 3. Neither the name of PADL Software nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY PADL SOFTWARE AND CONTRIBUTORS "AS IS"
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
+ TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+ PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL PADL SOFTWARE
+ OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
+ USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+ OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ SUCH DAMAGE.
+
+======================================================================
+
+The bundled libev source code is subject to the following license:
+
+ All files in libev are Copyright (C)2007,2008,2009 Marc Alexander
+ Lehmann.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+ OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ Alternatively, the contents of this package may be used under the
+ terms of the GNU General Public License ("GPL") version 2 or any
+ later version, in which case the provisions of the GPL are
+ applicable instead of the above. If you wish to allow the use of
+ your version of this package only under the terms of the GPL and
+ not to allow others to use your version of this file under the BSD
+ license, indicate your decision by deleting the provisions above
+ and replace them with the notice and other provisions required by
+ the GPL in this and the other files of this package. If you do not
+ delete the provisions above, a recipient may use your version of
+ this file under either the BSD or the GPL.
+
+======================================================================
+
+Files copied from the Intel AESNI Sample Library are subject to the
+following license:
+
+ Copyright (C) 2010, Intel Corporation
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials
+ provided with the distribution.
+
+ * Neither the name of Intel Corporation nor the names of its
+ contributors may be used to endorse or promote products
+ derived from this software without specific prior written
+ permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+ OF THE POSSIBILITY OF SUCH DAMAGE.
+
+======================================================================
+
+The following notice applies to
+"src/ccapi/common/win/OldCC/autolock.hxx":
+
+ Copyright (C) 1998 by Danilo Almeida. All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
+ INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+ OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ ======================================================================
+ ======================================================================
+
+OpenSSL License
+
+ You are receiving a copy of OpenSSL as part of this product in object
+ code form. The terms of the Oracle license do NOT apply to OpenSSL.
+ OpenSSL is licensed under a double license, of the OpenSSL License and
+ the original SSLeay license, separate from the Oracle product. If you
+ do not wish to install this library, you may remove it, but the Oracle
+ program might not operate properly or at all without it.
+
+ LICENSE ISSUES
+ ==============
+
+ The OpenSSL toolkit stays under a double license, i.e. both the conditions of
+ the OpenSSL License and the original SSLeay license apply to the toolkit.
+ See below for the actual license texts.
+
+ OpenSSL License
+ ---------------
+
+/* ====================================================================
+ * Copyright (c) 1998-2019 The OpenSSL Project. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in
+ * the documentation and/or other materials provided with the
+ * distribution.
+ *
+ * 3. All advertising materials mentioning features or use of this
+ * software must display the following acknowledgment:
+ * "This product includes software developed by the OpenSSL Project
+ * for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
+ *
+ * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
+ * endorse or promote products derived from this software without
+ * prior written permission. For written permission, please contact
+ * openssl-core@openssl.org.
+ *
+ * 5. Products derived from this software may not be called "OpenSSL"
+ * nor may "OpenSSL" appear in their names without prior written
+ * permission of the OpenSSL Project.
+ *
+ * 6. Redistributions of any form whatsoever must retain the following
+ * acknowledgment:
+ * "This product includes software developed by the OpenSSL Project
+ * for use in the OpenSSL Toolkit (http://www.openssl.org/)"
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
+ * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
+ * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+ * OF THE POSSIBILITY OF SUCH DAMAGE.
+ * ====================================================================
+ *
+ * This product includes cryptographic software written by Eric Young
+ * (eay@cryptsoft.com). This product includes software written by Tim
+ * Hudson (tjh@cryptsoft.com).
+ *
+ */
+
+ Original SSLeay License
+ -----------------------
+
+/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
+ * All rights reserved.
+ *
+ * This package is an SSL implementation written
+ * by Eric Young (eay@cryptsoft.com).
+ * The implementation was written so as to conform with Netscapes SSL.
+ *
+ * This library is free for commercial and non-commercial use as long as
+ * the following conditions are aheared to. The following conditions
+ * apply to all code found in this distribution, be it the RC4, RSA,
+ * lhash, DES, etc., code; not just the SSL code. The SSL documentation
+ * included with this distribution is covered by the same copyright terms
+ * except that the holder is Tim Hudson (tjh@cryptsoft.com).
+ *
+ * Copyright remains Eric Young's, and as such any Copyright notices in
+ * the code are not to be removed.
+ * If this package is used in a product, Eric Young should be given attribution
+ * as the author of the parts of the library used.
+ * This can be in the form of a textual message at program startup or
+ * in documentation (online or textual) provided with the package.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. All advertising materials mentioning features or use of this software
+ * must display the following acknowledgement:
+ * "This product includes cryptographic software written by
+ * Eric Young (eay@cryptsoft.com)"
+ * The word 'cryptographic' can be left out if the rouines from the library
+ * being used are not cryptographic related :-).
+ * 4. If you include any Windows specific code (or a derivative thereof) from
+ * the apps directory (application code) you must include an acknowledgement:
+ * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
+ *
+ * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ *
+ * The licence and distribution terms for any publically available version or
+ * derivative of this code cannot be changed. i.e. this code cannot simply be
+ * copied and put under another distribution licence
+ * [including the GNU Public Licence.]
+ */
+
+ ======================================================================
+ ======================================================================
+
+Oracle OCI Python SDK
+
+ Oracle OCI Python SDK
+
+Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
+This software is dual-licensed to you under the Universal Permissive License
+(UPL) 1.0 or Apache License 2.0. See below for license terms. You may
+choose either license.
+____________________________
+The Universal Permissive License (UPL), Version 1.0
+Copyright (c) 2016, 2018, Oracle and/or its affiliates. All rights reserved.
+Subject to the condition set forth below, permission is hereby granted to any
+person obtaining a copy of this software, associated documentation and/or
+data (collectively the "Software"), free of charge and under any and all
+copyright rights in the Software, and any and all patent rights owned or
+freely licensable by each licensor hereunder covering either (i) the
+unmodified Software as contributed to or provided by such licensor, or (ii)
+the Larger Works (as defined below), to deal in both
+(a) the Software, and
+(b) any piece of software and/or hardware listed in the lrgrwrks.txt file if
+one is included with the Software (each a "Larger Work" to which the Software
+is contributed by such licensors),
+without restriction, including without limitation the rights to copy, create
+derivative works of, display, perform, and distribute the Software and make,
+use, sell, offer for sale, import, export, have made, and have sold the
+Software and the Larger Work(s), and to sublicense the foregoing rights on
+either these or other terms.
+This license is subject to the following condition:
+The above copyright notice and either this complete permission notice or at a
+minimum a reference to the UPL must be included in all copies or substantial
+portions of the Software.
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+The Apache Software License, Version 2.0
+Copyright (c) 2016, 2018, Oracle and/or its affiliates. All rights reserved.
+Licensed under the Apache License, Version 2.0 (the "License"); You may not
+use this product except in compliance with the License. You may obtain a
+copy of the License at http://www.apache.org/licenses/LICENSE-2.0. A copy of
+the license is also reproduced below. Unless required by applicable law or
+agreed to in writing, software distributed under the License is distributed
+on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+express or implied. See the License for the specific language governing
+permissions and limitations under the License.
+
+Apache License Version 2.0, January 2004
+
+Oracle's use of OCI Python SDK in MySQL Community Edition is solely under the
+UPL
+
+ ======================================================================
+ ======================================================================
+
+Python 3
+
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+analyze, test, perform and/or display publicly, prepare derivative works,
+distribute, and otherwise use Python alone or in any derivative version,
+provided, however, that PSF's License Agreement and PSF's notice of copyright,
+i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019 Python Software Foundation;
+All Rights Reserved" are retained in Python alone or in any derivative version
+prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee. This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+ ======================================================================
+ ======================================================================
+
+python-lz4
+
+Copyright (c) 2012-2013, Steeve Morin
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+3. Neither the name of Steeve Morin nor the names of its contributors may be
+ used to endorse or promote products derived from this software without
+ specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
+
+============================================================
+
+Additional 4th party:
+py3c
+------
+from py3c.h file in code directory:
+
+/*
+The MIT License (MIT)
+
+Copyright (c) 2015, Red Hat, Inc. and/or its affiliates
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+*/
+
+------------------------------------------------------------------------------
+
+lz4
+---
+from lz4.h file in lz4libs directory
+
+/*
+ * LZ4 - Fast LZ compression algorithm
+ * Header File
+ * Copyright (C) 2011-present, Yann Collet.
+
+ BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are
+ met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following disclaimer
+ in the documentation and/or other materials provided with the
+ distribution.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ You can contact the author at :
+ - LZ4 homepage : http://www.lz4.org
+ - LZ4 source repository : https://github.com/lz4/lz4
+*/
+
+** Future 4th party
+(https://files.pythonhosted.org/packages/90/52/e20466b85000a181e1e144fd8305caf
+2cf475e2f9674e797b222f8105f5f/future-0.17.1.tar.gz)
+Copyright (c) 2013-2018 Python Charmers Pty Ltd, Australia
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
+ ======================================================================
+ ======================================================================
+
+python-zstandard
+
+ python-zstandard
+
+Copyright (c) 2016, Gregory Szorc
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice,
+this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice,
+this list of conditions and the following disclaimer in the documentation
+and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its contributors
+may be used to endorse or promote products derived from this software without
+specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ ======================================================================
+ ======================================================================
+
+Six
+
+ Six
+
+Copyright (c) 2010-2020 Benjamin Peterson
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
+ ======================================================================
+ ======================================================================
+
+Standard Licenses
+
+Apache License Version 2.0, January 2004
+
+The following applies to all products licensed under the Apache 2.0
+License: You may not use the identified files except in compliance
+with the Apache License, Version 2.0 (the "License.") You may obtain a
+copy of the License at http://www.apache.org/licenses/LICENSE-2.0. A
+copy of the license is also reproduced below. Unless required by
+applicable law or agreed to in writing, software distributed under the
+License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
+CONDITIONS OF ANY KIND, either express or implied. See the License for
+the specific language governing permissions and limitations under the
+License.
+
+Apache License Version 2.0, January 2004 http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+"License" shall mean the terms and conditions for use, reproduction,
+and distribution as defined by Sections 1 through 9 of this document.
+
+"Licensor" shall mean the copyright owner or entity authorized by the
+copyright owner that is granting the License.
+
+"Legal Entity" shall mean the union of the acting entity and all other
+entities that control, are controlled by, or are under common control
+with that entity. For the purposes of this definition, "control" means
+(i) the power, direct or indirect, to cause the direction or
+management of such entity, whether by contract or otherwise, or (ii)
+ownership of fifty percent (50%) or more of the outstanding shares, or
+(iii) beneficial ownership of such entity.
+
+"You" (or "Your") shall mean an individual or Legal Entity exercising
+permissions granted by this License.
+
+"Source" form shall mean the preferred form for making modifications,
+including but not limited to software source code, documentation
+source, and configuration files.
+
+"Object" form shall mean any form resulting from mechanical
+transformation or translation of a Source form, including but not
+limited to compiled object code, generated documentation, and
+conversions to other media types.
+
+"Work" shall mean the work of authorship, whether in Source or Object
+form, made available under the License, as indicated by a copyright
+notice that is included in or attached to the work (an example is
+provided in the Appendix below).
+
+"Derivative Works" shall mean any work, whether in Source or Object
+form, that is based on (or derived from) the Work and for which the
+editorial revisions, annotations, elaborations, or other modifications
+represent, as a whole, an original work of authorship. For the
+purposes of this License, Derivative Works shall not include works
+that remain separable from, or merely link (or bind by name) to the
+interfaces of, the Work and Derivative Works thereof.
+
+"Contribution" shall mean any work of authorship, including the
+original version of the Work and any modifications or additions to
+that Work or Derivative Works thereof, that is intentionally submitted
+to Licensor for inclusion in the Work by the copyright owner or by an
+individual or Legal Entity authorized to submit on behalf of the
+copyright owner. For the purposes of this definition, "submitted"
+means any form of electronic, verbal, or written communication sent to
+the Licensor or its representatives, including but not limited to
+communication on electronic mailing lists, source code control
+systems, and issue tracking systems that are managed by, or on behalf
+of, the Licensor for the purpose of discussing and improving the Work,
+but excluding communication that is conspicuously marked or otherwise
+designated in writing by the copyright owner as "Not a Contribution."
+
+"Contributor" shall mean Licensor and any individual or Legal Entity
+on behalf of whom a Contribution has been received by Licensor and
+subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+this License, each Contributor hereby grants to You a perpetual,
+worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+copyright license to reproduce, prepare Derivative Works of, publicly
+display, publicly perform, sublicense, and distribute the Work and
+such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+this License, each Contributor hereby grants to You a perpetual,
+worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except
+as stated in this section) patent license to make, have made, use,
+offer to sell, sell, import, and otherwise transfer the Work, where
+such license applies only to those patent claims licensable by such
+Contributor that are necessarily infringed by their Contribution(s)
+alone or by combination of their Contribution(s) with the Work to
+which such Contribution(s) was submitted. If You institute patent
+litigation against any entity (including a cross-claim or counterclaim
+in a lawsuit) alleging that the Work or a Contribution incorporated
+within the Work constitutes direct or contributory patent
+infringement, then any patent licenses granted to You under this
+License for that Work shall terminate as of the date such litigation
+is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the Work
+or Derivative Works thereof in any medium, with or without
+modifications, and in Source or Object form, provided that You meet
+the following conditions:
+
+(a) You must give any other recipients of the Work or Derivative Works
+a copy of this License; and
+
+(b) You must cause any modified files to carry prominent notices
+stating that You changed the files; and
+
+(c) You must retain, in the Source form of any Derivative Works that
+You distribute, all copyright, patent, trademark, and attribution
+notices from the Source form of the Work, excluding those notices that
+do not pertain to any part of the Derivative Works; and
+
+(d) If the Work includes a "NOTICE" text file as part of its
+distribution, then any Derivative Works that You distribute must
+include a readable copy of the attribution notices contained
+
+within such NOTICE file, excluding those notices that do not pertain
+to any part of the Derivative Works, in at least one of the following
+places: within a NOTICE text file distributed as part of the
+Derivative Works; within the Source form or documentation, if provided
+along with the Derivative Works; or, within a display generated by the
+Derivative Works, if and wherever such third-party notices normally
+appear. The contents of the NOTICE file are for informational purposes
+only and do not modify the License. You may add Your own attribution
+notices within Derivative Works that You distribute, alongside or as
+an addendum to the NOTICE text from the Work, provided that such
+additional attribution notices cannot be construed as modifying the
+License.
+
+You may add Your own copyright statement to Your modifications and may
+provide additional or different license terms and conditions for use,
+reproduction, or distribution of Your modifications, or for any such
+Derivative Works as a whole, provided Your use, reproduction, and
+distribution of the Work otherwise complies with the conditions stated
+in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+any Contribution intentionally submitted for inclusion in the Work by
+You to the Licensor shall be under the terms and conditions of this
+License, without any additional terms or conditions. Notwithstanding
+the above, nothing herein shall supersede or modify the terms of any
+separate license agreement you may have executed with Licensor
+regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+names, trademarks, service marks, or product names of the Licensor,
+except as required for reasonable and customary use in describing the
+origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or agreed
+to in writing, Licensor provides the Work (and each Contributor
+provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR
+CONDITIONS OF ANY KIND, either express or implied, including, without
+limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT,
+MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely
+responsible for determining the appropriateness of using or
+redistributing the Work and assume any risks associated with Your
+exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+whether in tort (including negligence), contract, or otherwise, unless
+required by applicable law (such as deliberate and grossly negligent
+acts) or agreed to in writing, shall any Contributor be liable to You
+for damages, including any direct, indirect, special, incidental, or
+consequential damages of any character arising as a result of this
+License or out of the use or inability to use the Work (including but
+not limited to damages for loss of goodwill, work stoppage, computer
+failure or malfunction, or any and all other commercial damages or
+losses), even if such Contributor has been advised of the possibility
+of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+the Work or Derivative Works thereof, You may choose to offer, and
+charge a fee for, acceptance of support, warranty, indemnity, or other
+liability obligations and/or rights consistent with this
+License. However, in accepting such obligations, You may act only on
+Your own behalf and on Your sole responsibility, not on behalf of any
+other Contributor, and only if You agree to indemnify, defend, and
+hold each Contributor harmless for any liability incurred by, or
+claims asserted against, such Contributor by reason of your accepting
+any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work
+
+To apply the Apache License to your work, attach the following boilerplate
+notice, with the fields enclosed by brackets "[]" replaced with your own
+identifying information. (Don't include the brackets!) The text should be
+enclosed in the appropriate comment syntax for the file format. We also
+recommend that a file or class name and description of purpose be included
+on the same "printed page" as the copyright notice for easier identification
+within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+implied. See the License for the specific language governing permissions
+and limitations under the License.
+
+ ======================================================================
+ ======================================================================
+
+Written Offer for Source Code
+
+ For any software that you receive from Oracle in binary form which is
+ licensed under an open source license that gives you the right to
+ receive the source code for that binary, you can obtain a copy of the
+ applicable source code by visiting
+ http://www.oracle.com/goto/opensourcecode. If the source code for the
+ binary was not provided to you with the binary, you can also receive a
+ copy of the source code on physical media by submitting a written
+ request to the address listed below or by sending an email to Oracle
+ using the following link:
+ http://www.oracle.com/goto/opensourcecode/request.
+
+ Oracle America, Inc.
+ Attn: Senior Vice President
+ Development and Engineering Legal
+ 500 Oracle Parkway, 10th Floor
+ Redwood Shores, CA 94065
+
+ Your request should include:
+
+ * The name of the binary for which you are requesting the source code
+
+ * The name and version number of the Oracle product containing the
+ binary
+
+ * The date you received the Oracle product
+
+ * Your name
+
+ * Your company name (if applicable)
+
+ * Your return mailing address and email, and
+
+ * A telephone number in the event we need to reach you.
+
+
+ We may charge you a fee to cover the cost of physical media and
+ processing.
+
+ Your request must be sent
+
+ a. within three (3) years of the date you received the Oracle product
+ that included the binary that is the subject of your request, or
+
+ b. in the case of code licensed under the GPL v3 for as long as Oracle
+ offers spare parts or customer support for that product model.
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql_connector_python-8.0.28.dist-info/METADATA b/monEnvTP/lib/python3.8/site-packages/mysql_connector_python-8.0.28.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..11d5730c5eb2646688362bbe04ac58e9995f1507
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql_connector_python-8.0.28.dist-info/METADATA
@@ -0,0 +1,43 @@
+Metadata-Version: 2.1
+Name: mysql-connector-python
+Version: 8.0.28
+Summary: MySQL driver written in Python
+Home-page: http://dev.mysql.com/doc/connector-python/en/index.html
+Author: Oracle and/or its affiliates
+Author-email:
+License: GNU GPLv2 (with FOSS License Exception)
+Download-URL: http://dev.mysql.com/downloads/connector/python/
+Keywords: mysql db
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Other Environment
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Education
+Classifier: Intended Audience :: Information Technology
+Classifier: Intended Audience :: System Administrators
+Classifier: License :: OSI Approved :: GNU General Public License (GPL)
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Topic :: Database
+Classifier: Topic :: Software Development
+Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Dist: protobuf (>=3.0.0)
+Provides-Extra: compression
+Requires-Dist: lz4 (>=2.1.6) ; extra == 'compression'
+Requires-Dist: zstandard (>=0.12.0) ; extra == 'compression'
+Provides-Extra: dns-srv
+Requires-Dist: dnspython (>=1.16.0) ; extra == 'dns-srv'
+Provides-Extra: gssapi
+Requires-Dist: gssapi (>=1.6.9) ; extra == 'gssapi'
+
+
+MySQL driver written in Python which does not depend on MySQL C client
+libraries and implements the DB API v2.0 specification (PEP-249).
+
+
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql_connector_python-8.0.28.dist-info/RECORD b/monEnvTP/lib/python3.8/site-packages/mysql_connector_python-8.0.28.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..f88f187f98fe1a051d95c84b24509b105c772b04
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql_connector_python-8.0.28.dist-info/RECORD
@@ -0,0 +1,176 @@
+_mysql_connector.cpython-38-x86_64-linux-gnu.so,sha256=CwfSA4xceXOWpWeAb-KlP8Jfxh7pB_t2jJgSb55KlzU,33045152
+_mysqlxpb.cpython-38-x86_64-linux-gnu.so,sha256=5q1uWhltiG4Ydf9s0I610107d9cQ8wsSOikYsvP2DuQ,32573344
+mysql/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+mysql/__pycache__/__init__.cpython-38.pyc,,
+mysql/connector/__init__.py,sha256=XwnVrP3m5edtvbVJdewR1U225D-9mhcFsaxiIbmOtPY,11027
+mysql/connector/__pycache__/__init__.cpython-38.pyc,,
+mysql/connector/__pycache__/abstracts.cpython-38.pyc,,
+mysql/connector/__pycache__/authentication.cpython-38.pyc,,
+mysql/connector/__pycache__/charsets.cpython-38.pyc,,
+mysql/connector/__pycache__/connection.cpython-38.pyc,,
+mysql/connector/__pycache__/connection_cext.cpython-38.pyc,,
+mysql/connector/__pycache__/constants.cpython-38.pyc,,
+mysql/connector/__pycache__/conversion.cpython-38.pyc,,
+mysql/connector/__pycache__/cursor.cpython-38.pyc,,
+mysql/connector/__pycache__/cursor_cext.cpython-38.pyc,,
+mysql/connector/__pycache__/custom_types.cpython-38.pyc,,
+mysql/connector/__pycache__/dbapi.cpython-38.pyc,,
+mysql/connector/__pycache__/errorcode.cpython-38.pyc,,
+mysql/connector/__pycache__/errors.cpython-38.pyc,,
+mysql/connector/__pycache__/network.cpython-38.pyc,,
+mysql/connector/__pycache__/optionfiles.cpython-38.pyc,,
+mysql/connector/__pycache__/pooling.cpython-38.pyc,,
+mysql/connector/__pycache__/protocol.cpython-38.pyc,,
+mysql/connector/__pycache__/utils.cpython-38.pyc,,
+mysql/connector/__pycache__/version.cpython-38.pyc,,
+mysql/connector/abstracts.py,sha256=md75aEV_aFY425VZWoihNopb3pUXHxDW1A3hpsuaJ3w,55483
+mysql/connector/authentication.py,sha256=x8Ad4fq_psqYsB3iUixLh6TSw8yzuXL2hCeid6OFby0,40184
+mysql/connector/charsets.py,sha256=gYthYrxbLW2CMzgEJHZscLUtoskR-RmmBDDTAMzvPGc,15499
+mysql/connector/connection.py,sha256=4SJ_9svUGYhFu4rZZHYI9dgk-rYhIoBsoqhcRjR432A,60107
+mysql/connector/connection_cext.py,sha256=h-iO1fmYTp4FECS_rrE8CpMNmDiXeGxhJj3bLWz6gn8,30884
+mysql/connector/constants.py,sha256=KJMGdijfFmtV_leWg4VY8mLRFCLHlmw_cWB_-ZRHQ7Q,39698
+mysql/connector/conversion.py,sha256=M_uSyMX2qXe83-O-g6wsyxCuoQwrvjCzsoKeBw-KTNA,21603
+mysql/connector/cursor.py,sha256=yfy3DvNZ3-HPC38sDc8t9vZg2AxTjgR97IRFv-uQTJk,48429
+mysql/connector/cursor_cext.py,sha256=5Nyq3tI5RG-T3h_qJFYfZ-7E5F0BhLpQ00B9-4tVYHs,33941
+mysql/connector/custom_types.py,sha256=loK__Wv4CzemP5XWrmrYnnWVck6aHMiFKxm3d5t4EUM,1956
+mysql/connector/dbapi.py,sha256=SqfBKwNzR1wTbbQmFGRbPwGMimwO4Y8fTQpxWu_nLQg,2621
+mysql/connector/django/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+mysql/connector/django/__pycache__/__init__.cpython-38.pyc,,
+mysql/connector/django/__pycache__/base.cpython-38.pyc,,
+mysql/connector/django/__pycache__/client.cpython-38.pyc,,
+mysql/connector/django/__pycache__/compiler.cpython-38.pyc,,
+mysql/connector/django/__pycache__/creation.cpython-38.pyc,,
+mysql/connector/django/__pycache__/features.cpython-38.pyc,,
+mysql/connector/django/__pycache__/introspection.cpython-38.pyc,,
+mysql/connector/django/__pycache__/operations.cpython-38.pyc,,
+mysql/connector/django/__pycache__/schema.cpython-38.pyc,,
+mysql/connector/django/__pycache__/validation.cpython-38.pyc,,
+mysql/connector/django/base.py,sha256=X0sMWyyQqs73UYMCkMUaFjKiXtXipFRIGfPbBEAfL-I,19787
+mysql/connector/django/client.py,sha256=3Jg8goY8du8hyyxnZnrNeCBJ0M9sMRjL-5MCf2IxF7o,3109
+mysql/connector/django/compiler.py,sha256=IdQvH_1uSmQglAdKFf1P5WZfyk5l-_UUdqwzyNbTiKM,1555
+mysql/connector/django/creation.py,sha256=mnDr7cM5okDZTTRN2BZrCIGn6uvuZULcgg2LP8q8Gec,1457
+mysql/connector/django/features.py,sha256=cNMhl2Vqh1Nvue7T_acCHZ9R3ydVBPzVwVHjPvl4I64,1911
+mysql/connector/django/introspection.py,sha256=PkRctkBPIUO2LtVZZIl1HiFs8MNvb2TSvYRcToiLj4g,16150
+mysql/connector/django/operations.py,sha256=8L89HG3SQWFXLegNTXZt8ZIQRSAD3IBdObv4yBMl7bw,3457
+mysql/connector/django/schema.py,sha256=n9V_85-PMWs1niGTH8Pj3ctwgYm1cuAbdpt-Hn4dRz0,1896
+mysql/connector/django/validation.py,sha256=x3I0YVpsGktdMf-C5pSn-AgufnwacS1-zvBo4tSWQ0w,1461
+mysql/connector/errorcode.py,sha256=nvEAJ4eoBTExUnPVlLhwRKyivUUajO_SSHcSE7j4emM,69341
+mysql/connector/errors.py,sha256=GGAkqJMsNrcZj6RFD_z2yk60q_Erq52fxXRxen7nFFc,10372
+mysql/connector/locales/__init__.py,sha256=H08wYvK1J2dfeJ-bBhFJh_w5LV2WAAl1FohCkXvL284,2721
+mysql/connector/locales/__pycache__/__init__.cpython-38.pyc,,
+mysql/connector/locales/eng/__init__.py,sha256=gdSd1bVd0dn4lcJxV3kOS9CimOfjmnybT_aUY31U4V8,1444
+mysql/connector/locales/eng/__pycache__/__init__.cpython-38.pyc,,
+mysql/connector/locales/eng/__pycache__/client_error.cpython-38.pyc,,
+mysql/connector/locales/eng/client_error.py,sha256=OdWtHapOteyIq95KzTHeQVYJukwgJsFsIJ3M7p5HiEg,7328
+mysql/connector/network.py,sha256=ZF14A6YZ4lFedsgZR86KpToBC02VzEbFWQ0lqLD7eL0,22076
+mysql/connector/optionfiles.py,sha256=EV4xWpZpXSvDFay_LZNO6GjRyu7Hy3AcHkH-o5sS_Ws,13344
+mysql/connector/pooling.py,sha256=YJk7dPh8I_6GXz_zeTmjsY5mB118Fcz2ILSo_aGrJGA,13104
+mysql/connector/protocol.py,sha256=V7X_pInMjRR-HrtYJG0Sj5Bt50JoWKEMjt7RhN039XE,31862
+mysql/connector/utils.py,sha256=ymYCEpcSF2T-H0hxhVdacbPy6P6LchFZC5v5Yz1hOCg,19649
+mysql/connector/version.py,sha256=RLuZMoi6cd1x1VMq3c9pJm8t6uUhnWXPWJMu7EPC3OI,1872
+mysql/vendor/libcrypto.so.1.1,sha256=ZzI97bgl0tO8CGbQOh1n2sgVkxKMqV6twLY-ir63Ojg,2813064
+mysql/vendor/libssl.so.1.1,sha256=edrR4Z6I8ABWTVjpPZ682UpnNYzpc1tnb9-JUK42vQ8,683784
+mysql/vendor/plugin/authentication_kerberos_client.so,sha256=lErOa8cagy-HNPI5-Y3vm163xWYS6ZGxJg3HmyoQQoQ,10449936
+mysql/vendor/plugin/authentication_ldap_sasl_client.so,sha256=wwMCmOFlVc-C9B11CXzJayxNvKyjAVDDNyZJSf0idbI,10072672
+mysql/vendor/plugin/authentication_oci_client.so,sha256=2u9LqWIZmDY4aydz4VmzFsNd7gZ_bz5dVxPBqsYLITI,1219456
+mysql/vendor/private/libcom_err.so.3,sha256=9RIFg_yaY3qVQ38Df14B7QOtehgM22KRvtj7ozG_O3M,28830
+mysql/vendor/private/libcom_err.so.3.0,sha256=9RIFg_yaY3qVQ38Df14B7QOtehgM22KRvtj7ozG_O3M,28830
+mysql/vendor/private/libcrypto.so.1.1,sha256=ZzI97bgl0tO8CGbQOh1n2sgVkxKMqV6twLY-ir63Ojg,2813064
+mysql/vendor/private/libgssapi_krb5.so.2,sha256=Xdn_o_XXK9Z5dW7vVKDzxXvzk7aMT1_HBvGbQsX3Aw0,1829877
+mysql/vendor/private/libgssapi_krb5.so.2.2,sha256=Xdn_o_XXK9Z5dW7vVKDzxXvzk7aMT1_HBvGbQsX3Aw0,1829877
+mysql/vendor/private/libk5crypto.so.3,sha256=zYuaOe3o3_M-62aVPGQZLrUZXg2Hk_2cLf20yWx6DDQ,872153
+mysql/vendor/private/libk5crypto.so.3.1,sha256=zYuaOe3o3_M-62aVPGQZLrUZXg2Hk_2cLf20yWx6DDQ,872153
+mysql/vendor/private/libkrb5.so.3,sha256=INKacqWvHACd9lvetLvZNnTKyP2eOnORmfitoQe6fn8,3301310
+mysql/vendor/private/libkrb5.so.3.3,sha256=INKacqWvHACd9lvetLvZNnTKyP2eOnORmfitoQe6fn8,3301310
+mysql/vendor/private/libkrb5support.so.0,sha256=fLLUXJ40EQRzQigReLz6De1NaG8RDRyQsctnPow3L2o,180560
+mysql/vendor/private/libkrb5support.so.0.1,sha256=fLLUXJ40EQRzQigReLz6De1NaG8RDRyQsctnPow3L2o,180560
+mysql/vendor/private/libsasl2.so.3,sha256=C3vHBqgco57PpynFX8CxvE0NIinxxDc9FA7nNIqI5v8,126105
+mysql/vendor/private/libsasl2.so.3.0.0,sha256=C3vHBqgco57PpynFX8CxvE0NIinxxDc9FA7nNIqI5v8,126105
+mysql/vendor/private/libssl.so.1.1,sha256=edrR4Z6I8ABWTVjpPZ682UpnNYzpc1tnb9-JUK42vQ8,683784
+mysql/vendor/private/sasl2/libanonymous.so,sha256=FuhkS-nBD0U4MLBomMkEpWC8g_ETRE761-njiNjdmbI,19825
+mysql/vendor/private/sasl2/libanonymous.so.3,sha256=FuhkS-nBD0U4MLBomMkEpWC8g_ETRE761-njiNjdmbI,19825
+mysql/vendor/private/sasl2/libanonymous.so.3.0.0,sha256=FuhkS-nBD0U4MLBomMkEpWC8g_ETRE761-njiNjdmbI,19825
+mysql/vendor/private/sasl2/libcrammd5.so,sha256=CjhLrfkseFOhJyGbV12yscXgSbG8sWSMlaBNAVGJbWY,23457
+mysql/vendor/private/sasl2/libcrammd5.so.3,sha256=CjhLrfkseFOhJyGbV12yscXgSbG8sWSMlaBNAVGJbWY,23457
+mysql/vendor/private/sasl2/libcrammd5.so.3.0.0,sha256=CjhLrfkseFOhJyGbV12yscXgSbG8sWSMlaBNAVGJbWY,23457
+mysql/vendor/private/sasl2/libdigestmd5.so,sha256=S1o7fGh7nA8hHXTCvnds7S-HJD4NUkmYPkmzkbSRm-Y,59523
+mysql/vendor/private/sasl2/libdigestmd5.so.3,sha256=S1o7fGh7nA8hHXTCvnds7S-HJD4NUkmYPkmzkbSRm-Y,59523
+mysql/vendor/private/sasl2/libdigestmd5.so.3.0.0,sha256=S1o7fGh7nA8hHXTCvnds7S-HJD4NUkmYPkmzkbSRm-Y,59523
+mysql/vendor/private/sasl2/libgs2.so,sha256=bBax2n7FmkKRpitpbCBGRm6iIH5h_0C0tJKjNE4KS64,35215
+mysql/vendor/private/sasl2/libgs2.so.3,sha256=bBax2n7FmkKRpitpbCBGRm6iIH5h_0C0tJKjNE4KS64,35215
+mysql/vendor/private/sasl2/libgs2.so.3.0.0,sha256=bBax2n7FmkKRpitpbCBGRm6iIH5h_0C0tJKjNE4KS64,35215
+mysql/vendor/private/sasl2/libgssapiv2.so,sha256=tsF3HzycdVu9KMillhJQKvyFRgVfX87t-3DFxxu7WAE,37894
+mysql/vendor/private/sasl2/libgssapiv2.so.3,sha256=tsF3HzycdVu9KMillhJQKvyFRgVfX87t-3DFxxu7WAE,37894
+mysql/vendor/private/sasl2/libgssapiv2.so.3.0.0,sha256=tsF3HzycdVu9KMillhJQKvyFRgVfX87t-3DFxxu7WAE,37894
+mysql/vendor/private/sasl2/libplain.so,sha256=-BhjxLzisimvo7lWwl0WhTt4zzDE6gxp-9uNYVYyEtI,20493
+mysql/vendor/private/sasl2/libplain.so.3,sha256=-BhjxLzisimvo7lWwl0WhTt4zzDE6gxp-9uNYVYyEtI,20493
+mysql/vendor/private/sasl2/libplain.so.3.0.0,sha256=-BhjxLzisimvo7lWwl0WhTt4zzDE6gxp-9uNYVYyEtI,20493
+mysql/vendor/private/sasl2/libscram.so,sha256=CWIJPwfCHcJN6FJJwzehs7PWbcycr5EqAHBwpicvbC8,42511
+mysql/vendor/private/sasl2/libscram.so.3,sha256=CWIJPwfCHcJN6FJJwzehs7PWbcycr5EqAHBwpicvbC8,42511
+mysql/vendor/private/sasl2/libscram.so.3.0.0,sha256=CWIJPwfCHcJN6FJJwzehs7PWbcycr5EqAHBwpicvbC8,42511
+mysql_connector_python-8.0.28.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+mysql_connector_python-8.0.28.dist-info/LICENSE.txt,sha256=9Net-jLYonTqcz-jJr3zdmW4Ym2woutz5ysdIvHUbXQ,141747
+mysql_connector_python-8.0.28.dist-info/METADATA,sha256=YT-vADaZRhnL4F6yrNvzY7mvKfBie7RLrNfInGdcKf4,1792
+mysql_connector_python-8.0.28.dist-info/RECORD,,
+mysql_connector_python-8.0.28.dist-info/WHEEL,sha256=lZtnO0mZxqr35T145kHCmb-YnzMXkVWJhJ6jnbb7-XE,103
+mysql_connector_python-8.0.28.dist-info/top_level.txt,sha256=ENfchIvxULFOQVHrVTCEURaZRfdXyBiO_V27EMhrbgY,40
+mysqlx/__init__.py,sha256=zA20jeZvgGWiaOHBwaMm6pzPxIiXpqVcRGcbcwoyVDo,32355
+mysqlx/__pycache__/__init__.cpython-38.pyc,,
+mysqlx/__pycache__/authentication.cpython-38.pyc,,
+mysqlx/__pycache__/charsets.cpython-38.pyc,,
+mysqlx/__pycache__/connection.cpython-38.pyc,,
+mysqlx/__pycache__/constants.cpython-38.pyc,,
+mysqlx/__pycache__/crud.cpython-38.pyc,,
+mysqlx/__pycache__/dbdoc.cpython-38.pyc,,
+mysqlx/__pycache__/errorcode.cpython-38.pyc,,
+mysqlx/__pycache__/errors.cpython-38.pyc,,
+mysqlx/__pycache__/expr.cpython-38.pyc,,
+mysqlx/__pycache__/helpers.cpython-38.pyc,,
+mysqlx/__pycache__/protocol.cpython-38.pyc,,
+mysqlx/__pycache__/result.cpython-38.pyc,,
+mysqlx/__pycache__/statement.cpython-38.pyc,,
+mysqlx/authentication.py,sha256=Oe5T6eH9PhcD358dlWHqADovuieC34JFLkOZBOwek2M,5431
+mysqlx/charsets.py,sha256=8veRUPKYm-AaQDxU542kFdTCDuqX8myjlt9Z1TsVW3I,15499
+mysqlx/connection.py,sha256=CIYJk2YSetJHCN5bTA-Uv_mEskGTREC2QiTY2WN4W4g,90815
+mysqlx/constants.py,sha256=dnKkxvQUFn5ont_oSr5-TgRMKBcrwpecg2JRhSsV59Y,15428
+mysqlx/crud.py,sha256=UEM0Kfk-662aS1XRMrswv9IG9UOnZSZIWvs84dLyqjQ,25519
+mysqlx/dbdoc.py,sha256=tkJBZBNqD9yujzrKIOkYhT1yyXWeORVd7CwuFswLcTA,3868
+mysqlx/errorcode.py,sha256=nvEAJ4eoBTExUnPVlLhwRKyivUUajO_SSHcSE7j4emM,69341
+mysqlx/errors.py,sha256=CVMbWRYMptM1H3_xBg6uKtQCAVg4TxxMwFkbx-izVO4,8789
+mysqlx/expr.py,sha256=l2PSVRSOgtSEZQIqloLTrQzuPAqVIvPQPKOLzwmrhjA,48259
+mysqlx/helpers.py,sha256=11JbBs1G3eaHe14-bLQoigEpfixvhKSqzciCrwIujGM,7274
+mysqlx/locales/__init__.py,sha256=1fK6KuOCErHt9QyuNg03DwEaKk4my8wguXL4bk5J2CI,2722
+mysqlx/locales/__pycache__/__init__.cpython-38.pyc,,
+mysqlx/locales/eng/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+mysqlx/locales/eng/__pycache__/__init__.cpython-38.pyc,,
+mysqlx/locales/eng/__pycache__/client_error.cpython-38.pyc,,
+mysqlx/locales/eng/client_error.py,sha256=OdWtHapOteyIq95KzTHeQVYJukwgJsFsIJ3M7p5HiEg,7328
+mysqlx/protobuf/__init__.py,sha256=9J-wfpG_ViRtyHaJzw0KOEjlfLENGmK_lJe-t_62L-s,18565
+mysqlx/protobuf/__pycache__/__init__.cpython-38.pyc,,
+mysqlx/protobuf/__pycache__/mysqlx_connection_pb2.cpython-38.pyc,,
+mysqlx/protobuf/__pycache__/mysqlx_crud_pb2.cpython-38.pyc,,
+mysqlx/protobuf/__pycache__/mysqlx_cursor_pb2.cpython-38.pyc,,
+mysqlx/protobuf/__pycache__/mysqlx_datatypes_pb2.cpython-38.pyc,,
+mysqlx/protobuf/__pycache__/mysqlx_expect_pb2.cpython-38.pyc,,
+mysqlx/protobuf/__pycache__/mysqlx_expr_pb2.cpython-38.pyc,,
+mysqlx/protobuf/__pycache__/mysqlx_notice_pb2.cpython-38.pyc,,
+mysqlx/protobuf/__pycache__/mysqlx_pb2.cpython-38.pyc,,
+mysqlx/protobuf/__pycache__/mysqlx_prepare_pb2.cpython-38.pyc,,
+mysqlx/protobuf/__pycache__/mysqlx_resultset_pb2.cpython-38.pyc,,
+mysqlx/protobuf/__pycache__/mysqlx_session_pb2.cpython-38.pyc,,
+mysqlx/protobuf/__pycache__/mysqlx_sql_pb2.cpython-38.pyc,,
+mysqlx/protobuf/mysqlx_connection_pb2.py,sha256=cey5o0zQDedc7CDP8K5AO7W3c2WLJOteDBRQ4nrsrJ4,10995
+mysqlx/protobuf/mysqlx_crud_pb2.py,sha256=UGgJi2c29DuPUsnP5bS4M6BLoA3UMTQTH-JJ8-YknTg,50572
+mysqlx/protobuf/mysqlx_cursor_pb2.py,sha256=_iJJC7T7HGTcxL2DJ2NfEu0MUjJpgBOskoIgzMSWWsM,9301
+mysqlx/protobuf/mysqlx_datatypes_pb2.py,sha256=8R2iE2l_goqLeLuHP5dEcGoVFOXetl9SqsXwnRENwVo,18174
+mysqlx/protobuf/mysqlx_expect_pb2.py,sha256=DI4VxEQ_4R8PqUrqcPNlMGTgeAiNUyJsh7-malHnpWg,9219
+mysqlx/protobuf/mysqlx_expr_pb2.py,sha256=MIuZ6KrlH5TMc5HnKeCeR8P9ixBeXzhHQO6QnddGaW4,23037
+mysqlx/protobuf/mysqlx_notice_pb2.py,sha256=SHEPXFSgiGBCsHibnrsbtMgdtZIY8Xz1cJB_WjoxsUc,18483
+mysqlx/protobuf/mysqlx_pb2.py,sha256=Fdc4Mifat88WQUBaoRg_VfCtIaF2ygjOLInk7GdpIyc,14782
+mysqlx/protobuf/mysqlx_prepare_pb2.py,sha256=fCCMkavQSA3c-85XDL9ptKAI4vioNCd1uE_2ESgrC0I,12414
+mysqlx/protobuf/mysqlx_resultset_pb2.py,sha256=08VWFwKuxbxqfBHbUirFyv_dLk8AQ8cchcHCUUDci1o,16158
+mysqlx/protobuf/mysqlx_session_pb2.py,sha256=Y8228874A9JtkK5YmIuAfeyn1ywI4BKOG1mjuYY7bz0,8570
+mysqlx/protobuf/mysqlx_sql_pb2.py,sha256=X9wgnh-_2acNRLgE8IHNAOLs_sv1SHvbMthhBIw9cTc,5680
+mysqlx/protocol.py,sha256=6VEBMYFUDIvtScLMW1BvHV-1CCwCL6pO2H1S2JjWbNo,39941
+mysqlx/result.py,sha256=nlLW-LG3GlaITvZzh9gTXBJDO1xhOZYC8AIr3INF0LY,31094
+mysqlx/statement.py,sha256=KvjHJurNVi0QuAQiZN6q8Vv5SSwkEsE-5WhIivWvz3Q,46681
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql_connector_python-8.0.28.dist-info/WHEEL b/monEnvTP/lib/python3.8/site-packages/mysql_connector_python-8.0.28.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..7ae20a049f3296c665c3bc701f4e6f33be833438
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql_connector_python-8.0.28.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.0)
+Root-Is-Purelib: false
+Tag: cp38-cp38-linux_x86_64
+
diff --git a/monEnvTP/lib/python3.8/site-packages/mysql_connector_python-8.0.28.dist-info/top_level.txt b/monEnvTP/lib/python3.8/site-packages/mysql_connector_python-8.0.28.dist-info/top_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..d05e3442a4c853f3cdfdd842fc684693aa2284c9
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysql_connector_python-8.0.28.dist-info/top_level.txt
@@ -0,0 +1,4 @@
+_mysql_connector
+_mysqlxpb
+mysql
+mysqlx
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/__init__.py b/monEnvTP/lib/python3.8/site-packages/mysqlx/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..8d9254cbd7be2c1ec2a10c50c15ac6c683e18882
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysqlx/__init__.py
@@ -0,0 +1,809 @@
+# Copyright (c) 2016, 2020, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""MySQL X DevAPI Python implementation"""
+
+import re
+import json
+import logging
+import ssl
+
+from urllib.parse import parse_qsl, unquote, urlparse
+
+try:
+ from json.decoder import JSONDecodeError
+except ImportError:
+ JSONDecodeError = ValueError
+
+from .connection import Client, Session
+from .constants import (Auth, LockContention, OPENSSL_CS_NAMES, SSLMode,
+ TLS_VERSIONS, TLS_CIPHER_SUITES)
+from .crud import Schema, Collection, Table, View
+from .dbdoc import DbDoc
+# pylint: disable=W0622
+from .errors import (Error, InterfaceError, DatabaseError, NotSupportedError,
+ DataError, IntegrityError, ProgrammingError,
+ OperationalError, InternalError, PoolError, TimeoutError)
+from .result import (Column, Row, Result, BufferingResult, RowResult,
+ SqlResult, DocResult, ColumnType)
+from .statement import (Statement, FilterableStatement, SqlStatement,
+ FindStatement, AddStatement, RemoveStatement,
+ ModifyStatement, SelectStatement, InsertStatement,
+ DeleteStatement, UpdateStatement,
+ CreateCollectionIndexStatement, Expr, ReadStatement,
+ WriteStatement)
+from .expr import ExprParser as expr
+
+
+_SPLIT_RE = re.compile(r",(?![^\(\)]*\))")
+_PRIORITY_RE = re.compile(r"^\(address=(.+),priority=(\d+)\)$", re.VERBOSE)
+_ROUTER_RE = re.compile(r"^\(address=(.+)[,]*\)$", re.VERBOSE)
+_URI_SCHEME_RE = re.compile(r"^([a-zA-Z][a-zA-Z0-9+\-.]+)://(.*)")
+_SSL_OPTS = ["ssl-cert", "ssl-ca", "ssl-key", "ssl-crl", "tls-versions",
+ "tls-ciphersuites"]
+_SESS_OPTS = _SSL_OPTS + ["user", "password", "schema", "host", "port",
+ "routers", "socket", "ssl-mode", "auth", "use-pure",
+ "connect-timeout", "connection-attributes",
+ "compression", "compression-algorithms", "dns-srv"]
+
+logging.getLogger(__name__).addHandler(logging.NullHandler())
+
+DUPLICATED_IN_LIST_ERROR = (
+ "The '{list}' list must not contain repeated values, the value "
+ "'{value}' is duplicated.")
+
+TLS_VERSION_ERROR = ("The given tls-version: '{}' is not recognized as a "
+ "valid TLS protocol version (should be one of {}).")
+
+TLS_VERSION_DEPRECATED_ERROR = ("The given tls_version: '{}' are no longer "
+ "allowed (should be one of {}).")
+
+TLS_VER_NO_SUPPORTED = ("No supported TLS protocol version found in the "
+ "'tls-versions' list '{}'. ")
+
+TLS_VERSIONS = ["TLSv1.2", "TLSv1.3"]
+
+DEPRECATED_TLS_VERSIONS = ["TLSv1", "TLSv1.1"]
+
+TLS_V1_3_SUPPORTED = False
+if hasattr(ssl, "HAS_TLSv1_3") and ssl.HAS_TLSv1_3:
+ TLS_V1_3_SUPPORTED = True
+
+
+def _parse_address_list(path):
+ """Parses a list of host, port pairs
+
+ Args:
+ path: String containing a list of routers or just router
+
+ Returns:
+ Returns a dict with parsed values of host, port and priority if
+ specified.
+ """
+ path = path.replace(" ", "")
+ array = not("," not in path and path.count(":") > 1
+ and path.count("[") == 1) and path.startswith("[") \
+ and path.endswith("]")
+
+ routers = []
+ address_list = _SPLIT_RE.split(path[1:-1] if array else path)
+ priority_count = 0
+ for address in address_list:
+ router = {}
+
+ match = _PRIORITY_RE.match(address)
+ if match:
+ address = match.group(1)
+ router["priority"] = int(match.group(2))
+ priority_count += 1
+ else:
+ match = _ROUTER_RE.match(address)
+ if match:
+ address = match.group(1)
+ router["priority"] = 100
+
+ match = urlparse("//{0}".format(address))
+ if not match.hostname:
+ raise InterfaceError("Invalid address: {0}".format(address))
+
+ try:
+ router.update(host=match.hostname, port=match.port)
+ except ValueError as err:
+ raise ProgrammingError("Invalid URI: {0}".format(err), 4002)
+
+ routers.append(router)
+
+ if 0 < priority_count < len(address_list):
+ raise ProgrammingError("You must either assign no priority to any "
+ "of the routers or give a priority for "
+ "every router", 4000)
+
+ return {"routers": routers} if array else routers[0]
+
+
+def _parse_connection_uri(uri):
+ """Parses the connection string and returns a dictionary with the
+ connection settings.
+
+ Args:
+ uri: mysqlx URI scheme to connect to a MySQL server/farm.
+
+ Returns:
+ Returns a dict with parsed values of credentials and address of the
+ MySQL server/farm.
+
+ Raises:
+ :class:`mysqlx.InterfaceError`: If contains a duplicate option or
+ URI scheme is not valid.
+ """
+ settings = {"schema": ""}
+
+ match = _URI_SCHEME_RE.match(uri)
+ scheme, uri = match.groups() if match else ("mysqlx", uri)
+
+ if scheme not in ("mysqlx", "mysqlx+srv"):
+ raise InterfaceError("Scheme '{0}' is not valid".format(scheme))
+
+ if scheme == "mysqlx+srv":
+ settings["dns-srv"] = True
+
+ userinfo, tmp = uri.partition("@")[::2]
+ host, query_str = tmp.partition("?")[::2]
+
+ pos = host.rfind("/")
+ if host[pos:].find(")") == -1 and pos > 0:
+ host, settings["schema"] = host.rsplit("/", 1)
+ host = host.strip("()")
+
+ if not host or not userinfo or ":" not in userinfo:
+ raise InterfaceError("Malformed URI '{0}'".format(uri))
+ user, password = userinfo.split(":", 1)
+ settings["user"], settings["password"] = unquote(user), unquote(password)
+
+ if host.startswith(("/", "..", ".")):
+ settings["socket"] = unquote(host)
+ elif host.startswith("\\."):
+ raise InterfaceError("Windows Pipe is not supported")
+ else:
+ settings.update(_parse_address_list(host))
+
+ invalid_options = ("user", "password", "dns-srv")
+ for key, val in parse_qsl(query_str, True):
+ opt = key.replace("_", "-").lower()
+ if opt in invalid_options:
+ raise InterfaceError("Invalid option: '{0}'".format(key))
+ if opt in settings:
+ raise InterfaceError("Duplicate option: '{0}'".format(key))
+ if opt in _SSL_OPTS:
+ settings[opt] = unquote(val.strip("()"))
+ else:
+ val_str = val.lower()
+ if val_str in ("1", "true"):
+ settings[opt] = True
+ elif val_str in ("0", "false"):
+ settings[opt] = False
+ else:
+ settings[opt] = val_str
+ return settings
+
+
+def _validate_settings(settings):
+ """Validates the settings to be passed to a Session object
+ the port values are converted to int if specified or set to 33060
+ otherwise. The priority values for each router is converted to int
+ if specified.
+
+ Args:
+ settings: dict containing connection settings.
+
+ Raises:
+ :class:`mysqlx.InterfaceError`: On any configuration issue.
+ """
+ invalid_opts = set(settings.keys()).difference(_SESS_OPTS)
+ if invalid_opts:
+ raise InterfaceError("Invalid option(s): '{0}'"
+ "".format("', '".join(invalid_opts)))
+
+ if "routers" in settings:
+ for router in settings["routers"]:
+ _validate_hosts(router, 33060)
+ elif "host" in settings:
+ _validate_hosts(settings)
+
+ if "ssl-mode" in settings:
+ try:
+ settings["ssl-mode"] = settings["ssl-mode"].lower()
+ SSLMode.index(settings["ssl-mode"])
+ except (AttributeError, ValueError):
+ raise InterfaceError("Invalid SSL Mode '{0}'"
+ "".format(settings["ssl-mode"]))
+ if settings["ssl-mode"] == SSLMode.DISABLED and \
+ any(key in settings for key in _SSL_OPTS):
+ raise InterfaceError("SSL options used with ssl-mode 'disabled'")
+
+ if "ssl-crl" in settings and not "ssl-ca" in settings:
+ raise InterfaceError("CA Certificate not provided")
+ if "ssl-key" in settings and not "ssl-cert" in settings:
+ raise InterfaceError("Client Certificate not provided")
+
+ if not "ssl-ca" in settings and settings.get("ssl-mode") \
+ in [SSLMode.VERIFY_IDENTITY, SSLMode.VERIFY_CA]:
+ raise InterfaceError("Cannot verify Server without CA")
+ if "ssl-ca" in settings and settings.get("ssl-mode") \
+ not in [SSLMode.VERIFY_IDENTITY, SSLMode.VERIFY_CA]:
+ raise InterfaceError("Must verify Server if CA is provided")
+
+ if "auth" in settings:
+ try:
+ settings["auth"] = settings["auth"].lower()
+ Auth.index(settings["auth"])
+ except (AttributeError, ValueError):
+ raise InterfaceError("Invalid Auth '{0}'".format(settings["auth"]))
+
+ if "compression" in settings:
+ compression = settings["compression"].lower().strip()
+ if compression not in ("preferred", "required", "disabled"):
+ raise InterfaceError(
+ "The connection property 'compression' acceptable values are: "
+ "'preferred', 'required', or 'disabled'. The value '{0}' is "
+ "not acceptable".format(settings["compression"]))
+ settings["compression"] = compression
+
+ if "compression-algorithms" in settings:
+ if isinstance(settings["compression-algorithms"], str):
+ compression_algorithms = \
+ settings["compression-algorithms"].strip().strip("[]")
+ if compression_algorithms:
+ settings["compression-algorithms"] = \
+ compression_algorithms.split(",")
+ else:
+ settings["compression-algorithms"] = None
+ elif not isinstance(settings["compression-algorithms"], (list, tuple)):
+ raise InterfaceError("Invalid type of the connection property "
+ "'compression-algorithms'")
+ if settings.get("compression") == "disabled":
+ settings["compression-algorithms"] = None
+
+ if "connection-attributes" in settings:
+ _validate_connection_attributes(settings)
+
+ if "connect-timeout" in settings:
+ try:
+ if isinstance(settings["connect-timeout"], str):
+ settings["connect-timeout"] = int(settings["connect-timeout"])
+ if not isinstance(settings["connect-timeout"], int) \
+ or settings["connect-timeout"] < 0:
+ raise ValueError
+ except ValueError:
+ raise TypeError("The connection timeout value must be a positive "
+ "integer (including 0)")
+
+ if "dns-srv" in settings:
+ if not isinstance(settings["dns-srv"], bool):
+ raise InterfaceError("The value of 'dns-srv' must be a boolean")
+ if settings.get("socket"):
+ raise InterfaceError("Using Unix domain sockets with DNS SRV "
+ "lookup is not allowed")
+ if settings.get("port"):
+ raise InterfaceError("Specifying a port number with DNS SRV "
+ "lookup is not allowed")
+ if settings.get("routers"):
+ raise InterfaceError("Specifying multiple hostnames with DNS "
+ "SRV look up is not allowed")
+ elif "host" in settings and not settings.get("port"):
+ settings["port"] = 33060
+
+ if "tls-versions" in settings:
+ _validate_tls_versions(settings)
+
+ if "tls-ciphersuites" in settings:
+ _validate_tls_ciphersuites(settings)
+
+
+def _validate_hosts(settings, default_port=None):
+ """Validate hosts.
+
+ Args:
+ settings (dict): Settings dictionary.
+ default_port (int): Default connection port.
+
+ Raises:
+ :class:`mysqlx.InterfaceError`: If priority or port are invalid.
+ """
+ if "priority" in settings and settings["priority"]:
+ try:
+ settings["priority"] = int(settings["priority"])
+ if settings["priority"] < 0 or settings["priority"] > 100:
+ raise ProgrammingError("Invalid priority value, "
+ "must be between 0 and 100", 4007)
+ except NameError:
+ raise ProgrammingError("Invalid priority", 4007)
+ except ValueError:
+ raise ProgrammingError(
+ "Invalid priority: {}".format(settings["priority"]), 4007)
+
+ if "port" in settings and settings["port"]:
+ try:
+ settings["port"] = int(settings["port"])
+ except NameError:
+ raise InterfaceError("Invalid port")
+ elif "host" in settings and default_port:
+ settings["port"] = default_port
+
+
+def _validate_connection_attributes(settings):
+ """Validate connection-attributes.
+
+ Args:
+ settings (dict): Settings dictionary.
+
+ Raises:
+ :class:`mysqlx.InterfaceError`: If attribute name or value exceeds size.
+ """
+ attributes = {}
+ if "connection-attributes" not in settings:
+ return
+
+ conn_attrs = settings["connection-attributes"]
+
+ if isinstance(conn_attrs, str):
+ if conn_attrs == "":
+ settings["connection-attributes"] = {}
+ return
+ if not (conn_attrs.startswith("[") and conn_attrs.endswith("]")) and \
+ not conn_attrs in ['False', "false", "True", "true"]:
+ raise InterfaceError("The value of 'connection-attributes' must "
+ "be a boolean or a list of key-value pairs, "
+ "found: '{}'".format(conn_attrs))
+ elif conn_attrs in ['False', "false", "True", "true"]:
+ if conn_attrs in ['False', "false"]:
+ settings["connection-attributes"] = False
+ else:
+ settings["connection-attributes"] = {}
+ return
+ else:
+ conn_attributes = conn_attrs[1:-1].split(",")
+ for attr in conn_attributes:
+ if attr == "":
+ continue
+ attr_name_val = attr.split('=')
+ attr_name = attr_name_val[0]
+ attr_val = attr_name_val[1] if len(attr_name_val) > 1 else ""
+ if attr_name in attributes:
+ raise InterfaceError("Duplicate key '{}' used in "
+ "connection-attributes"
+ "".format(attr_name))
+ else:
+ attributes[attr_name] = attr_val
+ elif isinstance(conn_attrs, dict):
+ for attr_name in conn_attrs:
+ attr_value = conn_attrs[attr_name]
+ if not isinstance(attr_value, str):
+ attr_value = repr(attr_value)
+ attributes[attr_name] = attr_value
+ elif isinstance(conn_attrs, bool) or conn_attrs in [0, 1]:
+ if conn_attrs:
+ settings["connection-attributes"] = {}
+ else:
+ settings["connection-attributes"] = False
+ return
+ elif isinstance(conn_attrs, set):
+ for attr_name in conn_attrs:
+ attributes[attr_name] = ""
+ elif isinstance(conn_attrs, list):
+ for attr in conn_attrs:
+ if attr == "":
+ continue
+ attr_name_val = attr.split('=')
+ attr_name = attr_name_val[0]
+ attr_val = attr_name_val[1] if len(attr_name_val) > 1 else ""
+ if attr_name in attributes:
+ raise InterfaceError("Duplicate key '{}' used in "
+ "connection-attributes"
+ "".format(attr_name))
+ else:
+ attributes[attr_name] = attr_val
+ elif not isinstance(conn_attrs, bool):
+ raise InterfaceError("connection-attributes must be Boolean or a list "
+ "of key-value pairs, found: '{}'"
+ "".format(conn_attrs))
+
+ if attributes:
+ for attr_name in attributes:
+ attr_value = attributes[attr_name]
+
+ # Validate name type
+ if not isinstance(attr_name, str):
+ raise InterfaceError("Attribute name '{}' must be a string"
+ "type".format(attr_name))
+ # Validate attribute name limit 32 characters
+ if len(attr_name) > 32:
+ raise InterfaceError("Attribute name '{}' exceeds 32 "
+ "characters limit size".format(attr_name))
+ # Validate names in connection-attributes cannot start with "_"
+ if attr_name.startswith("_"):
+ raise InterfaceError("Key names in connection-attributes "
+ "cannot start with '_', found: '{}'"
+ "".format(attr_name))
+
+ # Validate value type
+ if not isinstance(attr_value, str):
+ raise InterfaceError("Attribute '{}' value: '{}' must "
+ "be a string type"
+ "".format(attr_name, attr_value))
+ # Validate attribute value limit 1024 characters
+ if len(attr_value) > 1024:
+ raise InterfaceError("Attribute '{}' value: '{}' "
+ "exceeds 1024 characters limit size"
+ "".format(attr_name, attr_value))
+
+ settings["connection-attributes"] = attributes
+
+
+def _validate_tls_versions(settings):
+ """Validate tls-versions.
+
+ Args:
+ settings (dict): Settings dictionary.
+
+ Raises:
+ :class:`mysqlx.InterfaceError`: If tls-versions name is not valid.
+ """
+ tls_versions = []
+ if "tls-versions" not in settings:
+ return
+
+ tls_versions_settings = settings["tls-versions"]
+
+ if isinstance(tls_versions_settings, str):
+ if not (tls_versions_settings.startswith("[") and
+ tls_versions_settings.endswith("]")):
+ raise InterfaceError("tls-versions must be a list, found: '{}'"
+ "".format(tls_versions_settings))
+ else:
+ tls_vers = tls_versions_settings[1:-1].split(",")
+ for tls_ver in tls_vers:
+ tls_version = tls_ver.strip()
+ if tls_version == "":
+ continue
+ else:
+ if tls_version in tls_versions:
+ raise InterfaceError(
+ DUPLICATED_IN_LIST_ERROR.format(
+ list="tls_versions", value=tls_version))
+ tls_versions.append(tls_version)
+ elif isinstance(tls_versions_settings, list):
+ if not tls_versions_settings:
+ raise InterfaceError("At least one TLS protocol version must be "
+ "specified in 'tls-versions' list.")
+ for tls_ver in tls_versions_settings:
+ if tls_ver in tls_versions:
+ raise InterfaceError(
+ DUPLICATED_IN_LIST_ERROR.format(list="tls_versions",
+ value=tls_ver))
+ else:
+ tls_versions.append(tls_ver)
+
+ elif isinstance(tls_versions_settings, set):
+ for tls_ver in tls_versions_settings:
+ tls_versions.append(tls_ver)
+ else:
+ raise InterfaceError("tls-versions should be a list with one or more "
+ "of versions in {}. found: '{}'"
+ "".format(", ".join(TLS_VERSIONS), tls_versions))
+
+ if not tls_versions:
+ raise InterfaceError("At least one TLS protocol version must be "
+ "specified in 'tls-versions' list.")
+
+ use_tls_versions = []
+ deprecated_tls_versions = []
+ not_tls_versions = []
+ for tls_ver in tls_versions:
+ if tls_ver in TLS_VERSIONS:
+ use_tls_versions.append(tls_ver)
+ if tls_ver in DEPRECATED_TLS_VERSIONS:
+ deprecated_tls_versions.append(tls_ver)
+ else:
+ not_tls_versions.append(tls_ver)
+
+ if use_tls_versions:
+ if use_tls_versions == ["TLSv1.3"] and not TLS_V1_3_SUPPORTED:
+ raise NotSupportedError(
+ TLS_VER_NO_SUPPORTED.format(tls_versions, TLS_VERSIONS))
+ use_tls_versions.sort()
+ settings["tls-versions"] = use_tls_versions
+ elif deprecated_tls_versions:
+ raise NotSupportedError(
+ TLS_VERSION_DEPRECATED_ERROR.format(deprecated_tls_versions,
+ TLS_VERSIONS))
+ elif not_tls_versions:
+ raise InterfaceError(
+ TLS_VERSION_ERROR.format(tls_ver, TLS_VERSIONS))
+
+
+def _validate_tls_ciphersuites(settings):
+ """Validate tls-ciphersuites.
+
+ Args:
+ settings (dict): Settings dictionary.
+
+ Raises:
+ :class:`mysqlx.InterfaceError`: If tls-ciphersuites name is not valid.
+ """
+ tls_ciphersuites = []
+ if "tls-ciphersuites" not in settings:
+ return
+
+ tls_ciphersuites_settings = settings["tls-ciphersuites"]
+
+ if isinstance(tls_ciphersuites_settings, str):
+ if not (tls_ciphersuites_settings.startswith("[") and
+ tls_ciphersuites_settings.endswith("]")):
+ raise InterfaceError("tls-ciphersuites must be a list, found: '{}'"
+ "".format(tls_ciphersuites_settings))
+ else:
+ tls_css = tls_ciphersuites_settings[1:-1].split(",")
+ if not tls_css:
+ raise InterfaceError("No valid cipher suite found in the "
+ "'tls-ciphersuites' list.")
+ for tls_cs in tls_css:
+ tls_cs = tls_cs.strip().upper()
+ if tls_cs:
+ tls_ciphersuites.append(tls_cs)
+ elif isinstance(tls_ciphersuites_settings, list):
+ tls_ciphersuites = [tls_cs for tls_cs in tls_ciphersuites_settings
+ if tls_cs]
+
+ elif isinstance(tls_ciphersuites_settings, set):
+ for tls_cs in tls_ciphersuites:
+ if tls_cs:
+ tls_ciphersuites.append(tls_cs)
+ else:
+ raise InterfaceError("tls-ciphersuites should be a list with one or "
+ "more ciphersuites. Found: '{}'"
+ "".format(tls_ciphersuites_settings))
+
+ tls_versions = TLS_VERSIONS[:] if settings.get("tls-versions", None) \
+ is None else settings["tls-versions"][:]
+
+ # A newer TLS version can use a cipher introduced on
+ # an older version.
+ tls_versions.sort(reverse=True)
+ newer_tls_ver = tls_versions[0]
+
+ translated_names = []
+ iani_cipher_suites_names = {}
+ ossl_cipher_suites_names = []
+
+ # Old ciphers can work with new TLS versions.
+ # Find all the ciphers introduced on previous TLS versions
+ for tls_ver in TLS_VERSIONS[:TLS_VERSIONS.index(newer_tls_ver) + 1]:
+ iani_cipher_suites_names.update(TLS_CIPHER_SUITES[tls_ver])
+ ossl_cipher_suites_names.extend(OPENSSL_CS_NAMES[tls_ver])
+
+ for name in tls_ciphersuites:
+ if "-" in name and name in ossl_cipher_suites_names:
+ translated_names.append(name)
+ elif name in iani_cipher_suites_names:
+ translated_name = iani_cipher_suites_names[name]
+ if translated_name in translated_names:
+ raise AttributeError(
+ DUPLICATED_IN_LIST_ERROR.format(
+ list="tls_ciphersuites", value=translated_name))
+ else:
+ translated_names.append(translated_name)
+ else:
+ raise InterfaceError(
+ "The value '{}' in cipher suites is not a valid "
+ "cipher suite".format(name))
+
+ if not translated_names:
+ raise InterfaceError("No valid cipher suite found in the "
+ "'tls-ciphersuites' list.")
+
+ settings["tls-ciphersuites"] = translated_names
+
+
+def _get_connection_settings(*args, **kwargs):
+ """Parses the connection string and returns a dictionary with the
+ connection settings.
+
+ Args:
+ *args: Variable length argument list with the connection data used
+ to connect to the database. It can be a dictionary or a
+ connection string.
+ **kwargs: Arbitrary keyword arguments with connection data used to
+ connect to the database.
+
+ Returns:
+ mysqlx.Session: Session object.
+
+ Raises:
+ TypeError: If connection timeout is not a positive integer.
+ :class:`mysqlx.InterfaceError`: If settings not provided.
+ """
+ settings = {}
+ if args:
+ if isinstance(args[0], str):
+ settings = _parse_connection_uri(args[0])
+ elif isinstance(args[0], dict):
+ for key, val in args[0].items():
+ settings[key.replace("_", "-")] = val
+ elif kwargs:
+ for key, val in kwargs.items():
+ settings[key.replace("_", "-")] = val
+
+ if not settings:
+ raise InterfaceError("Settings not provided")
+
+ _validate_settings(settings)
+ return settings
+
+
+def get_session(*args, **kwargs):
+ """Creates a Session instance using the provided connection data.
+
+ Args:
+ *args: Variable length argument list with the connection data used
+ to connect to a MySQL server. It can be a dictionary or a
+ connection string.
+ **kwargs: Arbitrary keyword arguments with connection data used to
+ connect to the database.
+
+ Returns:
+ mysqlx.Session: Session object.
+ """
+ settings = _get_connection_settings(*args, **kwargs)
+ return Session(settings)
+
+
+def get_client(connection_string, options_string):
+ """Creates a Client instance with the provided connection data and settings.
+
+ Args:
+ connection_string: A string or a dict type object to indicate the \
+ connection data used to connect to a MySQL server.
+
+ The string must have the following uri format::
+
+ cnx_str = 'mysqlx://{user}:{pwd}@{host}:{port}'
+ cnx_str = ('mysqlx://{user}:{pwd}@['
+ ' (address={host}:{port}, priority=n),'
+ ' (address={host}:{port}, priority=n), ...]'
+ ' ?[option=value]')
+
+ And the dictionary::
+
+ cnx_dict = {
+ 'host': 'The host where the MySQL product is running',
+ 'port': '(int) the port number configured for X protocol',
+ 'user': 'The user name account',
+ 'password': 'The password for the given user account',
+ 'ssl-mode': 'The flags for ssl mode in mysqlx.SSLMode.FLAG',
+ 'ssl-ca': 'The path to the ca.cert'
+ "connect-timeout": '(int) milliseconds to wait on timeout'
+ }
+
+ options_string: A string in the form of a document or a dictionary \
+ type with configuration for the client.
+
+ Current options include::
+
+ options = {
+ 'pooling': {
+ 'enabled': (bool), # [True | False], True by default
+ 'max_size': (int), # Maximum connections per pool
+ "max_idle_time": (int), # milliseconds that a
+ # connection will remain active while not in use.
+ # By default 0, means infinite.
+ "queue_timeout": (int), # milliseconds a request will
+ # wait for a connection to become available.
+ # By default 0, means infinite.
+ }
+ }
+
+ Returns:
+ mysqlx.Client: Client object.
+
+ .. versionadded:: 8.0.13
+ """
+ if not isinstance(connection_string, (str, dict)):
+ raise InterfaceError("connection_data must be a string or dict")
+
+ settings_dict = _get_connection_settings(connection_string)
+
+ if not isinstance(options_string, (str, dict)):
+ raise InterfaceError("connection_options must be a string or dict")
+
+ if isinstance(options_string, str):
+ try:
+ options_dict = json.loads(options_string)
+ except JSONDecodeError:
+ raise InterfaceError("'pooling' options must be given in the form "
+ "of a document or dict")
+ else:
+ options_dict = {}
+ for key, value in options_string.items():
+ options_dict[key.replace("-", "_")] = value
+
+ if not isinstance(options_dict, dict):
+ raise InterfaceError("'pooling' options must be given in the form of a "
+ "document or dict")
+ pooling_options_dict = {}
+ if "pooling" in options_dict:
+ pooling_options = options_dict.pop("pooling")
+ if not isinstance(pooling_options, (dict)):
+ raise InterfaceError("'pooling' options must be given in the form "
+ "document or dict")
+ # Fill default pooling settings
+ pooling_options_dict["enabled"] = pooling_options.pop("enabled", True)
+ pooling_options_dict["max_size"] = pooling_options.pop("max_size", 25)
+ pooling_options_dict["max_idle_time"] = \
+ pooling_options.pop("max_idle_time", 0)
+ pooling_options_dict["queue_timeout"] = \
+ pooling_options.pop("queue_timeout", 0)
+
+ # No other options besides pooling are supported
+ if len(pooling_options) > 0:
+ raise InterfaceError("Unrecognized pooling options: {}"
+ "".format(pooling_options))
+ # No other options besides pooling are supported
+ if len(options_dict) > 0:
+ raise InterfaceError("Unrecognized connection options: {}"
+ "".format(options_dict.keys()))
+
+ return Client(settings_dict, pooling_options_dict)
+
+
+__all__ = [
+ # mysqlx.connection
+ "Client", "Session", "get_client", "get_session", "expr",
+
+ # mysqlx.constants
+ "Auth", "LockContention", "SSLMode",
+
+ # mysqlx.crud
+ "Schema", "Collection", "Table", "View",
+
+ # mysqlx.errors
+ "Error", "InterfaceError", "DatabaseError", "NotSupportedError",
+ "DataError", "IntegrityError", "ProgrammingError", "OperationalError",
+ "InternalError", "PoolError", "TimeoutError",
+
+ # mysqlx.result
+ "Column", "Row", "Result", "BufferingResult", "RowResult",
+ "SqlResult", "DocResult", "ColumnType",
+
+ # mysqlx.statement
+ "DbDoc", "Statement", "FilterableStatement", "SqlStatement",
+ "FindStatement", "AddStatement", "RemoveStatement", "ModifyStatement",
+ "SelectStatement", "InsertStatement", "DeleteStatement", "UpdateStatement",
+ "ReadStatement", "WriteStatement", "CreateCollectionIndexStatement",
+ "Expr",
+]
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..dff15f6ab8b390e59754177aaae0436b72d16f22
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/authentication.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/authentication.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..43bb8835d924679ae1246b528319e375bd8a80de
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/authentication.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/charsets.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/charsets.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..95b4e740493752831a2d3c83a76af374e6064fcc
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/charsets.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/connection.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/connection.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..036e79743926c5ef26984aed332ca6f03ff6c164
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/connection.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/constants.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/constants.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..677ee25d242015e4a8185ebd427d9af9ce6047de
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/constants.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/crud.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/crud.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1ae188a82155a3632f6eb02c7b233f883d2efa9e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/crud.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/dbdoc.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/dbdoc.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4763ee6e15965f58bdc16ea7f2cad94466128dc7
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/dbdoc.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/errorcode.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/errorcode.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..db11d5d14ac892cf02f4484175248e85e152ee78
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/errorcode.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/errors.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/errors.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..caf49ac6edabbf682fe040db6016cd4ec49a8c3f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/errors.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/expr.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/expr.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..899b9f224be9d6f7f310e6c509671c33d73089b4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/expr.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/helpers.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/helpers.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..385735b9a3c573ac88bfb341e552ab5f0ec8f97f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/helpers.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/protocol.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/protocol.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..69825ef8f6f2cc6b1827fa5b2b08089118711429
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/protocol.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/result.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/result.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9b86143250675373d3adeea39dcdbbac917f48bc
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/result.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/statement.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/statement.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..698e8f907271c8518a6fdcbece0544916750e7b3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysqlx/__pycache__/statement.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/authentication.py b/monEnvTP/lib/python3.8/site-packages/mysqlx/authentication.py
new file mode 100644
index 0000000000000000000000000000000000000000..790d074322597c94718db1631ea332656c11063e
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysqlx/authentication.py
@@ -0,0 +1,174 @@
+# Copyright (c) 2016, 2020, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""Implementation of MySQL Authentication Plugin."""
+
+import hashlib
+import struct
+
+from .helpers import hexlify
+
+
+def xor_string(hash1, hash2, hash_size):
+ """Encrypt/Decrypt function used for password encryption in
+ authentication, using a simple XOR.
+
+ Args:
+ hash1 (str): The first hash.
+ hash2 (str): The second hash.
+
+ Returns:
+ str: A string with the xor applied.
+ """
+ xored = [h1 ^ h2 for (h1, h2) in zip(hash1, hash2)]
+ return struct.pack("{0}B".format(hash_size), *xored)
+
+
+class BaseAuthPlugin(object):
+ """Base class for implementing the authentication plugins."""
+ def __init__(self, username=None, password=None):
+ self._username = username
+ self._password = password
+
+ def name(self):
+ """Returns the plugin name.
+
+ Returns:
+ str: The plugin name.
+ """
+ raise NotImplementedError
+
+ def auth_name(self):
+ """Returns the authentication name.
+
+ Returns:
+ str: The authentication name.
+ """
+ raise NotImplementedError
+
+
+class MySQL41AuthPlugin(BaseAuthPlugin):
+ """Class implementing the MySQL Native Password authentication plugin."""
+ def name(self):
+ """Returns the plugin name.
+
+ Returns:
+ str: The plugin name.
+ """
+ return "MySQL 4.1 Authentication Plugin"
+
+ def auth_name(self):
+ """Returns the authentication name.
+
+ Returns:
+ str: The authentication name.
+ """
+ return "MYSQL41"
+
+ def auth_data(self, data):
+ """Hashing for MySQL 4.1 authentication.
+
+ Args:
+ data (str): The authentication data.
+
+ Returns:
+ str: The authentication response.
+ """
+ if self._password:
+ password = self._password.encode("utf-8") \
+ if isinstance(self._password, str) else self._password
+ hash1 = hashlib.sha1(password).digest()
+ hash2 = hashlib.sha1(hash1).digest()
+ xored = xor_string(hash1, hashlib.sha1(data + hash2).digest(), 20)
+ return "{0}\0{1}\0*{2}\0".format("", self._username, hexlify(xored))
+ return "{0}\0{1}\0".format("", self._username)
+
+
+class PlainAuthPlugin(BaseAuthPlugin):
+ """Class implementing the MySQL Plain authentication plugin."""
+ def name(self):
+ """Returns the plugin name.
+
+ Returns:
+ str: The plugin name.
+ """
+ return "Plain Authentication Plugin"
+
+ def auth_name(self):
+ """Returns the authentication name.
+
+ Returns:
+ str: The authentication name.
+ """
+ return "PLAIN"
+
+ def auth_data(self):
+ """Returns the authentication data.
+
+ Returns:
+ str: The authentication data.
+ """
+ return "\0{0}\0{1}".format(self._username, self._password)
+
+
+class Sha256MemoryAuthPlugin(BaseAuthPlugin):
+ """Class implementing the SHA256_MEMORY authentication plugin."""
+ def name(self):
+ """Returns the plugin name.
+
+ Returns:
+ str: The plugin name.
+ """
+ return "SHA256_MEMORY Authentication Plugin"
+
+ def auth_name(self):
+ """Returns the authentication name.
+
+ Returns:
+ str: The authentication name.
+ """
+ return "SHA256_MEMORY"
+
+ def auth_data(self, data):
+ """Hashing for SHA256_MEMORY authentication.
+
+ The scramble is of the form:
+ SHA256(SHA256(SHA256(PASSWORD)),NONCE) XOR SHA256(PASSWORD)
+
+ Args:
+ data (str): The authentication data.
+
+ Returns:
+ str: The authentication response.
+ """
+ password = self._password.encode("utf-8") \
+ if isinstance(self._password, str) else self._password
+ hash1 = hashlib.sha256(password).digest()
+ hash2 = hashlib.sha256(hashlib.sha256(hash1).digest() + data).digest()
+ xored = xor_string(hash2, hash1, 32)
+ return "\0{0}\0{1}".format(self._username, hexlify(xored))
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/charsets.py b/monEnvTP/lib/python3.8/site-packages/mysqlx/charsets.py
new file mode 100644
index 0000000000000000000000000000000000000000..90d5d47ba03f750d90021971e497a9ba00f13093
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysqlx/charsets.py
@@ -0,0 +1,350 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2016, 2019, Oracle and/or its affiliates. All rights reserved.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+# This file was auto-generated.
+_GENERATED_ON = '2019-04-29'
+_MYSQL_VERSION = (8, 0, 17)
+
+"""This module contains the MySQL Server Character Sets"""
+
+MYSQL_CHARACTER_SETS = [
+ # (character set name, collation, default)
+ None,
+ ("big5", "big5_chinese_ci", True), # 1
+ ("latin2", "latin2_czech_cs", False), # 2
+ ("dec8", "dec8_swedish_ci", True), # 3
+ ("cp850", "cp850_general_ci", True), # 4
+ ("latin1", "latin1_german1_ci", False), # 5
+ ("hp8", "hp8_english_ci", True), # 6
+ ("koi8r", "koi8r_general_ci", True), # 7
+ ("latin1", "latin1_swedish_ci", True), # 8
+ ("latin2", "latin2_general_ci", True), # 9
+ ("swe7", "swe7_swedish_ci", True), # 10
+ ("ascii", "ascii_general_ci", True), # 11
+ ("ujis", "ujis_japanese_ci", True), # 12
+ ("sjis", "sjis_japanese_ci", True), # 13
+ ("cp1251", "cp1251_bulgarian_ci", False), # 14
+ ("latin1", "latin1_danish_ci", False), # 15
+ ("hebrew", "hebrew_general_ci", True), # 16
+ None,
+ ("tis620", "tis620_thai_ci", True), # 18
+ ("euckr", "euckr_korean_ci", True), # 19
+ ("latin7", "latin7_estonian_cs", False), # 20
+ ("latin2", "latin2_hungarian_ci", False), # 21
+ ("koi8u", "koi8u_general_ci", True), # 22
+ ("cp1251", "cp1251_ukrainian_ci", False), # 23
+ ("gb2312", "gb2312_chinese_ci", True), # 24
+ ("greek", "greek_general_ci", True), # 25
+ ("cp1250", "cp1250_general_ci", True), # 26
+ ("latin2", "latin2_croatian_ci", False), # 27
+ ("gbk", "gbk_chinese_ci", True), # 28
+ ("cp1257", "cp1257_lithuanian_ci", False), # 29
+ ("latin5", "latin5_turkish_ci", True), # 30
+ ("latin1", "latin1_german2_ci", False), # 31
+ ("armscii8", "armscii8_general_ci", True), # 32
+ ("utf8", "utf8_general_ci", True), # 33
+ ("cp1250", "cp1250_czech_cs", False), # 34
+ ("ucs2", "ucs2_general_ci", True), # 35
+ ("cp866", "cp866_general_ci", True), # 36
+ ("keybcs2", "keybcs2_general_ci", True), # 37
+ ("macce", "macce_general_ci", True), # 38
+ ("macroman", "macroman_general_ci", True), # 39
+ ("cp852", "cp852_general_ci", True), # 40
+ ("latin7", "latin7_general_ci", True), # 41
+ ("latin7", "latin7_general_cs", False), # 42
+ ("macce", "macce_bin", False), # 43
+ ("cp1250", "cp1250_croatian_ci", False), # 44
+ ("utf8mb4", "utf8mb4_general_ci", False), # 45
+ ("utf8mb4", "utf8mb4_bin", False), # 46
+ ("latin1", "latin1_bin", False), # 47
+ ("latin1", "latin1_general_ci", False), # 48
+ ("latin1", "latin1_general_cs", False), # 49
+ ("cp1251", "cp1251_bin", False), # 50
+ ("cp1251", "cp1251_general_ci", True), # 51
+ ("cp1251", "cp1251_general_cs", False), # 52
+ ("macroman", "macroman_bin", False), # 53
+ ("utf16", "utf16_general_ci", True), # 54
+ ("utf16", "utf16_bin", False), # 55
+ ("utf16le", "utf16le_general_ci", True), # 56
+ ("cp1256", "cp1256_general_ci", True), # 57
+ ("cp1257", "cp1257_bin", False), # 58
+ ("cp1257", "cp1257_general_ci", True), # 59
+ ("utf32", "utf32_general_ci", True), # 60
+ ("utf32", "utf32_bin", False), # 61
+ ("utf16le", "utf16le_bin", False), # 62
+ ("binary", "binary", True), # 63
+ ("armscii8", "armscii8_bin", False), # 64
+ ("ascii", "ascii_bin", False), # 65
+ ("cp1250", "cp1250_bin", False), # 66
+ ("cp1256", "cp1256_bin", False), # 67
+ ("cp866", "cp866_bin", False), # 68
+ ("dec8", "dec8_bin", False), # 69
+ ("greek", "greek_bin", False), # 70
+ ("hebrew", "hebrew_bin", False), # 71
+ ("hp8", "hp8_bin", False), # 72
+ ("keybcs2", "keybcs2_bin", False), # 73
+ ("koi8r", "koi8r_bin", False), # 74
+ ("koi8u", "koi8u_bin", False), # 75
+ ("utf8", "utf8_tolower_ci", False), # 76
+ ("latin2", "latin2_bin", False), # 77
+ ("latin5", "latin5_bin", False), # 78
+ ("latin7", "latin7_bin", False), # 79
+ ("cp850", "cp850_bin", False), # 80
+ ("cp852", "cp852_bin", False), # 81
+ ("swe7", "swe7_bin", False), # 82
+ ("utf8", "utf8_bin", False), # 83
+ ("big5", "big5_bin", False), # 84
+ ("euckr", "euckr_bin", False), # 85
+ ("gb2312", "gb2312_bin", False), # 86
+ ("gbk", "gbk_bin", False), # 87
+ ("sjis", "sjis_bin", False), # 88
+ ("tis620", "tis620_bin", False), # 89
+ ("ucs2", "ucs2_bin", False), # 90
+ ("ujis", "ujis_bin", False), # 91
+ ("geostd8", "geostd8_general_ci", True), # 92
+ ("geostd8", "geostd8_bin", False), # 93
+ ("latin1", "latin1_spanish_ci", False), # 94
+ ("cp932", "cp932_japanese_ci", True), # 95
+ ("cp932", "cp932_bin", False), # 96
+ ("eucjpms", "eucjpms_japanese_ci", True), # 97
+ ("eucjpms", "eucjpms_bin", False), # 98
+ ("cp1250", "cp1250_polish_ci", False), # 99
+ None,
+ ("utf16", "utf16_unicode_ci", False), # 101
+ ("utf16", "utf16_icelandic_ci", False), # 102
+ ("utf16", "utf16_latvian_ci", False), # 103
+ ("utf16", "utf16_romanian_ci", False), # 104
+ ("utf16", "utf16_slovenian_ci", False), # 105
+ ("utf16", "utf16_polish_ci", False), # 106
+ ("utf16", "utf16_estonian_ci", False), # 107
+ ("utf16", "utf16_spanish_ci", False), # 108
+ ("utf16", "utf16_swedish_ci", False), # 109
+ ("utf16", "utf16_turkish_ci", False), # 110
+ ("utf16", "utf16_czech_ci", False), # 111
+ ("utf16", "utf16_danish_ci", False), # 112
+ ("utf16", "utf16_lithuanian_ci", False), # 113
+ ("utf16", "utf16_slovak_ci", False), # 114
+ ("utf16", "utf16_spanish2_ci", False), # 115
+ ("utf16", "utf16_roman_ci", False), # 116
+ ("utf16", "utf16_persian_ci", False), # 117
+ ("utf16", "utf16_esperanto_ci", False), # 118
+ ("utf16", "utf16_hungarian_ci", False), # 119
+ ("utf16", "utf16_sinhala_ci", False), # 120
+ ("utf16", "utf16_german2_ci", False), # 121
+ ("utf16", "utf16_croatian_ci", False), # 122
+ ("utf16", "utf16_unicode_520_ci", False), # 123
+ ("utf16", "utf16_vietnamese_ci", False), # 124
+ None,
+ None,
+ None,
+ ("ucs2", "ucs2_unicode_ci", False), # 128
+ ("ucs2", "ucs2_icelandic_ci", False), # 129
+ ("ucs2", "ucs2_latvian_ci", False), # 130
+ ("ucs2", "ucs2_romanian_ci", False), # 131
+ ("ucs2", "ucs2_slovenian_ci", False), # 132
+ ("ucs2", "ucs2_polish_ci", False), # 133
+ ("ucs2", "ucs2_estonian_ci", False), # 134
+ ("ucs2", "ucs2_spanish_ci", False), # 135
+ ("ucs2", "ucs2_swedish_ci", False), # 136
+ ("ucs2", "ucs2_turkish_ci", False), # 137
+ ("ucs2", "ucs2_czech_ci", False), # 138
+ ("ucs2", "ucs2_danish_ci", False), # 139
+ ("ucs2", "ucs2_lithuanian_ci", False), # 140
+ ("ucs2", "ucs2_slovak_ci", False), # 141
+ ("ucs2", "ucs2_spanish2_ci", False), # 142
+ ("ucs2", "ucs2_roman_ci", False), # 143
+ ("ucs2", "ucs2_persian_ci", False), # 144
+ ("ucs2", "ucs2_esperanto_ci", False), # 145
+ ("ucs2", "ucs2_hungarian_ci", False), # 146
+ ("ucs2", "ucs2_sinhala_ci", False), # 147
+ ("ucs2", "ucs2_german2_ci", False), # 148
+ ("ucs2", "ucs2_croatian_ci", False), # 149
+ ("ucs2", "ucs2_unicode_520_ci", False), # 150
+ ("ucs2", "ucs2_vietnamese_ci", False), # 151
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ ("ucs2", "ucs2_general_mysql500_ci", False), # 159
+ ("utf32", "utf32_unicode_ci", False), # 160
+ ("utf32", "utf32_icelandic_ci", False), # 161
+ ("utf32", "utf32_latvian_ci", False), # 162
+ ("utf32", "utf32_romanian_ci", False), # 163
+ ("utf32", "utf32_slovenian_ci", False), # 164
+ ("utf32", "utf32_polish_ci", False), # 165
+ ("utf32", "utf32_estonian_ci", False), # 166
+ ("utf32", "utf32_spanish_ci", False), # 167
+ ("utf32", "utf32_swedish_ci", False), # 168
+ ("utf32", "utf32_turkish_ci", False), # 169
+ ("utf32", "utf32_czech_ci", False), # 170
+ ("utf32", "utf32_danish_ci", False), # 171
+ ("utf32", "utf32_lithuanian_ci", False), # 172
+ ("utf32", "utf32_slovak_ci", False), # 173
+ ("utf32", "utf32_spanish2_ci", False), # 174
+ ("utf32", "utf32_roman_ci", False), # 175
+ ("utf32", "utf32_persian_ci", False), # 176
+ ("utf32", "utf32_esperanto_ci", False), # 177
+ ("utf32", "utf32_hungarian_ci", False), # 178
+ ("utf32", "utf32_sinhala_ci", False), # 179
+ ("utf32", "utf32_german2_ci", False), # 180
+ ("utf32", "utf32_croatian_ci", False), # 181
+ ("utf32", "utf32_unicode_520_ci", False), # 182
+ ("utf32", "utf32_vietnamese_ci", False), # 183
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ ("utf8", "utf8_unicode_ci", False), # 192
+ ("utf8", "utf8_icelandic_ci", False), # 193
+ ("utf8", "utf8_latvian_ci", False), # 194
+ ("utf8", "utf8_romanian_ci", False), # 195
+ ("utf8", "utf8_slovenian_ci", False), # 196
+ ("utf8", "utf8_polish_ci", False), # 197
+ ("utf8", "utf8_estonian_ci", False), # 198
+ ("utf8", "utf8_spanish_ci", False), # 199
+ ("utf8", "utf8_swedish_ci", False), # 200
+ ("utf8", "utf8_turkish_ci", False), # 201
+ ("utf8", "utf8_czech_ci", False), # 202
+ ("utf8", "utf8_danish_ci", False), # 203
+ ("utf8", "utf8_lithuanian_ci", False), # 204
+ ("utf8", "utf8_slovak_ci", False), # 205
+ ("utf8", "utf8_spanish2_ci", False), # 206
+ ("utf8", "utf8_roman_ci", False), # 207
+ ("utf8", "utf8_persian_ci", False), # 208
+ ("utf8", "utf8_esperanto_ci", False), # 209
+ ("utf8", "utf8_hungarian_ci", False), # 210
+ ("utf8", "utf8_sinhala_ci", False), # 211
+ ("utf8", "utf8_german2_ci", False), # 212
+ ("utf8", "utf8_croatian_ci", False), # 213
+ ("utf8", "utf8_unicode_520_ci", False), # 214
+ ("utf8", "utf8_vietnamese_ci", False), # 215
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ ("utf8", "utf8_general_mysql500_ci", False), # 223
+ ("utf8mb4", "utf8mb4_unicode_ci", False), # 224
+ ("utf8mb4", "utf8mb4_icelandic_ci", False), # 225
+ ("utf8mb4", "utf8mb4_latvian_ci", False), # 226
+ ("utf8mb4", "utf8mb4_romanian_ci", False), # 227
+ ("utf8mb4", "utf8mb4_slovenian_ci", False), # 228
+ ("utf8mb4", "utf8mb4_polish_ci", False), # 229
+ ("utf8mb4", "utf8mb4_estonian_ci", False), # 230
+ ("utf8mb4", "utf8mb4_spanish_ci", False), # 231
+ ("utf8mb4", "utf8mb4_swedish_ci", False), # 232
+ ("utf8mb4", "utf8mb4_turkish_ci", False), # 233
+ ("utf8mb4", "utf8mb4_czech_ci", False), # 234
+ ("utf8mb4", "utf8mb4_danish_ci", False), # 235
+ ("utf8mb4", "utf8mb4_lithuanian_ci", False), # 236
+ ("utf8mb4", "utf8mb4_slovak_ci", False), # 237
+ ("utf8mb4", "utf8mb4_spanish2_ci", False), # 238
+ ("utf8mb4", "utf8mb4_roman_ci", False), # 239
+ ("utf8mb4", "utf8mb4_persian_ci", False), # 240
+ ("utf8mb4", "utf8mb4_esperanto_ci", False), # 241
+ ("utf8mb4", "utf8mb4_hungarian_ci", False), # 242
+ ("utf8mb4", "utf8mb4_sinhala_ci", False), # 243
+ ("utf8mb4", "utf8mb4_german2_ci", False), # 244
+ ("utf8mb4", "utf8mb4_croatian_ci", False), # 245
+ ("utf8mb4", "utf8mb4_unicode_520_ci", False), # 246
+ ("utf8mb4", "utf8mb4_vietnamese_ci", False), # 247
+ ("gb18030", "gb18030_chinese_ci", True), # 248
+ ("gb18030", "gb18030_bin", False), # 249
+ ("gb18030", "gb18030_unicode_520_ci", False), # 250
+ None,
+ None,
+ None,
+ None,
+ ("utf8mb4", "utf8mb4_0900_ai_ci", True), # 255
+ ("utf8mb4", "utf8mb4_de_pb_0900_ai_ci", False), # 256
+ ("utf8mb4", "utf8mb4_is_0900_ai_ci", False), # 257
+ ("utf8mb4", "utf8mb4_lv_0900_ai_ci", False), # 258
+ ("utf8mb4", "utf8mb4_ro_0900_ai_ci", False), # 259
+ ("utf8mb4", "utf8mb4_sl_0900_ai_ci", False), # 260
+ ("utf8mb4", "utf8mb4_pl_0900_ai_ci", False), # 261
+ ("utf8mb4", "utf8mb4_et_0900_ai_ci", False), # 262
+ ("utf8mb4", "utf8mb4_es_0900_ai_ci", False), # 263
+ ("utf8mb4", "utf8mb4_sv_0900_ai_ci", False), # 264
+ ("utf8mb4", "utf8mb4_tr_0900_ai_ci", False), # 265
+ ("utf8mb4", "utf8mb4_cs_0900_ai_ci", False), # 266
+ ("utf8mb4", "utf8mb4_da_0900_ai_ci", False), # 267
+ ("utf8mb4", "utf8mb4_lt_0900_ai_ci", False), # 268
+ ("utf8mb4", "utf8mb4_sk_0900_ai_ci", False), # 269
+ ("utf8mb4", "utf8mb4_es_trad_0900_ai_ci", False), # 270
+ ("utf8mb4", "utf8mb4_la_0900_ai_ci", False), # 271
+ None,
+ ("utf8mb4", "utf8mb4_eo_0900_ai_ci", False), # 273
+ ("utf8mb4", "utf8mb4_hu_0900_ai_ci", False), # 274
+ ("utf8mb4", "utf8mb4_hr_0900_ai_ci", False), # 275
+ None,
+ ("utf8mb4", "utf8mb4_vi_0900_ai_ci", False), # 277
+ ("utf8mb4", "utf8mb4_0900_as_cs", False), # 278
+ ("utf8mb4", "utf8mb4_de_pb_0900_as_cs", False), # 279
+ ("utf8mb4", "utf8mb4_is_0900_as_cs", False), # 280
+ ("utf8mb4", "utf8mb4_lv_0900_as_cs", False), # 281
+ ("utf8mb4", "utf8mb4_ro_0900_as_cs", False), # 282
+ ("utf8mb4", "utf8mb4_sl_0900_as_cs", False), # 283
+ ("utf8mb4", "utf8mb4_pl_0900_as_cs", False), # 284
+ ("utf8mb4", "utf8mb4_et_0900_as_cs", False), # 285
+ ("utf8mb4", "utf8mb4_es_0900_as_cs", False), # 286
+ ("utf8mb4", "utf8mb4_sv_0900_as_cs", False), # 287
+ ("utf8mb4", "utf8mb4_tr_0900_as_cs", False), # 288
+ ("utf8mb4", "utf8mb4_cs_0900_as_cs", False), # 289
+ ("utf8mb4", "utf8mb4_da_0900_as_cs", False), # 290
+ ("utf8mb4", "utf8mb4_lt_0900_as_cs", False), # 291
+ ("utf8mb4", "utf8mb4_sk_0900_as_cs", False), # 292
+ ("utf8mb4", "utf8mb4_es_trad_0900_as_cs", False), # 293
+ ("utf8mb4", "utf8mb4_la_0900_as_cs", False), # 294
+ None,
+ ("utf8mb4", "utf8mb4_eo_0900_as_cs", False), # 296
+ ("utf8mb4", "utf8mb4_hu_0900_as_cs", False), # 297
+ ("utf8mb4", "utf8mb4_hr_0900_as_cs", False), # 298
+ None,
+ ("utf8mb4", "utf8mb4_vi_0900_as_cs", False), # 300
+ None,
+ None,
+ ("utf8mb4", "utf8mb4_ja_0900_as_cs", False), # 303
+ ("utf8mb4", "utf8mb4_ja_0900_as_cs_ks", False), # 304
+ ("utf8mb4", "utf8mb4_0900_as_ci", False), # 305
+ ("utf8mb4", "utf8mb4_ru_0900_ai_ci", False), # 306
+ ("utf8mb4", "utf8mb4_ru_0900_as_cs", False), # 307
+ ("utf8mb4", "utf8mb4_zh_0900_as_cs", False), # 308
+ ("utf8mb4", "utf8mb4_0900_bin", False), # 309
+]
+
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/connection.py b/monEnvTP/lib/python3.8/site-packages/mysqlx/connection.py
new file mode 100644
index 0000000000000000000000000000000000000000..c5355c497d55efdb1d62835a3eab73907b893a38
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysqlx/connection.py
@@ -0,0 +1,2405 @@
+# Copyright (c) 2016, 2021, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""Implementation of communication for MySQL X servers."""
+
+try:
+ import ssl
+ SSL_AVAILABLE = True
+ TLS_VERSIONS = {
+ "TLSv1": ssl.PROTOCOL_TLSv1,
+ "TLSv1.1": ssl.PROTOCOL_TLSv1_1,
+ "TLSv1.2": ssl.PROTOCOL_TLSv1_2}
+ # TLSv1.3 included in PROTOCOL_TLS, but PROTOCOL_TLS is not included on 3.4
+ if hasattr(ssl, "PROTOCOL_TLS"):
+ TLS_VERSIONS["TLSv1.3"] = ssl.PROTOCOL_TLS # pylint: disable=E1101
+ else:
+ TLS_VERSIONS["TLSv1.3"] = ssl.PROTOCOL_SSLv23 # Alias of PROTOCOL_TLS
+ if hasattr(ssl, "HAS_TLSv1_3") and ssl.HAS_TLSv1_3:
+ TLS_V1_3_SUPPORTED = True
+ else:
+ TLS_V1_3_SUPPORTED = False
+except:
+ SSL_AVAILABLE = False
+ TLS_V1_3_SUPPORTED = False
+
+import sys
+import socket
+import logging
+import uuid
+import platform
+import queue
+import os
+import random
+import re
+import threading
+import warnings
+
+try:
+ import dns.resolver
+ import dns.exception
+except ImportError:
+ HAVE_DNSPYTHON = False
+else:
+ HAVE_DNSPYTHON = True
+
+from datetime import datetime, timedelta
+from functools import wraps
+
+from .authentication import (MySQL41AuthPlugin, PlainAuthPlugin,
+ Sha256MemoryAuthPlugin)
+# pylint: disable=W0622
+from .errors import (InterfaceError, NotSupportedError, OperationalError,
+ PoolError, ProgrammingError, TimeoutError)
+from .crud import Schema
+from .constants import SSLMode, Auth, COMPRESSION_ALGORITHMS
+from .helpers import escape, get_item_or_attr, iani_to_openssl_cs_name
+from .protocol import (Protocol, MessageReader, MessageWriter, HAVE_LZ4,
+ HAVE_ZSTD)
+from .result import BaseResult, Result, RowResult, SqlResult, DocResult
+from .statement import SqlStatement, AddStatement, quote_identifier
+from .protobuf import Protobuf
+
+# pylint: disable=C0411,C0413
+sys.path.append("..")
+from mysql.connector.utils import linux_distribution
+from mysql.connector.version import VERSION, LICENSE
+
+
+_CONNECT_TIMEOUT = 10000 # Default connect timeout in milliseconds
+_DROP_DATABASE_QUERY = "DROP DATABASE IF EXISTS {0}"
+_CREATE_DATABASE_QUERY = "CREATE DATABASE IF NOT EXISTS {0}"
+_SELECT_SCHEMA_NAME_QUERY = ("SELECT SCHEMA_NAME FROM INFORMATION_SCHEMA."
+ "SCHEMATA WHERE SCHEMA_NAME = '{}'")
+_SHOW_VERSION_QUERY = 'SHOW VARIABLES LIKE "VERSION"'
+
+_CNX_POOL_MAXSIZE = 99
+_CNX_POOL_MAX_NAME_SIZE = 120
+_CNX_POOL_NAME_REGEX = re.compile(r'[^a-zA-Z0-9._:\-*$#]')
+_CNX_POOL_MAX_IDLE_TIME = 2147483
+_CNX_POOL_QUEUE_TIMEOUT = 2147483
+
+# Time is on seconds
+_PENALTY_SERVER_OFFLINE = 1000000
+_PENALTY_MAXED_OUT = 60
+_PENALTY_NO_ADD_INFO = 60 * 60
+_PENALTY_CONN_TIMEOUT = 60 * 60
+_PENALTY_WRONG_PASSW = 60 * 60 * 24
+_PENALTY_RESTARTING = 60
+_TIMEOUT_PENALTIES = {
+ # Server denays service e.g Max connections reached
+ "[WinError 10053]": _PENALTY_MAXED_OUT, # Established connection was aborted
+ "[Errno 32]": _PENALTY_MAXED_OUT, # Broken pipe
+ # Server is Offline
+ "[WinError 10061]": _PENALTY_SERVER_OFFLINE, # Target machine actively refused it
+ "[Errno 111]": _PENALTY_SERVER_OFFLINE, # Connection refused
+ # Host is offline:
+ "[WinError 10060]": _PENALTY_CONN_TIMEOUT, # Not respond after a period of time
+ # No route to Host:
+ "[Errno 11001]": _PENALTY_NO_ADD_INFO,# getaddrinfo failed
+ "[Errno -2]": _PENALTY_NO_ADD_INFO, # Name or service not known
+ # Wrong Password
+ "Access denied": _PENALTY_WRONG_PASSW
+}
+_TIMEOUT_PENALTIES_BY_ERR_NO = {
+ 1053: _PENALTY_RESTARTING
+}
+CONNECTION_CLOSED_ERROR = {
+ 1810: 'This session was closed because the connection has been idle for too '
+ 'long. Use "mysqlx.getSession()" or "mysqlx.getClient()" to create a '
+ 'new one.',
+ 1053: 'This session was closed because the server is shutting down.',
+ 3169: 'This session was closed because the connection has been killed in a '
+ 'different session. Use "mysqlx.getSession()" or "mysqlx.getClient()" '
+ 'to create a new one.',
+};
+_LOGGER = logging.getLogger("mysqlx")
+
+
+def generate_pool_name(**kwargs):
+ """Generate a pool name.
+
+ This function takes keyword arguments, usually the connection arguments and
+ tries to generate a name for the pool.
+
+ Args:
+ **kwargs: Arbitrary keyword arguments with the connection arguments.
+
+ Raises:
+ PoolError: If the name can't be generated.
+
+ Returns:
+ str: The generated pool name.
+ """
+ parts = []
+ for key in ("host", "port", "user", "database", "client_id"):
+ try:
+ parts.append(str(kwargs[key]))
+ except KeyError:
+ pass
+
+ if not parts:
+ raise PoolError("Failed generating pool name; specify pool_name")
+
+ return "_".join(parts)
+
+
+def update_timeout_penalties_by_error(penalty_dict):
+ """Update the timeout penalties directory.
+
+ Update the timeout penalties by error dictionary used to deactivate a pool.
+ Args:
+ penalty_dict (dict): The dictionary with the new timeouts.
+ """
+ if penalty_dict and isinstance(penalty_dict, dict):
+ _TIMEOUT_PENALTIES_BY_ERR_NO.update(penalty_dict)
+
+class SocketStream(object):
+ """Implements a socket stream."""
+ def __init__(self):
+ self._socket = None
+ self._is_ssl = False
+ self._is_socket = False
+ self._host = None
+
+ def connect(self, params, connect_timeout=_CONNECT_TIMEOUT):
+ """Connects to a TCP service.
+
+ Args:
+ params (tuple): The connection parameters.
+
+ Raises:
+ :class:`mysqlx.InterfaceError`: If Unix socket is not supported.
+ """
+ if connect_timeout is not None:
+ connect_timeout = connect_timeout / 1000 # Convert to seconds
+ try:
+ self._socket = socket.create_connection(params, connect_timeout)
+ self._host = params[0]
+ except ValueError:
+ try:
+ self._socket = socket.socket(socket.AF_UNIX)
+ self._socket.settimeout(connect_timeout)
+ self._socket.connect(params)
+ self._is_socket = True
+ except AttributeError:
+ raise InterfaceError("Unix socket unsupported")
+ self._socket.settimeout(None)
+
+ def read(self, count):
+ """Receive data from the socket.
+
+ Args:
+ count (int): Buffer size.
+
+ Returns:
+ bytes: The data received.
+ """
+ if self._socket is None:
+ raise OperationalError("MySQLx Connection not available")
+ buf = []
+ while count > 0:
+ data = self._socket.recv(count)
+ if data == b"":
+ raise RuntimeError("Unexpected connection close")
+ buf.append(data)
+ count -= len(data)
+ return b"".join(buf)
+
+ def sendall(self, data):
+ """Send data to the socket.
+
+ Args:
+ data (bytes): The data to be sent.
+ """
+ if self._socket is None:
+ raise OperationalError("MySQLx Connection not available")
+ try:
+ self._socket.sendall(data)
+ except socket.error as err:
+ raise OperationalError("Unexpected socket error: {}".format(err))
+
+ def close(self):
+ """Close the socket."""
+ if not self._socket:
+ return
+ try:
+ self._socket.shutdown(socket.SHUT_RDWR)
+ self._socket.close()
+ except socket.error:
+ # On [Errno 107] Transport endpoint is not connected
+ pass
+ self._socket = None
+
+ def __del__(self):
+ self.close()
+
+ def set_ssl(self, ssl_protos, ssl_mode, ssl_ca, ssl_crl, ssl_cert, ssl_key,
+ ssl_ciphers):
+ """Set SSL parameters.
+
+ Args:
+ ssl_protos (list): SSL protocol to use.
+ ssl_mode (str): SSL mode.
+ ssl_ca (str): The certification authority certificate.
+ ssl_crl (str): The certification revocation lists.
+ ssl_cert (str): The certificate.
+ ssl_key (str): The certificate key.
+ ssl_ciphers (list): SSL ciphersuites to use.
+
+ Raises:
+ :class:`mysqlx.RuntimeError`: If Python installation has no SSL
+ support.
+ :class:`mysqlx.InterfaceError`: If the parameters are invalid.
+ """
+ if not SSL_AVAILABLE:
+ self.close()
+ raise RuntimeError("Python installation has no SSL support")
+
+ if ssl_protos is None or not ssl_protos:
+ context = ssl.create_default_context()
+ if ssl_mode != SSLMode.VERIFY_IDENTITY:
+ context.check_hostname = False
+ if ssl_mode == SSLMode.REQUIRED:
+ context.verify_mode = ssl.CERT_NONE
+ else:
+ ssl_protos.sort(reverse=True)
+ tls_version = ssl_protos[0]
+ if not TLS_V1_3_SUPPORTED and \
+ tls_version == "TLSv1.3" and len(ssl_protos) > 1:
+ tls_version = ssl_protos[1]
+ ssl_protocol = TLS_VERSIONS[tls_version]
+ context = ssl.SSLContext(ssl_protocol)
+
+ if tls_version == "TLSv1.3":
+ if "TLSv1.2" not in ssl_protos:
+ context.options |= ssl.OP_NO_TLSv1_2
+ if "TLSv1.1" not in ssl_protos:
+ context.options |= ssl.OP_NO_TLSv1_1
+ if "TLSv1" not in ssl_protos:
+ context.options |= ssl.OP_NO_TLSv1
+
+ if ssl_ca:
+ try:
+ context.load_verify_locations(ssl_ca)
+ context.verify_mode = ssl.CERT_REQUIRED
+ except (IOError, ssl.SSLError) as err:
+ self.close()
+ raise InterfaceError("Invalid CA Certificate: {}".format(err))
+
+ if ssl_crl:
+ try:
+ context.load_verify_locations(ssl_crl)
+ context.verify_flags = ssl.VERIFY_CRL_CHECK_LEAF
+ except (IOError, ssl.SSLError) as err:
+ self.close()
+ raise InterfaceError("Invalid CRL: {}".format(err))
+
+ if ssl_cert:
+ try:
+ context.load_cert_chain(ssl_cert, ssl_key)
+ except (IOError, ssl.SSLError) as err:
+ self.close()
+ raise InterfaceError("Invalid Certificate/Key: {}".format(err))
+
+ if ssl_ciphers:
+ context.set_ciphers(":".join(iani_to_openssl_cs_name(ssl_protos[0],
+ ssl_ciphers)))
+ try:
+ self._socket = context.wrap_socket(self._socket,
+ server_hostname=self._host)
+ except ssl.CertificateError as err:
+ raise InterfaceError(str(err))
+ if ssl_mode == SSLMode.VERIFY_IDENTITY:
+ context.check_hostname = True
+ hostnames = []
+ # Windows does not return loopback aliases on gethostbyaddr
+ if os.name == 'nt' and (self._host == 'localhost' or \
+ self._host == '127.0.0.1'):
+ hostnames = ['localhost', '127.0.0.1']
+ aliases = socket.gethostbyaddr(self._host)
+ hostnames.extend([aliases[0]] + aliases[1])
+ match_found = False
+ errs = []
+ for hostname in hostnames:
+ try:
+ ssl.match_hostname(self._socket.getpeercert(), hostname)
+ except ssl.CertificateError as err:
+ errs.append(str(err))
+ else:
+ match_found = True
+ break
+ if not match_found:
+ self.close()
+ raise InterfaceError("Unable to verify server identity: {}"
+ "".format(", ".join(errs)))
+
+ self._is_ssl = True
+
+ # Raise a deprecation warning if TLSv1 or TLSv1.1 is being used
+ tls_version = self._socket.version()
+ if tls_version in ("TLSv1", "TLSv1.1"):
+ warn_msg = (
+ f"This connection is using {tls_version} which is now "
+ "deprecated and will be removed in a future release of "
+ "MySQL Connector/Python"
+ )
+ warnings.warn(warn_msg, DeprecationWarning)
+
+ def is_ssl(self):
+ """Verifies if SSL is being used.
+
+ Returns:
+ bool: Returns `True` if SSL is being used.
+ """
+ return self._is_ssl
+
+ def is_socket(self):
+ """Verifies if socket connection is being used.
+
+ Returns:
+ bool: Returns `True` if socket connection is being used.
+ """
+ return self._is_socket
+
+ def is_secure(self):
+ """Verifies if connection is secure.
+
+ Returns:
+ bool: Returns `True` if connection is secure.
+ """
+ return self._is_ssl or self._is_socket
+
+ def is_open(self):
+ """Verifies if connection is open.
+
+ Returns:
+ bool: Returns `True` if connection is open.
+ """
+ return self._socket is not None
+
+
def catch_network_exception(func):
    """Decorator used to catch socket.error or RuntimeError.

    Wraps a Connection method so that network-level failures mark the
    connection (and, for pooled connections, the pool) as unusable before
    re-raising.

    Raises:
        :class:`mysqlx.InterfaceError`: If `socket.Error` or `RuntimeError`
                                        is raised.
    """
    @wraps(func)
    def wrapper(self, *args, **kwargs):
        """Wrapper function."""
        try:
            # Fail fast if the server already announced a disconnection
            if isinstance(self, (Connection, PooledConnection)) and \
               self.is_server_disconnected():
                raise InterfaceError(*self.get_disconnected_reason())
            result = func(self, *args, **kwargs)
            if isinstance(result, BaseResult):
                # The server announces an upcoming shutdown/close through
                # warnings attached to the result
                for warning in result.get_warnings():
                    if warning["code"] in CONNECTION_CLOSED_ERROR:
                        error_msg = CONNECTION_CLOSED_ERROR[warning["code"]]
                        reason = ("Connection close: {}: {}"
                                  "".format(warning["msg"], error_msg),
                                  warning["code"])
                        if isinstance(self, (Connection, PooledConnection)):
                            self.set_server_disconnected(reason)
                        break
            return result
        except (socket.error, ConnectionResetError, ConnectionAbortedError,
                InterfaceError, RuntimeError, TimeoutError):
            if func.__name__ == "get_column_metadata" and args and \
               isinstance(args[0], SqlResult):
                result_warnings = args[0].get_warnings()
                if result_warnings:
                    warning = result_warnings[0]
                    # BUG FIX: use .get() instead of direct indexing. The
                    # first warning is not guaranteed to be a
                    # connection-closed notification; a KeyError here would
                    # mask the original network error and skip disconnect().
                    error_msg = CONNECTION_CLOSED_ERROR.get(warning["code"])
                    if error_msg is not None:
                        reason = ("Connection close: {}: {}"
                                  "".format(warning["msg"], error_msg),
                                  warning["code"])
                        if isinstance(self, PooledConnection):
                            self.pool.remove_connections()
                            # pool must be listed as faulty if server is
                            # shutting down
                            if warning["code"] == 1053:
                                PoolsManager().set_pool_unavailable(
                                    self.pool, InterfaceError(*reason))
                        if isinstance(self, (Connection, PooledConnection)):
                            self.set_server_disconnected(reason)
                        self.disconnect()
                        raise InterfaceError(*reason)
            self.disconnect()
            raise
    return wrapper
+
+
class Router(dict):
    """Represents a set of connection parameters.

    Args:
        settings (dict): Dictionary with connection settings

    .. versionadded:: 8.0.20
    """
    def __init__(self, connection_params):
        self.update(connection_params)
        # Routers start out available unless the settings say otherwise
        if "available" not in self:
            self["available"] = True

    def available(self):
        """Verifies if the Router is available to open connections.

        Returns:
            bool: True if this Router is available else False.
        """
        return self["available"]

    def set_unavailable(self):
        """Sets this Router unavailable to open connections."""
        self["available"] = False

    def get_connection_params(self):
        """Returns the connection parameters of this Router.

        Returns:
            tuple: host and port or socket information tuple.
        """
        try:
            # Unix socket connections take precedence over TCP
            return self["socket"]
        except KeyError:
            return (self["host"], self["port"])
+
+
class RouterManager():
    """Manages the connection parameters of all the routers.

    Args:
        routers (list): A list of router settings dictionaries.
        settings (dict): Dictionary with connection settings.

    .. versionadded:: 8.0.20
    """
    def __init__(self, routers, settings):
        self._routers = routers
        self._settings = settings
        # Index into routers_priority_list; points at the priority group
        # currently being used for failover
        self._cur_priority_idx = 0
        self._can_failover = True
        # Routers status: maps each priority to the list of Router objects
        # that share it
        self._routers_directory = {}
        # Distinct priorities, highest first (filled by _ensure_priorities)
        self.routers_priority_list = []
        self._ensure_priorities()

    def _ensure_priorities(self):
        """Ensure priorities.

        Assigns the default priority (100) to routers without one, validates
        the given priorities, sorts the routers by priority (descending) and
        groups them by priority in ``_routers_directory``.

        Raises:
            :class:`mysqlx.ProgrammingError`: If priorities are invalid.
        """
        # Number of routers that did not specify a priority
        priority_count = 0

        for router in self._routers:
            priority = router.get("priority", None)
            if priority is None:
                priority_count += 1
                router["priority"] = 100
            elif priority > 100:
                # NOTE(review): only the upper bound is enforced here even
                # though the message says "between 0 and 100" -- negative
                # priorities are not rejected; confirm intended behavior.
                raise ProgrammingError("The priorities must be between 0 and "
                                       "100", 4007)

        # Either all routers have explicit priorities or none of them do
        if 0 < priority_count < len(self._routers):
            raise ProgrammingError("You must either assign no priority to any "
                                   "of the routers or give a priority for "
                                   "every router", 4000)

        self._routers.sort(key=lambda x: x["priority"], reverse=True)

        # Group servers with the same priority
        for router in self._routers:
            priority = router["priority"]
            if priority not in self._routers_directory.keys():
                self._routers_directory[priority] = [Router(router)]
                self.routers_priority_list.append(priority)
            else:
                self._routers_directory[priority].append(Router(router))

    def _get_available_routers(self, priority):
        """Get a list of the current available routers that shares the given priority.

        Returns:
            list: A list of the current available routers.
        """
        router_list = self._routers_directory[priority]
        router_list = [router for router in router_list if router.available()]
        return router_list

    def _get_random_connection_params(self, priority):
        """Get a random router from the group with the given priority.

        Returns:
            Router: A random available router, or None if the group has no
                    available routers left.
        """
        router_list = self._get_available_routers(priority)
        if not router_list:
            return None
        if len(router_list) == 1:
            return router_list[0]

        last = len(router_list) - 1
        index = random.randint(0, last)
        return router_list[index]

    def can_failover(self):
        """Verifies if failover is still possible.

        Returns:
            bool: True if there is more server to failover to else False.
        """
        return self._can_failover

    def get_next_router(self):
        """Returns the next connection parameters.

        Returns:
            Router: with the connection parameters.
        """
        if not self._routers:
            # No routers configured: build a single default router from the
            # base settings and disable any further failover
            self._can_failover = False
            router_settings = self._settings.copy()
            router_settings["host"] = self._settings.get("host", "localhost")
            router_settings["port"] = self._settings.get("port", 33060)
            return Router(router_settings)

        cur_priority = self.routers_priority_list[self._cur_priority_idx]
        routers_priority_len = len(self.routers_priority_list)

        search = True
        while search:  # loop exits via break or priority exhaustion
            router = self._get_random_connection_params(cur_priority)

            if router is not None or \
               self._cur_priority_idx >= routers_priority_len:
                # Failover stops once we are on the last priority group and
                # fewer than two available routers remain in it
                if self._cur_priority_idx == routers_priority_len - 1 and \
                   len(self._get_available_routers(cur_priority)) < 2:
                    self._can_failover = False
                break

            # Search on next group
            self._cur_priority_idx += 1
            if self._cur_priority_idx < routers_priority_len:
                cur_priority = \
                    self.routers_priority_list[self._cur_priority_idx]

        return router

    def get_routers_directory(self):
        """Returns the directory containing all the routers managed.

        Returns:
            dict: Dictionary with priorities as keys and lists of Router
                  objects as values.
        """
        return self._routers_directory
+
+
class Connection(object):
    """Connection to a MySQL Server.

    Args:
        settings (dict): Dictionary with connection settings.
    """
    def __init__(self, settings):
        self.settings = settings
        self.stream = SocketStream()
        self.protocol = None
        self.keep_open = None
        self._user = settings.get("user")
        self._password = settings.get("password")
        self._schema = settings.get("schema")
        # Result currently being streamed from the server, if any
        self._active_result = None
        self._routers = settings.get("routers", [])

        # A plain host/port pair is treated as one more router candidate
        if "host" in settings and settings["host"]:
            self._routers.append({
                "host": settings.get("host"),
                "port": settings.get("port", None)
            })

        self.router_manager = RouterManager(self._routers, settings)
        self._connect_timeout = settings.get("connect-timeout",
                                             _CONNECT_TIMEOUT)
        if self._connect_timeout == 0:
            # None is assigned if connect timeout is 0, which disables
            # timeouts on socket operations
            self._connect_timeout = None

        # Prepared-statement bookkeeping
        self._stmt_counter = 0
        self._prepared_stmt_ids = []
        self._prepared_stmt_supported = True
        # Set when the server announces it is closing the connection
        self._server_disconnected = False
        self._server_disconnected_reason = None

    def fetch_active_result(self):
        """Fetch active result.

        Drains any result still being streamed so a new command can be sent.
        """
        if self._active_result is not None:
            self._active_result.fetch_all()
            self._active_result = None

    def set_active_result(self, result):
        """Set active result.

        Args:
            `Result`: It can be :class:`mysqlx.Result`,
                      :class:`mysqlx.BufferingResult`,
                      :class:`mysqlx.RowResult`, :class:`mysqlx.SqlResult` or
                      :class:`mysqlx.DocResult`.
        """
        self._active_result = result

    def connect(self):
        """Attempt to connect to the MySQL server.

        Iterates over the available routers (in priority order) until a
        connection succeeds, then negotiates TLS, connection attributes and
        compression, and authenticates.

        Raises:
            :class:`mysqlx.InterfaceError`: If fails to connect to the MySQL
                                            server.
            :class:`mysqlx.TimeoutError`: If connect timeout was exceeded.
        """
        # Loop and check
        error = None
        while self.router_manager.can_failover():
            try:
                router = self.router_manager.get_next_router()
                self.stream.connect(router.get_connection_params(),
                                    self._connect_timeout)
                reader = MessageReader(self.stream)
                writer = MessageWriter(self.stream)
                self.protocol = Protocol(reader, writer)

                caps_data = self.protocol.get_capabilites().capabilities
                caps = {
                    get_item_or_attr(cap, "name").lower():
                    cap for cap in caps_data
                } if caps_data else {}

                # Set TLS capabilities
                self._set_tls_capabilities(caps)

                # Set connection attributes capabilities
                if "attributes" in self.settings:
                    conn_attrs = self.settings["attributes"]
                    self.protocol.set_capabilities(
                        session_connect_attrs=conn_attrs)

                # Set compression capabilities
                compression = self.settings.get("compression", "preferred")
                algorithms = self.settings.get("compression-algorithms")
                algorithm = None if compression == "disabled" \
                    else self._set_compression_capabilities(caps,
                                                           compression,
                                                           algorithms)
                self._authenticate()
                self.protocol.set_compression(algorithm)
                return
            except (socket.error, RuntimeError) as err:
                # Remember the last failure and try the next router
                error = err
                router.set_unavailable()

        # Python 2.7 does not raise a socket.timeout exception when using
        # settimeout(), but it raises a socket.error with errno.EAGAIN (11)
        # or errno.EINPROGRESS (115) if connect-timeout value is too low
        if error is not None and isinstance(error, socket.timeout):
            if len(self._routers) <= 1:
                raise TimeoutError("Connection attempt to the server was "
                                   "aborted. Timeout of {0} ms was exceeded"
                                   "".format(self._connect_timeout))
            raise TimeoutError("All server connection attempts were aborted. "
                               "Timeout of {0} ms was exceeded for each "
                               "selected server".format(self._connect_timeout))
        if len(self._routers) <= 1:
            raise InterfaceError("Cannot connect to host: {0}".format(error))
        raise InterfaceError("Unable to connect to any of the target hosts",
                             4001)

    def _set_tls_capabilities(self, caps):
        """Set the TLS capabilities.

        Args:
            caps (dict): Dictionary with the server capabilities.

        Raises:
            :class:`mysqlx.OperationalError`: If SSL is not enabled at the
                                              server.
            :class:`mysqlx.RuntimeError`: If support for SSL is not available
                                          in Python.

        .. versionadded:: 8.0.21
        """
        if self.settings.get("ssl-mode") == SSLMode.DISABLED:
            return

        # Unix socket connections are considered secure without TLS
        if self.stream.is_socket():
            if self.settings.get("ssl-mode"):
                _LOGGER.warning("SSL not required when using Unix socket.")
            return

        if "tls" not in caps:
            self.close_connection()
            raise OperationalError("SSL not enabled at server")

        # Oracle Linux 7 backports the needed SSL support to older Pythons
        is_ol7 = False
        if platform.system() == "Linux":
            distname, version, _ = linux_distribution()
            try:
                is_ol7 = "Oracle Linux" in distname and \
                    version.split(".")[0] == "7"
            except IndexError:
                is_ol7 = False

        if sys.version_info < (2, 7, 9) and not is_ol7:
            self.close_connection()
            raise RuntimeError("The support for SSL is not available for "
                               "this Python version")

        self.protocol.set_capabilities(tls=True)
        self.stream.set_ssl(self.settings.get("tls-versions", None),
                            self.settings.get("ssl-mode", SSLMode.REQUIRED),
                            self.settings.get("ssl-ca"),
                            self.settings.get("ssl-crl"),
                            self.settings.get("ssl-cert"),
                            self.settings.get("ssl-key"),
                            self.settings.get("tls-ciphersuites"))
        if "attributes" in self.settings:
            conn_attrs = self.settings["attributes"]
            self.protocol.set_capabilities(session_connect_attrs=conn_attrs)

    def _set_compression_capabilities(self, caps, compression,
                                      algorithms=None):
        """Set the compression capabilities.

        If compression is available, negotiates client and server algorithms.
        By trying to find an algorithm from the requested compression
        algorithms list, which is supported by the server.

        If no compression algorithms list is provided, the following priority
        is used:

        1) zstd_stream
        2) lz4_message
        3) deflate_stream

        Args:
            caps (dict): Dictionary with the server capabilities.
            compression (str): The compression connection setting.
            algorithms (list): List of requested compression algorithms.

        Returns:
            str: The compression algorithm.

        .. versionadded:: 8.0.21
        .. versionchanged:: 8.0.22
        """
        compression_data = caps.get("compression")
        if compression_data is None:
            msg = "Compression requested but the server does not support it"
            if compression == "required":
                raise NotSupportedError(msg)
            _LOGGER.warning(msg)
            return None

        # The capability payload shape differs between the C extension and
        # the pure-Python protobuf implementation
        compression_dict = {}
        if isinstance(compression_data, dict):  # C extension is being used
            for fld in compression_data["value"]["obj"]["fld"]:
                compression_dict[fld["key"]] = [
                    value["scalar"]["v_string"]["value"].decode("utf-8")
                    for value in fld["value"]["array"]["value"]
                ]
        else:
            for fld in compression_data.value.obj.fld:
                compression_dict[fld.key] = [
                    value.scalar.v_string.value.decode("utf-8")
                    for value in fld.value.array.value
                ]

        server_algorithms = compression_dict.get("algorithm", [])
        algorithm = None

        # Try to find an algorithm from the requested compression algorithms
        # list, which is supported by the server
        if algorithms:
            # Resolve compression algorithms aliases and ignore unsupported
            client_algorithms = [
                COMPRESSION_ALGORITHMS[item] for item in algorithms
                if item in COMPRESSION_ALGORITHMS
            ]
            matched = [
                item for item in client_algorithms
                if item in server_algorithms
            ]
            if matched:
                algorithm = COMPRESSION_ALGORITHMS.get(matched[0])
            elif compression == "required":
                raise InterfaceError("The connection compression is set as "
                                     "required, but none of the provided "
                                     "compression algorithms are supported.")
            else:
                return None  # Disable compression

        # No compression algorithms list was provided or couldn't found one
        # supported by the server
        if algorithm is None:
            if HAVE_ZSTD and "zstd_stream" in server_algorithms:
                algorithm = "zstd_stream"
            elif HAVE_LZ4 and "lz4_message" in server_algorithms:
                algorithm = "lz4_message"
            else:
                algorithm = "deflate_stream"

        if algorithm not in server_algorithms:
            msg = ("Compression requested but the compression algorithm "
                   "negotiation failed")
            if compression == "required":
                raise InterfaceError(msg)
            _LOGGER.warning(msg)
            return None

        self.protocol.set_capabilities(compression={"algorithm": algorithm})
        return algorithm

    def _authenticate(self):
        """Authenticate with the MySQL server.

        Uses the explicitly configured auth mechanism when given; otherwise
        PLAIN over secure connections, or MYSQL41 with a SHA256_MEMORY
        fallback over insecure ones.
        """
        auth = self.settings.get("auth")
        if auth:
            if auth == Auth.PLAIN:
                self._authenticate_plain()
            elif auth == Auth.SHA256_MEMORY:
                self._authenticate_sha256_memory()
            elif auth == Auth.MYSQL41:
                self._authenticate_mysql41()
        elif self.stream.is_secure():
            # Use PLAIN if no auth provided and connection is secure
            self._authenticate_plain()
        else:
            # Use MYSQL41 if connection is not secure
            try:
                self._authenticate_mysql41()
            except InterfaceError:
                pass
            else:
                return
            # Try SHA256_MEMORY if MYSQL41 fails
            try:
                self._authenticate_sha256_memory()
            except InterfaceError:
                raise InterfaceError("Authentication failed using MYSQL41 and "
                                     "SHA256_MEMORY, check username and "
                                     "password or try a secure connection")

    def _authenticate_mysql41(self):
        """Authenticate with the MySQL server using `MySQL41AuthPlugin`."""
        plugin = MySQL41AuthPlugin(self._user, self._password)
        self.protocol.send_auth_start(plugin.auth_name())
        extra_data = self.protocol.read_auth_continue()
        self.protocol.send_auth_continue(plugin.auth_data(extra_data))
        self.protocol.read_auth_ok()

    def _authenticate_plain(self):
        """Authenticate with the MySQL server using `PlainAuthPlugin`.

        Raises:
            :class:`mysqlx.InterfaceError`: If the connection is not secure,
                                            since PLAIN sends the password
                                            unhashed.
        """
        if not self.stream.is_secure():
            raise InterfaceError("PLAIN authentication is not allowed via "
                                 "unencrypted connection")
        plugin = PlainAuthPlugin(self._user, self._password)
        self.protocol.send_auth_start(plugin.auth_name(),
                                      auth_data=plugin.auth_data())
        self.protocol.read_auth_ok()

    def _authenticate_sha256_memory(self):
        """Authenticate with the MySQL server using `Sha256MemoryAuthPlugin`."""
        plugin = Sha256MemoryAuthPlugin(self._user, self._password)
        self.protocol.send_auth_start(plugin.auth_name())
        extra_data = self.protocol.read_auth_continue()
        self.protocol.send_auth_continue(plugin.auth_data(extra_data))
        self.protocol.read_auth_ok()

    def _deallocate_statement(self, statement):
        """Deallocates statement.

        Args:
            statement (Statement): A `Statement` based type object.
        """
        if statement.prepared:
            self.protocol.send_prepare_deallocate(statement.stmt_id)
            self._prepared_stmt_ids.remove(statement.stmt_id)
            statement.prepared = False

    def _prepare_statement(self, msg_type, msg, statement):
        """Prepares a statement.

        Falls back permanently to plain execution if the server does not
        support prepared statements.

        Args:
            msg_type (str): Message ID string.
            msg (mysqlx.protobuf.Message): MySQL X Protobuf Message.
            statement (Statement): A `Statement` based type object.
        """
        try:
            self.fetch_active_result()
            self.protocol.send_prepare_prepare(msg_type, msg, statement)
        except NotSupportedError:
            self._prepared_stmt_supported = False
            return
        self._prepared_stmt_ids.append(statement.stmt_id)
        statement.prepared = True

    def _execute_prepared_pipeline(self, msg_type, msg, statement):
        """Executes the prepared statement pipeline.

        Decides, based on the statement's prepared/changed/repeated state,
        whether to send a plain Crud operation or to (re)prepare and execute
        a server-side prepared statement.

        Args:
            msg_type (str): Message ID string.
            msg (mysqlx.protobuf.Message): MySQL X Protobuf Message.
            statement (Statement): A `Statement` based type object.
        """
        # For old servers without prepared statement support
        if not self._prepared_stmt_supported:
            # Crud::<Operation>
            self.protocol.send_msg_without_ps(msg_type, msg, statement)
            return

        if statement.deallocate_prepare_execute:
            # Prepare::Deallocate + Prepare::Prepare + Prepare::Execute
            self._deallocate_statement(statement)
            self._prepare_statement(msg_type, msg, statement)
            if not self._prepared_stmt_supported:
                self.protocol.send_msg_without_ps(msg_type, msg, statement)
                return
            self.protocol.send_prepare_execute(msg_type, msg, statement)
            statement.deallocate_prepare_execute = False
            statement.reset_exec_counter()
        elif statement.prepared and not statement.changed:
            # Prepare::Execute
            self.protocol.send_prepare_execute(msg_type, msg, statement)
        elif statement.changed and not statement.repeated:
            # Crud::<Operation>
            self._deallocate_statement(statement)
            self.protocol.send_msg_without_ps(msg_type, msg, statement)
            statement.changed = False
            statement.reset_exec_counter()
        elif not statement.changed and not statement.repeated:
            # Prepare::Prepare + Prepare::Execute
            if not statement.prepared:
                self._prepare_statement(msg_type, msg, statement)
                if not self._prepared_stmt_supported:
                    self.protocol.send_msg_without_ps(msg_type, msg,
                                                      statement)
                    return
            self.protocol.send_prepare_execute(msg_type, msg, statement)
        elif statement.changed and statement.repeated:
            # Prepare::Deallocate + Crud::<Operation>
            self._deallocate_statement(statement)
            self.protocol.send_msg_without_ps(msg_type, msg, statement)
            statement.changed = False
            statement.reset_exec_counter()

        statement.increment_exec_counter()

    @catch_network_exception
    def send_sql(self, statement):
        """Execute a SQL statement.

        Args:
            statement (Statement): A `Statement` based type object carrying
                                   the SQL text in its ``sql`` attribute.

        Raises:
            :class:`mysqlx.ProgrammingError`: If the SQL statement is not a
                                              valid string.

        Returns:
            :class:`mysqlx.SqlResult`: A result object.
        """
        sql = statement.sql
        if self.protocol is None:
            raise OperationalError("MySQLx Connection not available")
        if not isinstance(sql, str):
            raise ProgrammingError("The SQL statement is not a valid string")
        else:
            msg_type, msg = self.protocol.build_execute_statement(
                "sql", sql)
        self.protocol.send_msg_without_ps(msg_type, msg, statement)
        return SqlResult(self)

    @catch_network_exception
    def send_insert(self, statement):
        """Send an insert statement.

        Args:
            statement (`Statement`): It can be :class:`mysqlx.InsertStatement`
                                     or :class:`mysqlx.AddStatement`.

        Returns:
            :class:`mysqlx.Result`: A result object.
        """
        if self.protocol is None:
            raise OperationalError("MySQLx Connection not available")
        msg_type, msg = self.protocol.build_insert(statement)
        self.protocol.send_msg(msg_type, msg)
        ids = None
        if isinstance(statement, AddStatement):
            ids = statement.ids
        return Result(self, ids)

    @catch_network_exception
    def send_find(self, statement):
        """Send a find statement.

        Args:
            statement (`Statement`): It can be :class:`mysqlx.SelectStatement`
                                     or :class:`mysqlx.FindStatement`.

        Returns:
            `Result`: It can be class:`mysqlx.DocResult` or
                      :class:`mysqlx.RowResult`.
        """
        msg_type, msg = self.protocol.build_find(statement)
        self._execute_prepared_pipeline(msg_type, msg, statement)
        return DocResult(self) if statement.is_doc_based() else RowResult(self)

    @catch_network_exception
    def send_delete(self, statement):
        """Send a delete statement.

        Args:
            statement (`Statement`): It can be :class:`mysqlx.RemoveStatement`
                                     or :class:`mysqlx.DeleteStatement`.

        Returns:
            :class:`mysqlx.Result`: The result object.
        """
        msg_type, msg = self.protocol.build_delete(statement)
        self._execute_prepared_pipeline(msg_type, msg, statement)
        return Result(self)

    @catch_network_exception
    def send_update(self, statement):
        """Send an update statement.

        Args:
            statement (`Statement`): It can be :class:`mysqlx.ModifyStatement`
                                     or :class:`mysqlx.UpdateStatement`.

        Returns:
            :class:`mysqlx.Result`: The result object.
        """
        msg_type, msg = self.protocol.build_update(statement)
        self._execute_prepared_pipeline(msg_type, msg, statement)
        return Result(self)

    @catch_network_exception
    def execute_nonquery(self, namespace, cmd, raise_on_fail, fields=None):
        """Execute a non query command.

        Args:
            namespace (str): The namespace.
            cmd (str): The command.
            raise_on_fail (bool): `True` to raise on fail.
            fields (Optional[dict]): The message fields.

        Raises:
            :class:`mysqlx.OperationalError`: On errors.

        Returns:
            :class:`mysqlx.Result`: The result object, or `None` when the
            command fails and `raise_on_fail` is `False`.
        """
        try:
            msg_type, msg = \
                self.protocol.build_execute_statement(namespace, cmd, fields)
            self.protocol.send_msg(msg_type, msg)
            return Result(self)
        except OperationalError:
            # Best-effort mode: swallow the error when requested
            if raise_on_fail:
                raise

    @catch_network_exception
    def execute_sql_scalar(self, sql):
        """Execute a SQL scalar.

        Args:
            sql (str): The SQL statement.

        Raises:
            :class:`mysqlx.InterfaceError`: If no data found.

        Returns:
            The value of the first column of the first row of the result.
        """
        msg_type, msg = self.protocol.build_execute_statement("sql", sql)
        self.protocol.send_msg(msg_type, msg)
        result = RowResult(self)
        result.fetch_all()
        if result.count == 0:
            raise InterfaceError("No data found")
        return result[0][0]

    @catch_network_exception
    def get_row_result(self, cmd, fields):
        """Returns the row result.

        Args:
            cmd (str): The command.
            fields (dict): The message fields.

        Returns:
            :class:`mysqlx.RowResult`: The result object.
        """
        msg_type, msg = \
            self.protocol.build_execute_statement("mysqlx", cmd, fields)
        self.protocol.send_msg(msg_type, msg)
        return RowResult(self)

    @catch_network_exception
    def read_row(self, result):
        """Read row.

        Args:
            result (:class:`mysqlx.RowResult`): The result object.
        """
        return self.protocol.read_row(result)

    @catch_network_exception
    def close_result(self, result):
        """Close result.

        Args:
            result (:class:`mysqlx.Result`): The result object.
        """
        self.protocol.close_result(result)

    @catch_network_exception
    def get_column_metadata(self, result):
        """Get column metadata.

        Args:
            result (:class:`mysqlx.Result`): The result object.
        """
        return self.protocol.get_column_metadata(result)

    def get_next_statement_id(self):
        """Returns the next statement ID.

        Returns:
            int: A statement ID.

        .. versionadded:: 8.0.16
        """
        self._stmt_counter += 1
        return self._stmt_counter

    def is_open(self):
        """Check if connection is open.

        Returns:
            bool: `True` if connection is open.
        """
        return self.stream.is_open()

    def set_server_disconnected(self, reason):
        """Set the disconnection message from the server.

        Args:
            reason (str): disconnection reason from the server.
        """
        self._server_disconnected = True
        self._server_disconnected_reason = reason

    def is_server_disconnected(self):
        """Verify if the session has been disconnect from the server.

        Returns:
            bool: `True` if the connection has been closed from the server
                  otherwise `False`.
        """
        return self._server_disconnected

    def get_disconnected_reason(self):
        """Get the disconnection message sent by the server.

        Returns:
            string: disconnection reason from the server.
        """
        return self._server_disconnected_reason

    def disconnect(self):
        """Disconnect from server.

        Closes the socket without any protocol-level handshake.
        """
        if not self.is_open():
            return
        self.stream.close()

    def close_session(self):
        """Close a successfully authenticated session."""
        if not self.is_open():
            return

        try:
            # Fetch any active result
            self.fetch_active_result()
            # Deallocate all prepared statements
            if self._prepared_stmt_supported:
                for stmt_id in self._prepared_stmt_ids:
                    self.protocol.send_prepare_deallocate(stmt_id)
                self._stmt_counter = 0
            # Send session close
            self.protocol.send_close()
            self.protocol.read_ok()
        except (InterfaceError, OperationalError, OSError) as err:
            _LOGGER.warning("Warning: An error occurred while attempting to "
                            "close the connection: {}".format(err))
        finally:
            # The remote connection with the server has been lost,
            # close the connection locally.
            self.stream.close()

    def reset_session(self):
        """Reset a successfully authenticated session.

        Keeps the connection open but discards server-side session state.
        """
        if not self.is_open():
            return
        if self._active_result is not None:
            self._active_result.fetch_all()
        try:
            self.keep_open = self.protocol.send_reset(self.keep_open)
        except (InterfaceError, OperationalError) as err:
            _LOGGER.warning("Warning: An error occurred while attempting to "
                            "reset the session: {}".format(err))

    def close_connection(self):
        """Announce to the server that the client wants to close the
        connection. Discards any session state of the server.
        """
        if not self.is_open():
            return
        if self._active_result is not None:
            self._active_result.fetch_all()
        self.protocol.send_connection_close()
        self.protocol.read_ok()
        self.stream.close()
+
+
class PooledConnection(Connection):
    """Connection wrapper that lives inside a :class:`ConnectionPool`.

    A PooledConnection behaves like a normal :class:`Connection`, except
    that close_session() returns the connection to its pool (so it can be
    reused) instead of disconnecting from the MySQL server, and sql() is
    available to build SQL statements directly.

    Args:
        pool (ConnectionPool): The pool where this connection must return.

    Raises:
        AttributeError: If ``pool`` is not a :class:`ConnectionPool`.

    .. versionadded:: 8.0.13
    """
    def __init__(self, pool):
        if not isinstance(pool, ConnectionPool):
            raise AttributeError("pool should be a ConnectionPool object")
        super().__init__(pool.cnx_config)
        self.pool = pool
        self.host = pool.cnx_config["host"]
        self.port = pool.cnx_config["port"]

    def close_connection(self):
        """Close the session with the server and shut the socket down."""
        super().close_session()

    def close_session(self):
        """Return this connection to its pool instead of closing it.

        The connection with the MySQL server is kept open; the connection is
        queued back in the pool for reuse. When the pool is configured to
        reset sessions, the session state is cleared by re-authenticating
        the user once the connection is taken from the pool again.
        """
        self.pool.add_connection(self)

    def reconnect(self):
        """Reconnect this connection.

        Drains any active result, then re-authenticates the user.
        """
        if self._active_result is not None:
            self._active_result.fetch_all()
        self._authenticate()

    def reset(self):
        """Reset the connection.

        Resets the connection by re-authenticating.
        """
        self.reconnect()

    def sql(self, sql):
        """Creates a :class:`mysqlx.SqlStatement` object to allow running the
        SQL statement on the target MySQL Server.

        Args:
            sql (string): The SQL statement to be executed.

        Returns:
            mysqlx.SqlStatement: SqlStatement object.
        """
        return SqlStatement(self, sql)
+
+
+class ConnectionPool(queue.Queue):
+ """This class represents a pool of connections.
+
+ Initializes the Pool with the given name and settings.
+
+ Args:
+ name (str): The name of the pool, used to track a single pool per
+ combination of host and user.
+ **kwargs:
+ max_size (int): The maximun number of connections to hold in
+ the pool.
+ reset_session (bool): If the connection should be reseted when
+ is taken from the pool.
+ max_idle_time (int): The maximum number of milliseconds to allow
+ a connection to be idle in the queue before
+ being closed. Zero value means infinite.
+ queue_timeout (int): The maximum number of milliseconds a
+ request will wait for a connection to
+ become available. A zero value means
+ infinite.
+ priority (int): The router priority, to choose this pool over
+ other with lower priority.
+
+ Raises:
+ :class:`mysqlx.PoolError` on errors.
+
+ .. versionadded:: 8.0.13
+ """
    def __init__(self, name, **kwargs):
        self._set_pool_name(name)
        self._open_sessions = 0
        # Connections created by this pool that may return to it
        self._connections_openned = []
        self._available = True
        self._timeout = 0
        self._timeout_stamp = datetime.now()
        self.pool_max_size = kwargs.get("max_size", 25)
        # Invoke Queue.__init__ directly: under Python 2, Queue.Queue is an
        # old-style class, so super() cannot be used here
        queue.Queue.__init__(self, self.pool_max_size)
        self.reset_session = kwargs.get("reset_session", True)
        # Maximum idle time in milliseconds; zero means infinite
        self.max_idle_time = kwargs.get("max_idle_time", 25)
        self.settings = kwargs
        # Maximum wait for a free connection, in milliseconds; zero means
        # infinite
        self.queue_timeout = kwargs.get("queue_timeout", 25)
        self.priority = kwargs.get("priority", 0)
        self.cnx_config = kwargs
        self.host = kwargs['host']
        self.port = kwargs['port']
+
+ def _set_pool_name(self, pool_name):
+ r"""Set the name of the pool.
+
+ This method checks the validity and sets the name of the pool.
+
+ Args:
+ pool_name (str): The pool name.
+
+ Raises:
+ AttributeError: If the pool_name contains illegal characters
+ ([^a-zA-Z0-9._\-*$#]) or is longer than
+ connection._CNX_POOL_MAX_NAME_SIZE.
+ """
+ if _CNX_POOL_NAME_REGEX.search(pool_name):
+ raise AttributeError(
+ "Pool name '{0}' contains illegal characters".format(pool_name))
+ if len(pool_name) > _CNX_POOL_MAX_NAME_SIZE:
+ raise AttributeError(
+ "Pool name '{0}' is too long".format(pool_name))
+ self.name = pool_name
+
+ @property
+ def open_connections(self):
+ """Returns the number of open connections that can return to this pool.
+ """
+ return len(self._connections_openned)
+
+ def remove_connection(self, cnx=None):
+ """Removes a connection from this pool.
+
+ Args:
+ cnx (PooledConnection): The connection object.
+ """
+ self._connections_openned.remove(cnx)
+
    def remove_connections(self):
        """Removes all the connections from the pool."""
        while self.qsize() > 0:
            try:
                # NOTE(review): Queue.get() interprets ``timeout`` in
                # seconds, while ``queue_timeout`` is documented in
                # milliseconds -- confirm the intended unit.
                cnx = self.get(block=True,
                               timeout=self.queue_timeout)
            except queue.Empty:
                pass
            else:
                try:
                    # Best effort: the connection may already be broken
                    cnx.close_connection()
                except (RuntimeError, socket.error, InterfaceError):
                    pass
                finally:
                    self.remove_connection(cnx)
+
    def add_connection(self, cnx=None):
        """Adds a connection to this pool.

        This method instantiates a Connection using the configuration passed
        when initializing the ConnectionPool instance or using the set_config()
        method.
        If cnx is a Connection instance, it will be added to the queue.

        Args:
            cnx (PooledConnection): The connection object.

        Raises:
            PoolError: If no configuration is set, if no more connection can
                       be added (maximum reached) or if the connection can not
                       be instantiated.
        """
        if not self.cnx_config:
            raise PoolError("Connection configuration not available")

        if self.full():
            raise PoolError("Failed adding connection; queue is full")

        if not cnx:
            # Fresh connection created from this pool's own settings.
            cnx = PooledConnection(self)
            # mysqlx_wait_timeout is only available on MySQL 8.0.11+,
            # so probe the server version before setting it.
            ver = cnx.sql('show variables like "version"'
                          ).execute().fetch_all()[0][1]
            if tuple([int(n) for n in ver.split("-")[0].split(".")]) > \
               (8, 0, 10):
                cnx.sql("set mysqlx_wait_timeout = {}"
                        "".format(self.max_idle_time)).execute()
            self._connections_openned.append(cnx)
        else:
            if not isinstance(cnx, PooledConnection):
                raise PoolError(
                    "Connection instance not subclass of PooledSession.")
            if cnx.is_server_disconnected():
                # The server dropped this endpoint: drain the idle queue and
                # close the returned connection before queueing it back.
                self.remove_connections()
                cnx.close()

        self.queue_connection(cnx)
+
+ def queue_connection(self, cnx):
+ """Put connection back in the queue:
+
+ This method is putting a connection back in the queue.
+ It will not acquire a lock as the methods using _queue_connection() will
+ have it set.
+
+ Args:
+ PooledConnection: The connection object.
+
+ Raises:
+ PoolError: On errors.
+ """
+ if not isinstance(cnx, PooledConnection):
+ raise PoolError(
+ "Connection instance not subclass of PooledSession.")
+
+ # Reset the connection
+ if self.reset_session:
+ cnx.reset_session()
+ try:
+ self.put(cnx, block=False)
+ except queue.Full:
+ PoolError("Failed adding connection; queue is full")
+
+ def track_connection(self, connection):
+ """Tracks connection in order of close it when client.close() is invoke.
+ """
+ self._connections_openned.append(connection)
+
    def __str__(self):
        # A pool is identified by its validated name (see _set_pool_name).
        return self.name
+
+ def available(self):
+ """Returns if this pool is available for pool connections from it.
+
+ Returns:
+ bool: True if this pool is available else False.
+ .. versionadded:: 8.0.20
+ """
+ return self._available
+
    def set_unavailable(self, time_out=-1):
        """Sets this pool unavailable for a period of time (in seconds).

        Args:
            time_out (int): Penalty duration in seconds; -1 means no
                explicit timeout was supplied.

        .. versionadded:: 8.0.20
        """
        # Act (and log) only on the available -> unavailable transition;
        # repeated calls while already penalized keep the original penalty.
        if self._available:
            _LOGGER.warning("ConnectionPool.set_unavailable pool: %s "
                            "time_out: %s", self, time_out)
            self._available = False
            self._timeout_stamp = datetime.now()
            self._timeout = time_out
+
+ def set_available(self):
+ """Sets this pool available for pool connections from it.
+
+ .. versionadded:: 8.0.20
+ """
+ self._available = True
+ self._timeout_stamp = datetime.now()
+
+ def get_timeout_stamp(self):
+ """Returns the penalized time (timeout) and the time at the penalty.
+
+ Returns:
+ tuple: penalty seconds (int), timestamp at penalty (datetime object)
+ .. versionadded:: 8.0.20
+ """
+ return (self._timeout, self._timeout_stamp)
+
    def close(self):
        """Close every connection this pool has ever opened.

        NOTE(review): the tracking list is not cleared and the queue is not
        drained here; callers are expected to discard the pool afterwards.
        """
        for cnx in self._connections_openned:
            cnx.close_connection()
+
+
class PoolsManager(object):
    """Manages a pool of connections for a host or hosts in routers.

    This class handles all the pools of Connections. It is a singleton:
    every instantiation returns the same object, which maps each client id
    to its list of ConnectionPool objects.

    .. versionadded:: 8.0.13
    """
    __instance = None
    __pools = {}

    def __new__(cls):
        # Classic singleton: the shared pools mapping is (re)created only
        # when the first instance is built.
        if PoolsManager.__instance is None:
            PoolsManager.__instance = object.__new__(cls)
            PoolsManager.__pools = {}
        return PoolsManager.__instance

    def _pool_exists(self, client_id, pool_name):
        """Verifies if a pool exists with the given name.

        Args:
            client_id (str): The client id.
            pool_name (str): The name of the pool.

        Returns:
            bool: Returns `True` if the pool exists otherwise `False`.
        """
        pools = self.__pools.get(client_id, [])
        for pool in pools:
            if pool.name == pool_name:
                return True
        return False

    def _get_pools(self, settings):
        """Retrieves a list of pools that shares the given settings.

        Args:
            settings (dict): the configuration of the pool.

        Returns:
            list: A list of pools that shares the given settings.
        """
        available_pools = []
        pool_names = []
        connections_settings = self._get_connections_settings(settings)

        # Generate the names of the pools these settings can connect to
        for router_name, _ in connections_settings:
            pool_names.append(router_name)

        # Collect this client's existing pools whose name matches
        for pool in self.__pools.get(settings.get("client_id", "No id"), []):
            if pool.name in pool_names:
                available_pools.append(pool)
        return available_pools

    def _get_connections_settings(self, settings):
        """Generates a list of separated connection settings for each host.

        Gets a list of connection settings for each host or router found in the
        given settings.

        Args:
            settings (dict): The configuration for the connections.

        Returns:
            list: A list of (pool_name, connection_settings) tuples, ordered
            by router priority (ascending) then weight (descending).
        """
        pool_settings = settings.copy()
        routers = pool_settings.get("routers", [])
        connections_settings = []
        if "routers" in pool_settings:
            pool_settings.pop("routers")
        if "host" in pool_settings and "port" in pool_settings:
            # A bare host/port acts as a default router with lowest priority.
            routers.append({"priority": 100,
                            "weight": 0,
                            "host": pool_settings["host"],
                            "port": pool_settings["port"]})
        # Order routers
        routers.sort(key=lambda x: (x["priority"], -x.get("weight", 0)))
        for router in routers:
            connection_settings = pool_settings.copy()
            connection_settings["host"] = router["host"]
            connection_settings["port"] = router["port"]
            connection_settings["priority"] = router["priority"]
            connection_settings["weight"] = router.get("weight", 0)
            connections_settings.append(
                (generate_pool_name(**connection_settings),
                 connection_settings))
        return connections_settings

    def create_pool(self, cnx_settings):
        """Creates a `ConnectionPool` instance to hold the connections.

        Creates a `ConnectionPool` instance to hold the connections only if
        no other pool exists with the same configuration.

        Args:
            cnx_settings (dict): The configuration for the connections.
        """
        connections_settings = self._get_connections_settings(cnx_settings)

        # Subscribe client if it does not exist
        if cnx_settings.get("client_id", "No id") not in self.__pools:
            self.__pools[cnx_settings.get("client_id", "No id")] = []

        # Create a pool for each router
        for router_name, settings in connections_settings:
            if self._pool_exists(cnx_settings.get("client_id", "No id"),
                                 router_name):
                continue
            else:
                pool = self.__pools.get(cnx_settings.get("client_id", "No id"),
                                        [])
                pool.append(ConnectionPool(router_name, **settings))

    def _get_random_pool(self, pool_list):
        """Get a random pool from the given list.

        Args:
            pool_list (list): Candidate ConnectionPool objects.

        Returns:
            ConnectionPool: a random pool, or None if the list is empty.

        .. versionadded:: 8.0.20
        """
        if not pool_list:
            return None
        if len(pool_list) == 1:
            return pool_list[0]

        last = len(pool_list) - 1
        index = random.randint(0, last)
        return pool_list[index]

    def get_sublist(self, pools, index, cur_priority):
        """Collect, starting at ``index``, the available pools that share
        ``cur_priority``; stops at the first pool with another priority."""
        sublist = []
        next_priority = None
        while index < len(pools):
            next_priority = pools[index].priority
            if cur_priority == next_priority and pools[index].available():
                sublist.append(pools[index])
            elif cur_priority != next_priority:
                break
            index += 1
        return sublist

    def _get_next_pool(self, pools, cur_priority):
        """Pick a random available pool among those with ``cur_priority``.

        Returns None when no available pool has that priority.
        """
        index = 0
        for pool in pools:
            if pool.available() and cur_priority == pool.priority:
                break
            index += 1
        subpool = []
        while not subpool and index < len(pools):
            subpool = self.get_sublist(pools, index, cur_priority)
            index += 1
        return self._get_random_pool(subpool)

    def _get_next_priority(self, pools, cur_priority=None):
        """Return the priority to try next.

        With no current priority, the first pool's priority is used;
        otherwise the priority of the first available pool is returned,
        falling back to the first pool's priority.
        """
        if cur_priority is None and pools:
            return pools[0].priority
        else:
            # find the first available pool and use its priority
            for t_pool in pools:
                if t_pool.available():
                    cur_priority = t_pool.priority
                    return cur_priority
        return pools[0].priority

    def _check_unavailable_pools(self, settings, revive=None):
        """Re-enable penalized pools whose timeout has elapsed.

        Args:
            settings (dict): Connection settings identifying the pools.
            revive (int): Optional override (seconds) for each pool's own
                penalty timeout.
        """
        pools = self._get_pools(settings)
        for pool in pools:
            if pool.available():
                continue
            timeout, timeout_stamp = pool.get_timeout_stamp()
            if revive:
                timeout = revive
            if datetime.now() > (timeout_stamp + timedelta(seconds=timeout)):
                pool.set_available()

    def get_connection(self, settings):
        """Get a connection from the pool.

        This method returns an `PooledConnection` instance which has a reference
        to the pool that created it, and can be used as a normal Connection.

        When the MySQL connection is not connected, a reconnect is attempted.

        Raises:
            :class:`PoolError`: On errors.

        Returns:
            PooledConnection: A pooled connection object.
        """
        def set_mysqlx_wait_timeout(cnx):
            # Closure over ``pool`` (the loop variable below) at call time.
            ver = cnx.sql(_SHOW_VERSION_QUERY).execute().fetch_all()[0][1]
            # mysqlx_wait_timeout is only available on MySQL 8
            if tuple([int(n) for n in
                      ver.split("-")[0].split(".")]) > (8, 0, 10):
                cnx.sql("set mysqlx_wait_timeout = {}"
                        "".format(pool.max_idle_time)).execute()

        pools = self._get_pools(settings)
        cur_priority = settings.get("cur_priority", None)
        error_list = []
        self._check_unavailable_pools(settings)
        cur_priority = self._get_next_priority(pools, cur_priority)
        if cur_priority is None:
            raise PoolError("Unable to connect to any of the target hosts. "
                            "No pool is available.")
        settings["cur_priority"] = cur_priority
        pool = self._get_next_pool(pools, cur_priority)
        while pool is not None:
            try:
                # Check connection availability in this pool
                if pool.qsize() > 0:
                    # We have connections in pool, try to return a working one
                    with threading.RLock():
                        try:
                            cnx = pool.get(block=True,
                                           timeout=pool.queue_timeout)
                        except queue.Empty:
                            raise PoolError(
                                "Failed getting connection; pool exhausted")
                        try:
                            if cnx.is_server_disconnected():
                                pool.remove_connections()
                            # Only reset the connection by re-authentication
                            # if the connection was unable to keep open by the
                            # server
                            if not cnx.keep_open:
                                cnx.reset()
                            set_mysqlx_wait_timeout(cnx)
                        except (RuntimeError, socket.error, InterfaceError):
                            # Unable to reset connection, close and remove
                            try:
                                cnx.close_connection()
                            except (RuntimeError, socket.error, InterfaceError):
                                pass
                            finally:
                                pool.remove_connection(cnx)
                            # By WL#13222 all idle sessions that connect to the
                            # same endpoint should be removed from the pool.
                            while pool.qsize() > 0:
                                try:
                                    cnx = pool.get(block=True,
                                                   timeout=pool.queue_timeout)
                                except queue.Empty:
                                    pass
                                else:
                                    try:
                                        cnx.close_connection()
                                    except (RuntimeError, socket.error, InterfaceError):
                                        pass
                                    finally:
                                        pool.remove_connection(cnx)
                            # Connection was closed by the server, create new
                            try:
                                cnx = PooledConnection(pool)
                                pool.track_connection(cnx)
                                cnx.connect()
                                set_mysqlx_wait_timeout(cnx)
                            except (RuntimeError, socket.error, InterfaceError):
                                pass
                            finally:
                                # Server must be down, take down idle
                                # connections from this pool
                                while pool.qsize() > 0:
                                    try:
                                        cnx = pool.get(block=True,
                                                       timeout=pool.queue_timeout)
                                        cnx.close_connection()
                                        pool.remove_connection(cnx)
                                    except (RuntimeError, socket.error, InterfaceError):
                                        pass
                        return cnx
                elif pool.open_connections < pool.pool_max_size:
                    # No connections in pool, but we can open a new one
                    cnx = PooledConnection(pool)
                    pool.track_connection(cnx)
                    cnx.connect()
                    set_mysqlx_wait_timeout(cnx)
                    return cnx
                else:
                    # Pool is exhausted, so the client needs to wait
                    with threading.RLock():
                        try:
                            cnx = pool.get(block=True,
                                           timeout=pool.queue_timeout)
                            cnx.reset()
                            set_mysqlx_wait_timeout(cnx)
                            return cnx
                        except queue.Empty:
                            raise PoolError("pool max size has been reached")
            except (InterfaceError, TimeoutError, PoolError) as err:
                error_list.append("pool: {} error: {}".format(pool, err))
                if isinstance(err, PoolError):
                    # Pool can be exhausted now but can be ready again in no
                    # time, e.g. a connection is returned to the pool.
                    pool.set_unavailable(2)
                else:
                    self.set_pool_unavailable(pool, err)

            self._check_unavailable_pools(settings)
            # Try next pool with the same priority
            pool = self._get_next_pool(pools, cur_priority)

            if pool is None:
                cur_priority = self._get_next_priority(pools, cur_priority)
                settings["cur_priority"] = cur_priority
                pool = self._get_next_pool(pools, cur_priority)
                if pool is None:
                    msg = "\n ".join(error_list)
                    raise PoolError("Unable to connect to any of the "
                                    "target hosts: [\n {}\n]".format(msg))
                continue

        raise PoolError("Unable to connect to any of the target hosts")

    def close_pool(self, cnx_settings):
        """Closes the connections in the pools

        Returns:
            int: The number of closed pools
        """
        pools = self._get_pools(cnx_settings)
        for pool in pools:
            pool.close()
            # Remove the pool from the client's list.
            # Bug fix: this bookkeeping previously ran once after the loop,
            # so only the last iterated pool was ever removed.
            if cnx_settings.get("client_id", None) is not None:
                client_pools = self.__pools.get(cnx_settings.get("client_id"))
                if pool in client_pools:
                    client_pools.remove(pool)
        return len(pools)

    def set_pool_unavailable(self, pool, err):
        """Sets a pool as unavailable.

        The time a pool is set unavailable depends on the given error message
        or the error number.

        Args:
            pool (ConnectionPool): The pool to set unavailable.
            err (Exception): The raised exception raised by a connection belonging
                to the pool.
        """
        penalty = None
        try:
            err_no = err.errno
            penalty = _TIMEOUT_PENALTIES_BY_ERR_NO[err_no]
        except (AttributeError, KeyError):
            pass
        if not penalty:
            err_msg = err.msg
            for timeout_penalty in _TIMEOUT_PENALTIES:
                if timeout_penalty in err_msg:
                    penalty = _TIMEOUT_PENALTIES[timeout_penalty]
        if penalty:
            pool.set_unavailable(penalty)
        else:
            # Other errors are severely punished
            pool.set_unavailable(100000)
+
class Session(object):
    """Enables interaction with a X Protocol enabled MySQL Product.

    The functionality includes:

    - Accessing available schemas.
    - Schema management operations.
    - Enabling/disabling warning generation.
    - Retrieval of connection information.

    Args:
        settings (dict): Connection data used to connect to the database.
    """

    def __init__(self, settings):
        self.use_pure = settings.get("use-pure", Protobuf.use_pure)
        self._settings = settings

        # Check for DNS SRV: resolve the host into a router list.
        if settings.get("host") and settings.get("dns-srv"):
            if not HAVE_DNSPYTHON:
                raise InterfaceError("MySQL host configuration requested DNS "
                                     "SRV. This requires the Python dnspython "
                                     "module. Please refer to documentation")
            try:
                srv_records = dns.resolver.query(settings["host"], "SRV")
            except dns.exception.DNSException:
                raise InterfaceError("Unable to locate any hosts for '{0}'"
                                     "".format(settings["host"]))
            self._settings["routers"] = []
            for srv in srv_records:
                self._settings["routers"].append({
                    "host": srv.target.to_text(omit_final_dot=True),
                    "port": srv.port,
                    "priority": srv.priority,
                    "weight": srv.weight
                })

        # Connection attributes default to enabled; only an explicit False
        # disables them (any other value falls through to _init_attributes).
        if "connection-attributes" not in self._settings or \
           self._settings["connection-attributes"] != False:
            self._settings["attributes"] = {}
            self._init_attributes()

        if "pooling" in settings and settings["pooling"]:
            # Create pool and retrieve a Connection instance
            PoolsManager().create_pool(settings)
            self._connection = PoolsManager().get_connection(settings)
            if self._connection is None:
                raise PoolError("Connection could not be retrieved from pool")
        else:
            self._connection = Connection(self._settings)
            self._connection.connect()
        # Set default schema
        schema = self._settings.get("schema")
        if schema:
            try:
                self.sql("USE {}".format(quote_identifier(schema))).execute()
            except OperationalError as err:
                # Access denied for user will raise err.errno = 1044
                errmsg = err.msg if err.errno == 1044 \
                    else "Default schema '{}' does not exists".format(schema)
                raise InterfaceError(errmsg, err.errno)

    def __enter__(self):
        # Context manager support: ``with Session(...) as session:``.
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Always close the session, even when the body raised.
        self.close()

    def _init_attributes(self):
        """Setup default and user defined connection-attributes."""
        if os.name == "nt":
            if "64" in platform.architecture()[0]:
                platform_arch = 'x86_64'
            elif "32" in platform.architecture()[0]:
                platform_arch = 'i386'
            else:
                platform_arch = platform.architecture()
            os_ver = "Windows-{}".format(platform.win32_ver()[1])
        else:
            platform_arch = platform.machine()
            if platform.system() == "Darwin":
                os_ver = "{}-{}".format("macOS", platform.mac_ver()[0])
            else:
                os_ver = "-".join(linux_distribution()[0:2])

        license_chunks = LICENSE.split(' ')
        if license_chunks[0] == "GPLv2":
            client_license = "GPL-2.0"
        else:
            client_license = "Commercial"

        default_attributes = {
            # Process id
            "_pid": str(os.getpid()),
            # Platform architecture
            "_platform": platform_arch,
            # OS version
            "_os": os_ver,
            # Hostname of the local machine
            "_source_host": socket.gethostname(),
            # Client's name
            "_client_name": "mysql-connector-python",
            # Client's version
            "_client_version": ".".join([str(x) for x in VERSION[0:3]]),
            # Client's License identifier
            "_client_license": client_license
        }
        self._settings["attributes"].update(default_attributes)

        if "connection-attributes" in self._settings:
            for attr_name in self._settings["connection-attributes"]:
                attr_value = self._settings["connection-attributes"][attr_name]
                # Validate name type
                if not isinstance(attr_name, str):
                    raise InterfaceError("Attribute name '{}' must be a string "
                                         "type".format(attr_name))
                # Validate attribute name limit 32 characters
                if len(attr_name) > 32:
                    raise InterfaceError("Attribute name '{}' exceeds 32 "
                                         "characters limit size"
                                         "".format(attr_name))
                # Validate names in connection-attributes cannot start with "_"
                if attr_name.startswith("_"):
                    raise InterfaceError("Key names in 'session-connect-"
                                         "attributes' cannot start with '_', "
                                         "found: {}".format(attr_name))
                # Validate value type
                if not isinstance(attr_value, str):
                    raise InterfaceError("Attribute name '{}' value '{}' must "
                                         "be a string type"
                                         "".format(attr_name, attr_value))

                # Validate attribute value limit 1024 characters
                if len(attr_value) > 1024:
                    raise InterfaceError("Attribute name '{}' value: '{}' "
                                         "exceeds 1024 characters limit size"
                                         "".format(attr_name, attr_value))

                self._settings["attributes"][attr_name] = attr_value

    @property
    def use_pure(self):
        """bool: `True` to use pure Python Protobuf implementation.
        """
        return Protobuf.use_pure

    @use_pure.setter
    def use_pure(self, value):
        if not isinstance(value, bool):
            raise ProgrammingError("'use_pure' option should be True or False")
        Protobuf.set_use_pure(value)

    def is_open(self):
        """Returns `True` if the session is open.

        Returns:
            bool: Returns `True` if the session is open.
        """
        return self._connection.stream.is_open()

    def sql(self, sql):
        """Creates a :class:`mysqlx.SqlStatement` object to allow running the
        SQL statement on the target MySQL Server.

        Args:
            sql (string): The SQL statement to be executed.

        Returns:
            mysqlx.SqlStatement: SqlStatement object.
        """
        return SqlStatement(self._connection, sql)

    def get_connection(self):
        """Returns the underlying connection.

        Returns:
            mysqlx.connection.Connection: The connection object.
        """
        return self._connection

    def get_schemas(self):
        """Returns the list of schemas in the current session.

        Returns:
            `list`: The list of schemas in the current session.

        .. versionadded:: 8.0.12
        """
        result = self.sql("SHOW DATABASES").execute()
        return [row[0] for row in result.fetch_all()]

    def get_schema(self, name):
        """Retrieves a Schema object from the current session by it's name.

        Args:
            name (string): The name of the Schema object to be retrieved.

        Returns:
            mysqlx.Schema: The Schema object with the given name.
        """
        return Schema(self, name)

    def get_default_schema(self):
        """Retrieves a Schema object from the current session by the schema
        name configured in the connection settings.

        Returns:
            mysqlx.Schema: The Schema object with the given name at connect
                           time.
            None: In case the default schema was not provided with the
                  initialization data.

        Raises:
            :class:`mysqlx.ProgrammingError`: If the provided default schema
                                              does not exists.
        """
        schema = self._connection.settings.get("schema")
        if schema:
            res = self.sql(
                _SELECT_SCHEMA_NAME_QUERY.format(escape(schema))
            ).execute().fetch_all()
            try:
                if res[0][0] == schema:
                    return Schema(self, schema)
            except IndexError:
                # Empty result set: the schema no longer exists server-side.
                raise ProgrammingError(
                    "Default schema '{}' does not exists".format(schema))
        return None

    def drop_schema(self, name):
        """Drops the schema with the specified name.

        Args:
            name (string): The name of the Schema object to be retrieved.
        """
        self._connection.execute_nonquery(
            "sql", _DROP_DATABASE_QUERY.format(quote_identifier(name)), True)

    def create_schema(self, name):
        """Creates a schema on the database and returns the corresponding
        object.

        Args:
            name (string): A string value indicating the schema name.
        """
        self._connection.execute_nonquery(
            "sql", _CREATE_DATABASE_QUERY.format(quote_identifier(name)), True)
        return Schema(self, name)

    def start_transaction(self):
        """Starts a transaction context on the server."""
        self._connection.execute_nonquery("sql", "START TRANSACTION", True)

    def commit(self):
        """Commits all the operations executed after a call to
        startTransaction().
        """
        self._connection.execute_nonquery("sql", "COMMIT", True)

    def rollback(self):
        """Discards all the operations executed after a call to
        startTransaction().
        """
        self._connection.execute_nonquery("sql", "ROLLBACK", True)

    def set_savepoint(self, name=None):
        """Creates a transaction savepoint.

        If a name is not provided, one will be generated using the uuid.uuid1()
        function.

        Args:
            name (Optional[string]): The savepoint name.

        Returns:
            string: The savepoint name.
        """
        if name is None:
            name = "{0}".format(uuid.uuid1())
        elif not isinstance(name, str) or len(name.strip()) == 0:
            raise ProgrammingError("Invalid SAVEPOINT name")
        self._connection.execute_nonquery("sql", "SAVEPOINT {0}"
                                          "".format(quote_identifier(name)),
                                          True)
        return name

    def rollback_to(self, name):
        """Rollback to a transaction savepoint with the given name.

        Args:
            name (string): The savepoint name.
        """
        if not isinstance(name, str) or len(name.strip()) == 0:
            raise ProgrammingError("Invalid SAVEPOINT name")
        self._connection.execute_nonquery("sql", "ROLLBACK TO SAVEPOINT {0}"
                                          "".format(quote_identifier(name)),
                                          True)

    def release_savepoint(self, name):
        """Release a transaction savepoint with the given name.

        Args:
            name (string): The savepoint name.
        """
        if not isinstance(name, str) or len(name.strip()) == 0:
            raise ProgrammingError("Invalid SAVEPOINT name")
        self._connection.execute_nonquery("sql", "RELEASE SAVEPOINT {0}"
                                          "".format(quote_identifier(name)),
                                          True)

    def close(self):
        """Closes the session."""
        self._connection.close_session()
        # Set an unconnected connection so the object stays usable-looking
        # but detached from the server.
        self._connection = Connection(self._settings)

    def close_connections(self):
        """Closes all underliying connections as pooled connections"""
        self._connection.close_connection()
+
+
class Client(object):
    """Class defining a client, it stores a connection configuration.

    Args:
        connection_dict (dict): The connection information to connect to a
                                MySQL server.
        options_dict (dict): The options to configure this client.

    .. versionadded:: 8.0.13
    """
    def __init__(self, connection_dict, options_dict=None):
        self.settings = connection_dict
        if options_dict is None:
            options_dict = {}

        self.sessions = []
        self.client_id = uuid.uuid4()

        self._set_pool_size(options_dict.get("max_size", 25))
        self._set_max_idle_time(options_dict.get("max_idle_time", 0))
        self._set_queue_timeout(options_dict.get("queue_timeout", 0))
        self._set_pool_enabled(options_dict.get("enabled", True))

        # Propagate the validated options into the shared settings dict so
        # Session/PoolsManager pick them up.
        self.settings["pooling"] = self.pooling_enabled
        self.settings["max_size"] = self.max_size
        self.settings["client_id"] = self.client_id

    def __enter__(self):
        # Context manager support: ``with Client(...) as client:``.
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Close all sessions opened by this client on exit.
        self.close()

    def _set_pool_size(self, pool_size):
        """Set the size of the pool.

        This method sets the size of the pool but it will not resize the pool.

        Args:
            pool_size (int): An integer greater than 0 indicating the pool
                             size.

        Raises:
            :class:`AttributeError`: If the pool_size value is not an integer
                                     greater than 0.
        """
        # NOTE(review): the validation rejects 0, which makes the
        # ``pool_size == 0`` fallback below unreachable — confirm intent.
        if isinstance(pool_size, bool) or not isinstance(pool_size, int) or \
           not pool_size > 0:
            raise AttributeError("Pool max_size value must be an integer "
                                 "greater than 0, the given value {} "
                                 "is not valid.".format(pool_size))

        self.max_size = _CNX_POOL_MAXSIZE if pool_size == 0 else pool_size

    def _set_max_idle_time(self, max_idle_time):
        """Set the max idle time.

        This method sets the max idle time.

        Args:
            max_idle_time (int): An integer equal or greater than 0 indicating
                                 the max idle time in milliseconds.

        Raises:
            :class:`AttributeError`: If the max_idle_time value is not an
                                     integer greater or equal to 0.
        """
        if isinstance(max_idle_time, bool) or \
           not isinstance(max_idle_time, int) or not max_idle_time > -1:
            raise AttributeError("Connection max_idle_time value must be an "
                                 "integer greater or equal to 0, the given "
                                 "value {} is not valid.".format(max_idle_time))

        self.max_idle_time = max_idle_time
        # 0 selects the default; otherwise convert milliseconds to seconds.
        self.settings["max_idle_time"] = _CNX_POOL_MAX_IDLE_TIME \
            if max_idle_time == 0 else int(max_idle_time / 1000)

    def _set_pool_enabled(self, enabled):
        """Set if the pool is enabled.

        This method sets if the pool is enabled.

        Args:
            enabled (bool): True if to enabling the pool.

        Raises:
            :class:`AttributeError`: If the value of enabled is not a bool type.
        """
        if not isinstance(enabled, bool):
            raise AttributeError("The enabled value should be True or False.")
        self.pooling_enabled = enabled

    def _set_queue_timeout(self, queue_timeout):
        """Set the queue timeout.

        This method sets the queue timeout.

        Args:
            queue_timeout (int): An integer equal or greater than 0 indicating
                                 the queue timeout in milliseconds.

        Raises:
            :class:`AttributeError`: If the queue_timeout value is not an
                                     integer greater or equal to 0.
        """
        if isinstance(queue_timeout, bool) or \
           not isinstance(queue_timeout, int) or not queue_timeout > -1:
            raise AttributeError("Connection queue_timeout value must be an "
                                 "integer greater or equal to 0, the given "
                                 "value {} is not valid.".format(queue_timeout))

        self.queue_timeout = queue_timeout
        # 0 selects the default; otherwise convert milliseconds to seconds.
        self.settings["queue_timeout"] = _CNX_POOL_QUEUE_TIMEOUT \
            if queue_timeout == 0 else int(queue_timeout / 1000)
        # To avoid a connection stall waiting for the server, if the
        # connect-timeout is not given, use the queue_timeout
        if not "connect-timeout" in self.settings:
            self.settings["connect-timeout"] = self.queue_timeout

    def get_session(self):
        """Creates a Session instance using the provided connection data.

        Returns:
            Session: Session object.
        """
        session = Session(self.settings)
        self.sessions.append(session)
        return session

    def close(self):
        """Closes the sessions opened by this client.
        """
        PoolsManager().close_pool(self.settings)
        for session in self.sessions:
            session.close_connections()
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/constants.py b/monEnvTP/lib/python3.8/site-packages/mysqlx/constants.py
new file mode 100644
index 0000000000000000000000000000000000000000..a1b4e99e90f0a15ae8ac6a6fce558c7b4ba535a0
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysqlx/constants.py
@@ -0,0 +1,323 @@
+# Copyright (c) 2016, 2019, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""Constants."""
+
+from collections import namedtuple
+
+
+# pylint: disable=C0103
+def create_enum(name, fields, values=None):
+ """Emulates an enum by creating a namedtuple.
+
+ Args:
+ name (string): The type name.
+ fields (tuple): The fields names.
+ values (tuple): The values of the fields.
+
+ Returns:
+ namedtuple: A namedtuple object.
+ """
+ Enum = namedtuple(name, fields)
+ if values is None:
+ return Enum(*fields)
+ return Enum(*values)
+
+
+SSLMode = create_enum("SSLMode",
+ ("REQUIRED", "DISABLED", "VERIFY_CA", "VERIFY_IDENTITY"),
+ ("required", "disabled", "verify_ca", "verify_identity"))
+Auth = create_enum("Auth",
+ ("PLAIN", "MYSQL41", "SHA256_MEMORY"),
+ ("plain", "mysql41", "sha256_memory"))
+LockContention = create_enum("LockContention",
+ ("DEFAULT", "NOWAIT", "SKIP_LOCKED"), (0, 1, 2))
+
+# Compression algorithms and aliases
+COMPRESSION_ALGORITHMS = {
+ "deflate": "deflate_stream",
+ "deflate_stream": "deflate_stream",
+ "lz4": "lz4_message",
+ "lz4_message": "lz4_message",
+ "zstd": "zstd_stream",
+ "zstd_stream": "zstd_stream"
+}
+
+TLS_VERSIONS = ["TLSv1", "TLSv1.1", "TLSv1.2", "TLSv1.3"]
+
+# TLS v1.0 cipher suites IANI to OpenSSL name translation
+TLSV1_CIPHER_SUITES = {
+ "TLS_RSA_WITH_NULL_MD5": "NULL-MD5",
+ "TLS_RSA_WITH_NULL_SHA": "NULL-SHA",
+ "TLS_RSA_WITH_RC4_128_MD5": "RC4-MD5",
+ "TLS_RSA_WITH_RC4_128_SHA": "RC4-SHA",
+ "TLS_RSA_WITH_IDEA_CBC_SHA": "IDEA-CBC-SHA",
+ "TLS_RSA_WITH_3DES_EDE_CBC_SHA": "DES-CBC3-SHA",
+
+ "TLS_DH_DSS_WITH_3DES_EDE_CBC_SHA": "Not implemented.",
+ "TLS_DH_RSA_WITH_3DES_EDE_CBC_SHA": "Not implemented.",
+ "TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA": "DHE-DSS-DES-CBC3-SHA",
+ "TLS_DHE_RSA_WITH_3DES_EDE_CBC_SHA": "DHE-RSA-DES-CBC3-SHA",
+
+ "TLS_DH_anon_WITH_RC4_128_MD5": "ADH-RC4-MD5",
+ "TLS_DH_anon_WITH_3DES_EDE_CBC_SHA": "ADH-DES-CBC3-SHA",
+
+ # AES cipher suites from RFC3268, extending TLS v1.0
+ "TLS_RSA_WITH_AES_128_CBC_SHA": "AES128-SHA",
+ "TLS_RSA_WITH_AES_256_CBC_SHA": "AES256-SHA",
+
+ "TLS_DH_DSS_WITH_AES_128_CBC_SHA": "DH-DSS-AES128-SHA",
+ "TLS_DH_DSS_WITH_AES_256_CBC_SHA": "DH-DSS-AES256-SHA",
+ "TLS_DH_RSA_WITH_AES_128_CBC_SHA": "DH-RSA-AES128-SHA",
+ "TLS_DH_RSA_WITH_AES_256_CBC_SHA": "DH-RSA-AES256-SHA",
+
+ "TLS_DHE_DSS_WITH_AES_128_CBC_SHA": "DHE-DSS-AES128-SHA",
+ "TLS_DHE_DSS_WITH_AES_256_CBC_SHA": "DHE-DSS-AES256-SHA",
+ "TLS_DHE_RSA_WITH_AES_128_CBC_SHA": "DHE-RSA-AES128-SHA",
+ "TLS_DHE_RSA_WITH_AES_256_CBC_SHA": "DHE-RSA-AES256-SHA",
+
+ "TLS_DH_anon_WITH_AES_128_CBC_SHA": "ADH-AES128-SHA",
+ "TLS_DH_anon_WITH_AES_256_CBC_SHA": "ADH-AES256-SHA",
+
+ # Camellia cipher suites from RFC4132, extending TLS v1.0
+ "TLS_RSA_WITH_CAMELLIA_128_CBC_SHA": "CAMELLIA128-SHA",
+ "TLS_RSA_WITH_CAMELLIA_256_CBC_SHA": "CAMELLIA256-SHA",
+
+ "TLS_DH_DSS_WITH_CAMELLIA_128_CBC_SHA": "DH-DSS-CAMELLIA128-SHA",
+ "TLS_DH_DSS_WITH_CAMELLIA_256_CBC_SHA": "DH-DSS-CAMELLIA256-SHA",
+ "TLS_DH_RSA_WITH_CAMELLIA_128_CBC_SHA": "DH-RSA-CAMELLIA128-SHA",
+ "TLS_DH_RSA_WITH_CAMELLIA_256_CBC_SHA": "DH-RSA-CAMELLIA256-SHA",
+
+ "TLS_DHE_DSS_WITH_CAMELLIA_128_CBC_SHA": "DHE-DSS-CAMELLIA128-SHA",
+ "TLS_DHE_DSS_WITH_CAMELLIA_256_CBC_SHA": "DHE-DSS-CAMELLIA256-SHA",
+ "TLS_DHE_RSA_WITH_CAMELLIA_128_CBC_SHA": "DHE-RSA-CAMELLIA128-SHA",
+ "TLS_DHE_RSA_WITH_CAMELLIA_256_CBC_SHA": "DHE-RSA-CAMELLIA256-SHA",
+
+ "TLS_DH_anon_WITH_CAMELLIA_128_CBC_SHA": "ADH-CAMELLIA128-SHA",
+ "TLS_DH_anon_WITH_CAMELLIA_256_CBC_SHA": "ADH-CAMELLIA256-SHA",
+
+ # SEED cipher suites from RFC4162, extending TLS v1.0
+ "TLS_RSA_WITH_SEED_CBC_SHA": "SEED-SHA",
+
+ "TLS_DH_DSS_WITH_SEED_CBC_SHA": "DH-DSS-SEED-SHA",
+ "TLS_DH_RSA_WITH_SEED_CBC_SHA": "DH-RSA-SEED-SHA",
+
+ "TLS_DHE_DSS_WITH_SEED_CBC_SHA": "DHE-DSS-SEED-SHA",
+ "TLS_DHE_RSA_WITH_SEED_CBC_SHA": "DHE-RSA-SEED-SHA",
+
+ "TLS_DH_anon_WITH_SEED_CBC_SHA": "ADH-SEED-SHA",
+
+ # GOST cipher suites from draft-chudov-cryptopro-cptls, extending TLS v1.0
+ "TLS_GOSTR341094_WITH_28147_CNT_IMIT": "GOST94-GOST89-GOST89",
+ "TLS_GOSTR341001_WITH_28147_CNT_IMIT": "GOST2001-GOST89-GOST89",
+ "TLS_GOSTR341094_WITH_NULL_GOSTR3411": "GOST94-NULL-GOST94",
+ "TLS_GOSTR341001_WITH_NULL_GOSTR3411": "GOST2001-NULL-GOST94"}
+
+# TLS v1.1 cipher suites IANI to OpenSSL name translation
+TLSV1_1_CIPHER_SUITES = TLSV1_CIPHER_SUITES
+
+# TLS v1.2 cipher suites IANI to OpenSSL name translation
+TLSV1_2_CIPHER_SUITES = {
+ "TLS_RSA_WITH_NULL_SHA256": "NULL-SHA256",
+
+ "TLS_RSA_WITH_AES_128_CBC_SHA256": "AES128-SHA256",
+ "TLS_RSA_WITH_AES_256_CBC_SHA256": "AES256-SHA256",
+ "TLS_RSA_WITH_AES_128_GCM_SHA256": "AES128-GCM-SHA256",
+ "TLS_RSA_WITH_AES_256_GCM_SHA384": "AES256-GCM-SHA384",
+
+ "TLS_DH_RSA_WITH_AES_128_CBC_SHA256": "DH-RSA-AES128-SHA256",
+ "TLS_DH_RSA_WITH_AES_256_CBC_SHA256": "DH-RSA-AES256-SHA256",
+ "TLS_DH_RSA_WITH_AES_128_GCM_SHA256": "DH-RSA-AES128-GCM-SHA256",
+ "TLS_DH_RSA_WITH_AES_256_GCM_SHA384": "DH-RSA-AES256-GCM-SHA384",
+
+ "TLS_DH_DSS_WITH_AES_128_CBC_SHA256": "DH-DSS-AES128-SHA256",
+ "TLS_DH_DSS_WITH_AES_256_CBC_SHA256": "DH-DSS-AES256-SHA256",
+ "TLS_DH_DSS_WITH_AES_128_GCM_SHA256": "DH-DSS-AES128-GCM-SHA256",
+ "TLS_DH_DSS_WITH_AES_256_GCM_SHA384": "DH-DSS-AES256-GCM-SHA384",
+
+ "TLS_DHE_RSA_WITH_AES_128_CBC_SHA256": "DHE-RSA-AES128-SHA256",
+ "TLS_DHE_RSA_WITH_AES_256_CBC_SHA256": "DHE-RSA-AES256-SHA256",
+ "TLS_DHE_RSA_WITH_AES_128_GCM_SHA256": "DHE-RSA-AES128-GCM-SHA256",
+ "TLS_DHE_RSA_WITH_AES_256_GCM_SHA384": "DHE-RSA-AES256-GCM-SHA384",
+
+ "TLS_DHE_DSS_WITH_AES_128_CBC_SHA256": "DHE-DSS-AES128-SHA256",
+ "TLS_DHE_DSS_WITH_AES_256_CBC_SHA256": "DHE-DSS-AES256-SHA256",
+ "TLS_DHE_DSS_WITH_AES_128_GCM_SHA256": "DHE-DSS-AES128-GCM-SHA256",
+ "TLS_DHE_DSS_WITH_AES_256_GCM_SHA384": "DHE-DSS-AES256-GCM-SHA384",
+
+ "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256": "ECDHE-RSA-AES128-SHA256",
+ "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384": "ECDHE-RSA-AES256-SHA384",
+ "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256": "ECDHE-RSA-AES128-GCM-SHA256",
+ "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384": "ECDHE-RSA-AES256-GCM-SHA384",
+
+ "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256": "ECDHE-ECDSA-AES128-SHA256",
+ "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384": "ECDHE-ECDSA-AES256-SHA384",
+ "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256": "ECDHE-ECDSA-AES128-GCM-SHA256",
+ "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384": "ECDHE-ECDSA-AES256-GCM-SHA384",
+
+ "TLS_DH_anon_WITH_AES_128_CBC_SHA256": "ADH-AES128-SHA256",
+ "TLS_DH_anon_WITH_AES_256_CBC_SHA256": "ADH-AES256-SHA256",
+ "TLS_DH_anon_WITH_AES_128_GCM_SHA256": "ADH-AES128-GCM-SHA256",
+ "TLS_DH_anon_WITH_AES_256_GCM_SHA384": "ADH-AES256-GCM-SHA384",
+
+ "RSA_WITH_AES_128_CCM": "AES128-CCM",
+ "RSA_WITH_AES_256_CCM": "AES256-CCM",
+ "DHE_RSA_WITH_AES_128_CCM": "DHE-RSA-AES128-CCM",
+ "DHE_RSA_WITH_AES_256_CCM": "DHE-RSA-AES256-CCM",
+ "RSA_WITH_AES_128_CCM_8": "AES128-CCM8",
+ "RSA_WITH_AES_256_CCM_8": "AES256-CCM8",
+ "DHE_RSA_WITH_AES_128_CCM_8": "DHE-RSA-AES128-CCM8",
+ "DHE_RSA_WITH_AES_256_CCM_8": "DHE-RSA-AES256-CCM8",
+ "ECDHE_ECDSA_WITH_AES_128_CCM": "ECDHE-ECDSA-AES128-CCM",
+ "ECDHE_ECDSA_WITH_AES_256_CCM": "ECDHE-ECDSA-AES256-CCM",
+ "ECDHE_ECDSA_WITH_AES_128_CCM_8": "ECDHE-ECDSA-AES128-CCM8",
+ "ECDHE_ECDSA_WITH_AES_256_CCM_8": "ECDHE-ECDSA-AES256-CCM8",
+
+ # ARIA cipher suites from RFC6209, extending TLS v1.2
+ "TLS_RSA_WITH_ARIA_128_GCM_SHA256": "ARIA128-GCM-SHA256",
+ "TLS_RSA_WITH_ARIA_256_GCM_SHA384": "ARIA256-GCM-SHA384",
+ "TLS_DHE_RSA_WITH_ARIA_128_GCM_SHA256": "DHE-RSA-ARIA128-GCM-SHA256",
+ "TLS_DHE_RSA_WITH_ARIA_256_GCM_SHA384": "DHE-RSA-ARIA256-GCM-SHA384",
+ "TLS_DHE_DSS_WITH_ARIA_128_GCM_SHA256": "DHE-DSS-ARIA128-GCM-SHA256",
+ "TLS_DHE_DSS_WITH_ARIA_256_GCM_SHA384": "DHE-DSS-ARIA256-GCM-SHA384",
+ "TLS_ECDHE_ECDSA_WITH_ARIA_128_GCM_SHA256": "ECDHE-ECDSA-ARIA128-GCM-SHA256",
+ "TLS_ECDHE_ECDSA_WITH_ARIA_256_GCM_SHA384": "ECDHE-ECDSA-ARIA256-GCM-SHA384",
+ "TLS_ECDHE_RSA_WITH_ARIA_128_GCM_SHA256": "ECDHE-ARIA128-GCM-SHA256",
+ "TLS_ECDHE_RSA_WITH_ARIA_256_GCM_SHA384": "ECDHE-ARIA256-GCM-SHA384",
+ "TLS_PSK_WITH_ARIA_128_GCM_SHA256": "PSK-ARIA128-GCM-SHA256",
+ "TLS_PSK_WITH_ARIA_256_GCM_SHA384": "PSK-ARIA256-GCM-SHA384",
+ "TLS_DHE_PSK_WITH_ARIA_128_GCM_SHA256": "DHE-PSK-ARIA128-GCM-SHA256",
+ "TLS_DHE_PSK_WITH_ARIA_256_GCM_SHA384": "DHE-PSK-ARIA256-GCM-SHA384",
+ "TLS_RSA_PSK_WITH_ARIA_128_GCM_SHA256": "RSA-PSK-ARIA128-GCM-SHA256",
+ "TLS_RSA_PSK_WITH_ARIA_256_GCM_SHA384": "RSA-PSK-ARIA256-GCM-SHA384",
+
+ # Camellia HMAC-Based cipher suites from RFC6367, extending TLS v1.2
+ "TLS_ECDHE_ECDSA_WITH_CAMELLIA_128_CBC_SHA256": "ECDHE-ECDSA-CAMELLIA128-SHA256",
+ "TLS_ECDHE_ECDSA_WITH_CAMELLIA_256_CBC_SHA384": "ECDHE-ECDSA-CAMELLIA256-SHA384",
+ "TLS_ECDHE_RSA_WITH_CAMELLIA_128_CBC_SHA256": "ECDHE-RSA-CAMELLIA128-SHA256",
+ "TLS_ECDHE_RSA_WITH_CAMELLIA_256_CBC_SHA384": "ECDHE-RSA-CAMELLIA256-SHA384",
+
+ # Pre-shared keying (PSK) cipher suites",
+ "PSK_WITH_NULL_SHA": "PSK-NULL-SHA",
+ "DHE_PSK_WITH_NULL_SHA": "DHE-PSK-NULL-SHA",
+ "RSA_PSK_WITH_NULL_SHA": "RSA-PSK-NULL-SHA",
+
+ "PSK_WITH_RC4_128_SHA": "PSK-RC4-SHA",
+ "PSK_WITH_3DES_EDE_CBC_SHA": "PSK-3DES-EDE-CBC-SHA",
+ "PSK_WITH_AES_128_CBC_SHA": "PSK-AES128-CBC-SHA",
+ "PSK_WITH_AES_256_CBC_SHA": "PSK-AES256-CBC-SHA",
+
+ "DHE_PSK_WITH_RC4_128_SHA": "DHE-PSK-RC4-SHA",
+ "DHE_PSK_WITH_3DES_EDE_CBC_SHA": "DHE-PSK-3DES-EDE-CBC-SHA",
+ "DHE_PSK_WITH_AES_128_CBC_SHA": "DHE-PSK-AES128-CBC-SHA",
+ "DHE_PSK_WITH_AES_256_CBC_SHA": "DHE-PSK-AES256-CBC-SHA",
+
+ "RSA_PSK_WITH_RC4_128_SHA": "RSA-PSK-RC4-SHA",
+ "RSA_PSK_WITH_3DES_EDE_CBC_SHA": "RSA-PSK-3DES-EDE-CBC-SHA",
+ "RSA_PSK_WITH_AES_128_CBC_SHA": "RSA-PSK-AES128-CBC-SHA",
+ "RSA_PSK_WITH_AES_256_CBC_SHA": "RSA-PSK-AES256-CBC-SHA",
+
+ "PSK_WITH_AES_128_GCM_SHA256": "PSK-AES128-GCM-SHA256",
+ "PSK_WITH_AES_256_GCM_SHA384": "PSK-AES256-GCM-SHA384",
+ "DHE_PSK_WITH_AES_128_GCM_SHA256": "DHE-PSK-AES128-GCM-SHA256",
+ "DHE_PSK_WITH_AES_256_GCM_SHA384": "DHE-PSK-AES256-GCM-SHA384",
+ "RSA_PSK_WITH_AES_128_GCM_SHA256": "RSA-PSK-AES128-GCM-SHA256",
+ "RSA_PSK_WITH_AES_256_GCM_SHA384": "RSA-PSK-AES256-GCM-SHA384",
+
+ "PSK_WITH_AES_128_CBC_SHA256": "PSK-AES128-CBC-SHA256",
+ "PSK_WITH_AES_256_CBC_SHA384": "PSK-AES256-CBC-SHA384",
+ "PSK_WITH_NULL_SHA256": "PSK-NULL-SHA256",
+ "PSK_WITH_NULL_SHA384": "PSK-NULL-SHA384",
+ "DHE_PSK_WITH_AES_128_CBC_SHA256": "DHE-PSK-AES128-CBC-SHA256",
+ "DHE_PSK_WITH_AES_256_CBC_SHA384": "DHE-PSK-AES256-CBC-SHA384",
+ "DHE_PSK_WITH_NULL_SHA256": "DHE-PSK-NULL-SHA256",
+ "DHE_PSK_WITH_NULL_SHA384": "DHE-PSK-NULL-SHA384",
+ "RSA_PSK_WITH_AES_128_CBC_SHA256": "RSA-PSK-AES128-CBC-SHA256",
+ "RSA_PSK_WITH_AES_256_CBC_SHA384": "RSA-PSK-AES256-CBC-SHA384",
+ "RSA_PSK_WITH_NULL_SHA256": "RSA-PSK-NULL-SHA256",
+ "RSA_PSK_WITH_NULL_SHA384": "RSA-PSK-NULL-SHA384",
+
+ "ECDHE_PSK_WITH_RC4_128_SHA": "ECDHE-PSK-RC4-SHA",
+ "ECDHE_PSK_WITH_3DES_EDE_CBC_SHA": "ECDHE-PSK-3DES-EDE-CBC-SHA",
+ "ECDHE_PSK_WITH_AES_128_CBC_SHA": "ECDHE-PSK-AES128-CBC-SHA",
+ "ECDHE_PSK_WITH_AES_256_CBC_SHA": "ECDHE-PSK-AES256-CBC-SHA",
+ "ECDHE_PSK_WITH_AES_128_CBC_SHA256": "ECDHE-PSK-AES128-CBC-SHA256",
+ "ECDHE_PSK_WITH_AES_256_CBC_SHA384": "ECDHE-PSK-AES256-CBC-SHA384",
+ "ECDHE_PSK_WITH_NULL_SHA": "ECDHE-PSK-NULL-SHA",
+ "ECDHE_PSK_WITH_NULL_SHA256": "ECDHE-PSK-NULL-SHA256",
+ "ECDHE_PSK_WITH_NULL_SHA384": "ECDHE-PSK-NULL-SHA384",
+
+ "PSK_WITH_CAMELLIA_128_CBC_SHA256": "PSK-CAMELLIA128-SHA256",
+ "PSK_WITH_CAMELLIA_256_CBC_SHA384": "PSK-CAMELLIA256-SHA384",
+
+ "DHE_PSK_WITH_CAMELLIA_128_CBC_SHA256": "DHE-PSK-CAMELLIA128-SHA256",
+ "DHE_PSK_WITH_CAMELLIA_256_CBC_SHA384": "DHE-PSK-CAMELLIA256-SHA384",
+
+ "RSA_PSK_WITH_CAMELLIA_128_CBC_SHA256": "RSA-PSK-CAMELLIA128-SHA256",
+ "RSA_PSK_WITH_CAMELLIA_256_CBC_SHA384": "RSA-PSK-CAMELLIA256-SHA384",
+
+ "ECDHE_PSK_WITH_CAMELLIA_128_CBC_SHA256": "ECDHE-PSK-CAMELLIA128-SHA256",
+ "ECDHE_PSK_WITH_CAMELLIA_256_CBC_SHA384": "ECDHE-PSK-CAMELLIA256-SHA384",
+
+ "PSK_WITH_AES_128_CCM": "PSK-AES128-CCM",
+ "PSK_WITH_AES_256_CCM": "PSK-AES256-CCM",
+ "DHE_PSK_WITH_AES_128_CCM": "DHE-PSK-AES128-CCM",
+ "DHE_PSK_WITH_AES_256_CCM": "DHE-PSK-AES256-CCM",
+ "PSK_WITH_AES_128_CCM_8": "PSK-AES128-CCM8",
+ "PSK_WITH_AES_256_CCM_8": "PSK-AES256-CCM8",
+ "DHE_PSK_WITH_AES_128_CCM_8": "DHE-PSK-AES128-CCM8",
+ "DHE_PSK_WITH_AES_256_CCM_8": "DHE-PSK-AES256-CCM8",
+
+ # ChaCha20-Poly1305 cipher suites, extending TLS v1.2
+ "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256": "ECDHE-RSA-CHACHA20-POLY1305",
+ "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256": "ECDHE-ECDSA-CHACHA20-POLY1305",
+ "TLS_DHE_RSA_WITH_CHACHA20_POLY1305_SHA256": "DHE-RSA-CHACHA20-POLY1305",
+ "TLS_PSK_WITH_CHACHA20_POLY1305_SHA256": "PSK-CHACHA20-POLY1305",
+ "TLS_ECDHE_PSK_WITH_CHACHA20_POLY1305_SHA256": "ECDHE-PSK-CHACHA20-POLY1305",
+ "TLS_DHE_PSK_WITH_CHACHA20_POLY1305_SHA256": "DHE-PSK-CHACHA20-POLY1305",
+ "TLS_RSA_PSK_WITH_CHACHA20_POLY1305_SHA256": "RSA-PSK-CHACHA20-POLY1305"}
+
+# TLS v1.3 cipher suites IANI to OpenSSL name translation
+TLSV1_3_CIPHER_SUITES = {
+ "TLS_AES_128_GCM_SHA256": "TLS_AES_128_GCM_SHA256",
+ "TLS_AES_256_GCM_SHA384": "TLS_AES_256_GCM_SHA384",
+ "TLS_CHACHA20_POLY1305_SHA256": "TLS_CHACHA20_POLY1305_SHA256",
+ "TLS_AES_128_CCM_SHA256": "TLS_AES_128_CCM_SHA256",
+ "TLS_AES_128_CCM_8_SHA256": "TLS_AES_128_CCM_8_SHA256"}
+
+TLS_CIPHER_SUITES = {
+ "TLSv1": TLSV1_CIPHER_SUITES,
+ "TLSv1.1": TLSV1_1_CIPHER_SUITES,
+ "TLSv1.2": TLSV1_2_CIPHER_SUITES,
+ "TLSv1.3": TLSV1_3_CIPHER_SUITES}
+
+OPENSSL_CS_NAMES = {
+ "TLSv1": TLSV1_CIPHER_SUITES.values(),
+ "TLSv1.1": TLSV1_1_CIPHER_SUITES.values(),
+ "TLSv1.2": TLSV1_2_CIPHER_SUITES.values(),
+ "TLSv1.3": TLSV1_3_CIPHER_SUITES.values()}
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/crud.py b/monEnvTP/lib/python3.8/site-packages/mysqlx/crud.py
new file mode 100644
index 0000000000000000000000000000000000000000..281edc800a9137ed43279238bc0caf54b5f60d08
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysqlx/crud.py
@@ -0,0 +1,722 @@
+# Copyright (c) 2016, 2021, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""Implementation of the CRUD database objects."""
+
+import json
+import warnings
+
+from .dbdoc import DbDoc
+from .errorcode import (ER_NO_SUCH_TABLE, ER_TABLE_EXISTS_ERROR,
+ ER_X_CMD_NUM_ARGUMENTS, ER_X_INVALID_ADMIN_COMMAND)
+from .errors import NotSupportedError, OperationalError, ProgrammingError
+from .helpers import deprecated, escape, quote_identifier
+from .statement import (FindStatement, AddStatement, RemoveStatement,
+ ModifyStatement, SelectStatement, InsertStatement,
+ DeleteStatement, UpdateStatement,
+ CreateCollectionIndexStatement)
+
+
+_COUNT_VIEWS_QUERY = ("SELECT COUNT(*) FROM information_schema.views "
+ "WHERE table_schema = '{0}' AND table_name = '{1}'")
+_COUNT_TABLES_QUERY = ("SELECT COUNT(*) FROM information_schema.tables "
+ "WHERE table_schema = '{0}' AND table_name = '{1}'")
+_COUNT_SCHEMAS_QUERY = ("SELECT COUNT(*) FROM information_schema.schemata "
+ "WHERE schema_name = '{0}'")
+_COUNT_QUERY = "SELECT COUNT(*) FROM {0}.{1}"
+_DROP_TABLE_QUERY = "DROP TABLE IF EXISTS {0}.{1}"
+
+
+class DatabaseObject(object):
+ """Provides base functionality for database objects.
+
+ Args:
+ schema (mysqlx.Schema): The Schema object.
+ name (str): The database object name.
+ """
+ def __init__(self, schema, name):
+ self._schema = schema
+ self._name = name.decode() if isinstance(name, bytes) else name
+ self._session = self._schema.get_session()
+ self._connection = self._session.get_connection()
+
+ @property
+ def session(self):
+ """:class:`mysqlx.Session`: The Session object.
+ """
+ return self._session
+
+ @property
+ def schema(self):
+ """:class:`mysqlx.Schema`: The Schema object.
+ """
+ return self._schema
+
+ @property
+ def name(self):
+ """str: The name of this database object.
+ """
+ return self._name
+
+ def get_connection(self):
+ """Returns the underlying connection.
+
+ Returns:
+ mysqlx.connection.Connection: The connection object.
+ """
+ return self._connection
+
+ def get_session(self):
+ """Returns the session of this database object.
+
+ Returns:
+ mysqlx.Session: The Session object.
+ """
+ return self._session
+
+ def get_schema(self):
+ """Returns the Schema object of this database object.
+
+ Returns:
+ mysqlx.Schema: The Schema object.
+ """
+ return self._schema
+
+ def get_name(self):
+ """Returns the name of this database object.
+
+ Returns:
+ str: The name of this database object.
+ """
+ return self._name
+
+ def exists_in_database(self):
+ """Verifies if this object exists in the database.
+
+ Returns:
+ bool: `True` if object exists in database.
+
+ Raises:
+ NotImplementedError: This method must be implemented.
+ """
+ raise NotImplementedError
+
+ @deprecated("8.0.12", "Use 'exists_in_database()' method instead")
+ def am_i_real(self):
+ """Verifies if this object exists in the database.
+
+ Returns:
+ bool: `True` if object exists in database.
+
+ Raises:
+ NotImplementedError: This method must be implemented.
+
+ .. deprecated:: 8.0.12
+ Use ``exists_in_database()`` method instead.
+ """
+ return self.exists_in_database()
+
+ @deprecated("8.0.12", "Use 'get_name()' method instead")
+ def who_am_i(self):
+ """Returns the name of this database object.
+
+ Returns:
+ str: The name of this database object.
+
+ .. deprecated:: 8.0.12
+ Use ``get_name()`` method instead.
+ """
+ return self.get_name()
+
+
+class Schema(DatabaseObject):
+ """A client-side representation of a database schema. Provides access to
+ the schema contents.
+
+ Args:
+ session (mysqlx.XSession): Session object.
+ name (str): The Schema name.
+ """
+ def __init__(self, session, name):
+ self._session = session
+ super(Schema, self).__init__(self, name)
+
+ def exists_in_database(self):
+ """Verifies if this object exists in the database.
+
+ Returns:
+ bool: `True` if object exists in database.
+ """
+ sql = _COUNT_SCHEMAS_QUERY.format(escape(self._name))
+ return self._connection.execute_sql_scalar(sql) == 1
+
+ def get_collections(self):
+ """Returns a list of collections for this schema.
+
+ Returns:
+ `list`: List of Collection objects.
+ """
+ rows = self._connection.get_row_result("list_objects",
+ {"schema": self._name})
+ rows.fetch_all()
+ collections = []
+ for row in rows:
+ if row["type"] != "COLLECTION":
+ continue
+ try:
+ collection = Collection(self, row["TABLE_NAME"])
+ except ValueError:
+ collection = Collection(self, row["name"])
+ collections.append(collection)
+ return collections
+
+ def get_collection_as_table(self, name, check_existence=False):
+ """Returns a a table object for the given collection
+
+ Returns:
+ mysqlx.Table: Table object.
+
+ """
+ return self.get_table(name, check_existence)
+
+ def get_tables(self):
+ """Returns a list of tables for this schema.
+
+ Returns:
+ `list`: List of Table objects.
+ """
+ rows = self._connection.get_row_result("list_objects",
+ {"schema": self._name})
+ rows.fetch_all()
+ tables = []
+ object_types = ("TABLE", "VIEW",)
+ for row in rows:
+ if row["type"] in object_types:
+ try:
+ table = Table(self, row["TABLE_NAME"])
+ except ValueError:
+ table = Table(self, row["name"])
+ tables.append(table)
+ return tables
+
+ def get_table(self, name, check_existence=False):
+ """Returns the table of the given name for this schema.
+
+ Returns:
+ mysqlx.Table: Table object.
+ """
+ table = Table(self, name)
+ if check_existence:
+ if not table.exists_in_database():
+ raise ProgrammingError("Table does not exist")
+ return table
+
+ def get_view(self, name, check_existence=False):
+ """Returns the view of the given name for this schema.
+
+ Returns:
+ mysqlx.View: View object.
+ """
+ view = View(self, name)
+ if check_existence:
+ if not view.exists_in_database():
+ raise ProgrammingError("View does not exist")
+ return view
+
+ def get_collection(self, name, check_existence=False):
+ """Returns the collection of the given name for this schema.
+
+ Returns:
+ mysqlx.Collection: Collection object.
+ """
+ collection = Collection(self, name)
+ if check_existence:
+ if not collection.exists_in_database():
+ raise ProgrammingError("Collection does not exist")
+ return collection
+
+ def drop_collection(self, name):
+ """Drops a collection.
+
+ Args:
+ name (str): The name of the collection to be dropped.
+ """
+ self._connection.execute_nonquery(
+ "sql", _DROP_TABLE_QUERY.format(quote_identifier(self._name),
+ quote_identifier(name)), False)
+
+ def create_collection(self, name, reuse_existing=False, validation=None,
+ **kwargs):
+ """Creates in the current schema a new collection with the specified
+ name and retrieves an object representing the new collection created.
+
+ Args:
+ name (str): The name of the collection.
+ reuse_existing (bool): `True` to reuse an existing collection.
+ validation (Optional[dict]): A dict, containing the keys `level`
+ with the validation level and `schema`
+ with a dict or a string representation
+ of a JSON schema specification.
+
+ Returns:
+ mysqlx.Collection: Collection object.
+
+ Raises:
+ :class:`mysqlx.ProgrammingError`: If ``reuse_existing`` is False
+ and collection exists or the
+ collection name is invalid.
+ :class:`mysqlx.NotSupportedError`: If schema validation is not
+ supported by the server.
+
+ .. versionchanged:: 8.0.21
+ """
+ if not name:
+ raise ProgrammingError("Collection name is invalid")
+
+ if "reuse" in kwargs:
+ warnings.warn("'reuse' is deprecated since 8.0.21. "
+ "Please use 'reuse_existing' instead",
+ DeprecationWarning)
+ reuse_existing = kwargs["reuse"]
+
+ collection = Collection(self, name)
+ fields = {"schema": self._name, "name": name}
+
+ if validation is not None:
+ if not isinstance(validation, dict) or not validation:
+ raise ProgrammingError("Invalid value for 'validation'")
+
+ valid_options = ("level", "schema")
+ for option in validation:
+ if option not in valid_options:
+ raise ProgrammingError("Invalid option in 'validation': {}"
+ "".format(option))
+
+ options = []
+
+ if "level" in validation:
+ level = validation["level"]
+ if not isinstance(level, str):
+ raise ProgrammingError("Invalid value for 'level'")
+ options.append(("level", level))
+
+ if "schema" in validation:
+ schema = validation["schema"]
+ if not isinstance(schema, (str, dict)):
+ raise ProgrammingError("Invalid value for 'schema'")
+ options.append(
+ ("schema", json.dumps(schema)
+ if isinstance(schema, dict) else schema))
+
+ fields["options"] = ("validation", options)
+
+ try:
+ self._connection.execute_nonquery(
+ "mysqlx", "create_collection", True, fields)
+ except OperationalError as err:
+ if err.errno == ER_X_CMD_NUM_ARGUMENTS:
+ raise NotSupportedError(
+ "Your MySQL server does not support the requested "
+ "operation. Please update to MySQL 8.0.19 or a later "
+ "version")
+ if err.errno == ER_TABLE_EXISTS_ERROR:
+ if not reuse_existing:
+ raise ProgrammingError(
+ "Collection '{}' already exists".format(name))
+ else:
+ raise ProgrammingError(err.msg, err.errno)
+
+ return collection
+
+ def modify_collection(self, name, validation=None):
+ """Modifies a collection using a JSON schema validation.
+
+ Args:
+ name (str): The name of the collection.
+ validation (Optional[dict]): A dict, containing the keys `level`
+ with the validation level and `schema`
+ with a dict or a string representation
+ of a JSON schema specification.
+
+ Raises:
+ :class:`mysqlx.ProgrammingError`: If the collection name or
+ validation is invalid.
+ :class:`mysqlx.NotSupportedError`: If schema validation is not
+ supported by the server.
+
+ .. versionadded:: 8.0.21
+ """
+ if not name:
+ raise ProgrammingError("Collection name is invalid")
+
+ if not isinstance(validation, dict) or not validation:
+ raise ProgrammingError("Invalid value for 'validation'")
+
+ valid_options = ("level", "schema")
+ for option in validation:
+ if option not in valid_options:
+ raise ProgrammingError("Invalid option in 'validation': {}"
+ "".format(option))
+ options = []
+
+ if "level" in validation:
+ level = validation["level"]
+ if not isinstance(level, str):
+ raise ProgrammingError("Invalid value for 'level'")
+ options.append(("level", level))
+
+ if "schema" in validation:
+ schema = validation["schema"]
+ if not isinstance(schema, (str, dict)):
+ raise ProgrammingError("Invalid value for 'schema'")
+ options.append(
+ ("schema", json.dumps(schema)
+ if isinstance(schema, dict) else schema))
+
+ fields = {
+ "schema": self._name,
+ "name": name,
+ "options": ("validation", options)
+ }
+
+ try:
+ self._connection.execute_nonquery(
+ "mysqlx", "modify_collection_options", True, fields)
+ except OperationalError as err:
+ if err.errno == ER_X_INVALID_ADMIN_COMMAND:
+ raise NotSupportedError(
+ "Your MySQL server does not support the requested "
+ "operation. Please update to MySQL 8.0.19 or a later "
+ "version")
+ raise ProgrammingError(err.msg, err.errno)
+
+
+class Collection(DatabaseObject):
+ """Represents a collection of documents on a schema.
+
+ Args:
+ schema (mysqlx.Schema): The Schema object.
+ name (str): The collection name.
+ """
+
+ def exists_in_database(self):
+ """Verifies if this object exists in the database.
+
+ Returns:
+ bool: `True` if object exists in database.
+ """
+ sql = _COUNT_TABLES_QUERY.format(escape(self._schema.name),
+ escape(self._name))
+ return self._connection.execute_sql_scalar(sql) == 1
+
+ def find(self, condition=None):
+ """Retrieves documents from a collection.
+
+ Args:
+ condition (Optional[str]): The string with the filter expression of
+ the documents to be retrieved.
+ """
+ stmt = FindStatement(self, condition)
+ stmt.stmt_id = self._connection.get_next_statement_id()
+ return stmt
+
+ def add(self, *values):
+ """Adds a list of documents to a collection.
+
+ Args:
+ *values: The document list to be added into the collection.
+
+ Returns:
+ mysqlx.AddStatement: AddStatement object.
+ """
+ return AddStatement(self).add(*values)
+
+ def remove(self, condition):
+ """Removes documents based on the ``condition``.
+
+ Args:
+ condition (str): The string with the filter expression of the
+ documents to be removed.
+
+ Returns:
+ mysqlx.RemoveStatement: RemoveStatement object.
+
+ .. versionchanged:: 8.0.12
+ The ``condition`` parameter is now mandatory.
+ """
+ stmt = RemoveStatement(self, condition)
+ stmt.stmt_id = self._connection.get_next_statement_id()
+ return stmt
+
+ def modify(self, condition):
+ """Modifies documents based on the ``condition``.
+
+ Args:
+ condition (str): The string with the filter expression of the
+ documents to be modified.
+
+ Returns:
+ mysqlx.ModifyStatement: ModifyStatement object.
+
+ .. versionchanged:: 8.0.12
+ The ``condition`` parameter is now mandatory.
+ """
+ stmt = ModifyStatement(self, condition)
+ stmt.stmt_id = self._connection.get_next_statement_id()
+ return stmt
+
+ def count(self):
+ """Counts the documents in the collection.
+
+ Returns:
+ int: The total of documents in the collection.
+ """
+ sql = _COUNT_QUERY.format(quote_identifier(self._schema.name),
+ quote_identifier(self._name))
+ try:
+ res = self._connection.execute_sql_scalar(sql)
+ except OperationalError as err:
+ if err.errno == ER_NO_SUCH_TABLE:
+ raise OperationalError(
+ "Collection '{}' does not exist in schema '{}'"
+ "".format(self._name, self._schema.name))
+ raise
+ return res
+
+ def create_index(self, index_name, fields_desc):
+ """Creates a collection index.
+
+ Args:
+ index_name (str): Index name.
+ fields_desc (dict): A dictionary containing the fields members that
+ constraints the index to be created. It must
+ have the form as shown in the following::
+
+ {"fields": [{"field": member_path,
+ "type": member_type,
+ "required": member_required,
+ "array": array,
+ "collation": collation,
+ "options": options,
+ "srid": srid},
+ # {... more members,
+ # repeated as many times
+ # as needed}
+ ],
+ "type": type}
+ """
+ return CreateCollectionIndexStatement(self, index_name, fields_desc)
+
+ def drop_index(self, index_name):
+ """Drops a collection index.
+
+ Args:
+ index_name (str): Index name.
+ """
+ self._connection.execute_nonquery("mysqlx", "drop_collection_index",
+ False, {"schema": self._schema.name,
+ "collection": self._name,
+ "name": index_name})
+
+ def replace_one(self, doc_id, doc):
+ """Replaces the Document matching the document ID with a new document
+ provided.
+
+ Args:
+ doc_id (str): Document ID
+ doc (:class:`mysqlx.DbDoc` or `dict`): New Document
+ """
+ if "_id" in doc and doc["_id"] != doc_id:
+ raise ProgrammingError(
+ "Replacement document has an _id that is different than the "
+ "matched document"
+ )
+ return self.modify("_id = :id").set("$", doc) \
+ .bind("id", doc_id).execute()
+
+ def add_or_replace_one(self, doc_id, doc):
+ """Upserts the Document matching the document ID with a new document
+ provided.
+
+ Args:
+ doc_id (str): Document ID
+ doc (:class:`mysqlx.DbDoc` or dict): New Document
+ """
+ if "_id" in doc and doc["_id"] != doc_id:
+ raise ProgrammingError(
+ "Replacement document has an _id that is different than the "
+ "matched document"
+ )
+ if not isinstance(doc, DbDoc):
+ doc = DbDoc(doc)
+ return self.add(doc.copy(doc_id)).upsert(True).execute()
+
+ def get_one(self, doc_id):
+ """Returns a Document matching the Document ID.
+
+ Args:
+ doc_id (str): Document ID
+
+ Returns:
+ mysqlx.DbDoc: The Document matching the Document ID.
+ """
+ result = self.find("_id = :id").bind("id", doc_id).execute()
+ doc = result.fetch_one()
+ self._connection.fetch_active_result()
+ return doc
+
+ def remove_one(self, doc_id):
+ """Removes a Document matching the Document ID.
+
+ Args:
+ doc_id (str): Document ID
+
+ Returns:
+ mysqlx.Result: Result object.
+ """
+ return self.remove("_id = :id").bind("id", doc_id).execute()
+
+
+class Table(DatabaseObject):
+ """Represents a database table on a schema.
+
+ Provides access to the table through standard INSERT/SELECT/UPDATE/DELETE
+ statements.
+
+ Args:
+ schema (mysqlx.Schema): The Schema object.
+ name (str): The table name.
+ """
+
+ def exists_in_database(self):
+ """Verifies if this object exists in the database.
+
+ Returns:
+ bool: `True` if object exists in database.
+ """
+ sql = _COUNT_TABLES_QUERY.format(escape(self._schema.name),
+ escape(self._name))
+ return self._connection.execute_sql_scalar(sql) == 1
+
+ def select(self, *fields):
+ """Creates a new :class:`mysqlx.SelectStatement` object.
+
+ Args:
+ *fields: The fields to be retrieved.
+
+ Returns:
+ mysqlx.SelectStatement: SelectStatement object
+ """
+ stmt = SelectStatement(self, *fields)
+ stmt.stmt_id = self._connection.get_next_statement_id()
+ return stmt
+
+ def insert(self, *fields):
+ """Creates a new :class:`mysqlx.InsertStatement` object.
+
+ Args:
+ *fields: The fields to be inserted.
+
+ Returns:
+ mysqlx.InsertStatement: InsertStatement object
+ """
+ stmt = InsertStatement(self, *fields)
+ stmt.stmt_id = self._connection.get_next_statement_id()
+ return stmt
+
+ def update(self):
+ """Creates a new :class:`mysqlx.UpdateStatement` object.
+
+ Returns:
+ mysqlx.UpdateStatement: UpdateStatement object
+ """
+ stmt = UpdateStatement(self)
+ stmt.stmt_id = self._connection.get_next_statement_id()
+ return stmt
+
+ def delete(self):
+ """Creates a new :class:`mysqlx.DeleteStatement` object.
+
+ Returns:
+ mysqlx.DeleteStatement: DeleteStatement object
+
+ .. versionchanged:: 8.0.12
+ The ``condition`` parameter was removed.
+ """
+ stmt = DeleteStatement(self)
+ stmt.stmt_id = self._connection.get_next_statement_id()
+ return stmt
+
+ def count(self):
+ """Counts the rows in the table.
+
+ Returns:
+ int: The total of rows in the table.
+ """
+ sql = _COUNT_QUERY.format(quote_identifier(self._schema.name),
+ quote_identifier(self._name))
+ try:
+ res = self._connection.execute_sql_scalar(sql)
+ except OperationalError as err:
+ if err.errno == ER_NO_SUCH_TABLE:
+ raise OperationalError(
+ "Table '{}' does not exist in schema '{}'"
+ "".format(self._name, self._schema.name))
+ raise
+ return res
+
+ def is_view(self):
+ """Determine if the underlying object is a view or not.
+
+ Returns:
+ bool: `True` if the underlying object is a view.
+ """
+ sql = _COUNT_VIEWS_QUERY.format(escape(self._schema.name),
+ escape(self._name))
+ return self._connection.execute_sql_scalar(sql) == 1
+
+
+class View(Table):
+ """Represents a database view on a schema.
+
+ Provides a mechanism for creating, alter and drop views.
+
+ Args:
+ schema (mysqlx.Schema): The Schema object.
+ name (str): The table name.
+ """
+
+ def exists_in_database(self):
+ """Verifies if this object exists in the database.
+
+ Returns:
+ bool: `True` if object exists in database.
+ """
+ sql = _COUNT_VIEWS_QUERY.format(escape(self._schema.name),
+ escape(self._name))
+ return self._connection.execute_sql_scalar(sql) == 1
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/dbdoc.py b/monEnvTP/lib/python3.8/site-packages/mysqlx/dbdoc.py
new file mode 100644
index 0000000000000000000000000000000000000000..736e4c84981417ffcc9c221f68d901e9b1ac6fb3
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysqlx/dbdoc.py
@@ -0,0 +1,114 @@
+# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""Implementation of the DbDoc."""
+
+import json
+
+from .errors import ProgrammingError
+
+
+class ExprJSONEncoder(json.JSONEncoder):
+    """A :class:`json.JSONEncoder` subclass, which enables encoding of
+    :class:`mysqlx.ExprParser` objects."""
+    def default(self, o): # pylint: disable=E0202
+        if hasattr(o, "expr"):  # duck-typed: any object exposing 'expr' is treated as an expression
+            return "{0}".format(o)  # serialize using the object's str() representation
+        # Let the base class default method raise the TypeError
+        return json.JSONEncoder.default(self, o)
+
+
+class DbDoc(object):
+    """Represents a generic document in JSON format.
+
+    Args:
+        value (object): The value can be a JSON string or a dict.
+
+    Raises:
+        ValueError: If ``value`` type is not a str or dict.
+    """
+    def __init__(self, value):
+        if isinstance(value, dict):
+            self.__dict__ = value  # document fields become instance attributes directly
+        elif isinstance(value, str):
+            self.__dict__ = json.loads(value)  # parse JSON text into the instance dict
+        else:
+            raise ValueError("Unable to handle type: {0}".format(type(value)))
+
+    def __str__(self):
+        return self.as_str()  # JSON-formatted representation
+
+    def __repr__(self):
+        return repr(self.__dict__)
+
+    def __setitem__(self, index, value):
+        if index == "_id":  # the document ID is immutable once set
+            raise ProgrammingError("Cannot modify _id")
+        self.__dict__[index] = value
+
+    def __getitem__(self, index):
+        return self.__dict__[index]
+
+    def __contains__(self, item):
+        return item in self.__dict__
+
+    def copy(self, doc_id=None):
+        """Returns a new copy of a :class:`mysqlx.DbDoc` object containing the
+        `doc_id` provided. If `doc_id` is not provided, it will be removed from
+        the new :class:`mysqlx.DbDoc` object.
+
+        Args:
+            doc_id (Optional[str]): Document ID
+
+        Returns:
+            mysqlx.DbDoc: A new instance of DbDoc containing the _id provided
+        """
+        new_dict = self.__dict__.copy()  # shallow copy; nested values are shared with the original
+        if doc_id:
+            new_dict["_id"] = doc_id  # override (or add) the document ID in the copy
+        elif "_id" in new_dict:
+            del new_dict["_id"]  # no ID requested: the copy must not keep the old one
+        return DbDoc(new_dict)
+
+    def keys(self):
+        """Returns the keys.
+
+        Returns:
+            `list`: The keys.
+        """
+        return self.__dict__.keys()  # NOTE(review): returns a dict view, not a list, on Python 3
+
+    def as_str(self):
+        """Serialize :class:`mysqlx.DbDoc` to a JSON formatted ``str``.
+
+        Returns:
+            str: A JSON formatted ``str`` representation of the document.
+
+        .. versionadded:: 8.0.16
+        """
+        return json.dumps(self.__dict__, cls=ExprJSONEncoder)  # custom encoder handles embedded expression objects
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/errorcode.py b/monEnvTP/lib/python3.8/site-packages/mysqlx/errorcode.py
new file mode 100644
index 0000000000000000000000000000000000000000..3c15f69319ae51d8f6c750da230d9cc7fd6153f4
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysqlx/errorcode.py
@@ -0,0 +1,1877 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2013, 2021, Oracle and/or its affiliates. All rights reserved.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+# This file was auto-generated.
+_GENERATED_ON = '2021-08-11'
+_MYSQL_VERSION = (8, 0, 27)
+
+"""This module contains the MySQL Server and Client error codes"""
+
+# Start MySQL Errors
+OBSOLETE_ER_HASHCHK = 1000
+OBSOLETE_ER_NISAMCHK = 1001
+ER_NO = 1002
+ER_YES = 1003
+ER_CANT_CREATE_FILE = 1004
+ER_CANT_CREATE_TABLE = 1005
+ER_CANT_CREATE_DB = 1006
+ER_DB_CREATE_EXISTS = 1007
+ER_DB_DROP_EXISTS = 1008
+OBSOLETE_ER_DB_DROP_DELETE = 1009
+ER_DB_DROP_RMDIR = 1010
+OBSOLETE_ER_CANT_DELETE_FILE = 1011
+ER_CANT_FIND_SYSTEM_REC = 1012
+ER_CANT_GET_STAT = 1013
+OBSOLETE_ER_CANT_GET_WD = 1014
+ER_CANT_LOCK = 1015
+ER_CANT_OPEN_FILE = 1016
+ER_FILE_NOT_FOUND = 1017
+ER_CANT_READ_DIR = 1018
+OBSOLETE_ER_CANT_SET_WD = 1019
+ER_CHECKREAD = 1020
+OBSOLETE_ER_DISK_FULL = 1021
+ER_DUP_KEY = 1022
+OBSOLETE_ER_ERROR_ON_CLOSE = 1023
+ER_ERROR_ON_READ = 1024
+ER_ERROR_ON_RENAME = 1025
+ER_ERROR_ON_WRITE = 1026
+ER_FILE_USED = 1027
+OBSOLETE_ER_FILSORT_ABORT = 1028
+OBSOLETE_ER_FORM_NOT_FOUND = 1029
+ER_GET_ERRNO = 1030
+ER_ILLEGAL_HA = 1031
+ER_KEY_NOT_FOUND = 1032
+ER_NOT_FORM_FILE = 1033
+ER_NOT_KEYFILE = 1034
+ER_OLD_KEYFILE = 1035
+ER_OPEN_AS_READONLY = 1036
+ER_OUTOFMEMORY = 1037
+ER_OUT_OF_SORTMEMORY = 1038
+OBSOLETE_ER_UNEXPECTED_EOF = 1039
+ER_CON_COUNT_ERROR = 1040
+ER_OUT_OF_RESOURCES = 1041
+ER_BAD_HOST_ERROR = 1042
+ER_HANDSHAKE_ERROR = 1043
+ER_DBACCESS_DENIED_ERROR = 1044
+ER_ACCESS_DENIED_ERROR = 1045
+ER_NO_DB_ERROR = 1046
+ER_UNKNOWN_COM_ERROR = 1047
+ER_BAD_NULL_ERROR = 1048
+ER_BAD_DB_ERROR = 1049
+ER_TABLE_EXISTS_ERROR = 1050
+ER_BAD_TABLE_ERROR = 1051
+ER_NON_UNIQ_ERROR = 1052
+ER_SERVER_SHUTDOWN = 1053
+ER_BAD_FIELD_ERROR = 1054
+ER_WRONG_FIELD_WITH_GROUP = 1055
+ER_WRONG_GROUP_FIELD = 1056
+ER_WRONG_SUM_SELECT = 1057
+ER_WRONG_VALUE_COUNT = 1058
+ER_TOO_LONG_IDENT = 1059
+ER_DUP_FIELDNAME = 1060
+ER_DUP_KEYNAME = 1061
+ER_DUP_ENTRY = 1062
+ER_WRONG_FIELD_SPEC = 1063
+ER_PARSE_ERROR = 1064
+ER_EMPTY_QUERY = 1065
+ER_NONUNIQ_TABLE = 1066
+ER_INVALID_DEFAULT = 1067
+ER_MULTIPLE_PRI_KEY = 1068
+ER_TOO_MANY_KEYS = 1069
+ER_TOO_MANY_KEY_PARTS = 1070
+ER_TOO_LONG_KEY = 1071
+ER_KEY_COLUMN_DOES_NOT_EXITS = 1072
+ER_BLOB_USED_AS_KEY = 1073
+ER_TOO_BIG_FIELDLENGTH = 1074
+ER_WRONG_AUTO_KEY = 1075
+ER_READY = 1076
+OBSOLETE_ER_NORMAL_SHUTDOWN = 1077
+OBSOLETE_ER_GOT_SIGNAL = 1078
+ER_SHUTDOWN_COMPLETE = 1079
+ER_FORCING_CLOSE = 1080
+ER_IPSOCK_ERROR = 1081
+ER_NO_SUCH_INDEX = 1082
+ER_WRONG_FIELD_TERMINATORS = 1083
+ER_BLOBS_AND_NO_TERMINATED = 1084
+ER_TEXTFILE_NOT_READABLE = 1085
+ER_FILE_EXISTS_ERROR = 1086
+ER_LOAD_INFO = 1087
+ER_ALTER_INFO = 1088
+ER_WRONG_SUB_KEY = 1089
+ER_CANT_REMOVE_ALL_FIELDS = 1090
+ER_CANT_DROP_FIELD_OR_KEY = 1091
+ER_INSERT_INFO = 1092
+ER_UPDATE_TABLE_USED = 1093
+ER_NO_SUCH_THREAD = 1094
+ER_KILL_DENIED_ERROR = 1095
+ER_NO_TABLES_USED = 1096
+ER_TOO_BIG_SET = 1097
+ER_NO_UNIQUE_LOGFILE = 1098
+ER_TABLE_NOT_LOCKED_FOR_WRITE = 1099
+ER_TABLE_NOT_LOCKED = 1100
+ER_BLOB_CANT_HAVE_DEFAULT = 1101
+ER_WRONG_DB_NAME = 1102
+ER_WRONG_TABLE_NAME = 1103
+ER_TOO_BIG_SELECT = 1104
+ER_UNKNOWN_ERROR = 1105
+ER_UNKNOWN_PROCEDURE = 1106
+ER_WRONG_PARAMCOUNT_TO_PROCEDURE = 1107
+ER_WRONG_PARAMETERS_TO_PROCEDURE = 1108
+ER_UNKNOWN_TABLE = 1109
+ER_FIELD_SPECIFIED_TWICE = 1110
+ER_INVALID_GROUP_FUNC_USE = 1111
+ER_UNSUPPORTED_EXTENSION = 1112
+ER_TABLE_MUST_HAVE_COLUMNS = 1113
+ER_RECORD_FILE_FULL = 1114
+ER_UNKNOWN_CHARACTER_SET = 1115
+ER_TOO_MANY_TABLES = 1116
+ER_TOO_MANY_FIELDS = 1117
+ER_TOO_BIG_ROWSIZE = 1118
+ER_STACK_OVERRUN = 1119
+ER_WRONG_OUTER_JOIN_UNUSED = 1120
+ER_NULL_COLUMN_IN_INDEX = 1121
+ER_CANT_FIND_UDF = 1122
+ER_CANT_INITIALIZE_UDF = 1123
+ER_UDF_NO_PATHS = 1124
+ER_UDF_EXISTS = 1125
+ER_CANT_OPEN_LIBRARY = 1126
+ER_CANT_FIND_DL_ENTRY = 1127
+ER_FUNCTION_NOT_DEFINED = 1128
+ER_HOST_IS_BLOCKED = 1129
+ER_HOST_NOT_PRIVILEGED = 1130
+ER_PASSWORD_ANONYMOUS_USER = 1131
+ER_PASSWORD_NOT_ALLOWED = 1132
+ER_PASSWORD_NO_MATCH = 1133
+ER_UPDATE_INFO = 1134
+ER_CANT_CREATE_THREAD = 1135
+ER_WRONG_VALUE_COUNT_ON_ROW = 1136
+ER_CANT_REOPEN_TABLE = 1137
+ER_INVALID_USE_OF_NULL = 1138
+ER_REGEXP_ERROR = 1139
+ER_MIX_OF_GROUP_FUNC_AND_FIELDS = 1140
+ER_NONEXISTING_GRANT = 1141
+ER_TABLEACCESS_DENIED_ERROR = 1142
+ER_COLUMNACCESS_DENIED_ERROR = 1143
+ER_ILLEGAL_GRANT_FOR_TABLE = 1144
+ER_GRANT_WRONG_HOST_OR_USER = 1145
+ER_NO_SUCH_TABLE = 1146
+ER_NONEXISTING_TABLE_GRANT = 1147
+ER_NOT_ALLOWED_COMMAND = 1148
+ER_SYNTAX_ERROR = 1149
+OBSOLETE_ER_UNUSED1 = 1150
+OBSOLETE_ER_UNUSED2 = 1151
+ER_ABORTING_CONNECTION = 1152
+ER_NET_PACKET_TOO_LARGE = 1153
+ER_NET_READ_ERROR_FROM_PIPE = 1154
+ER_NET_FCNTL_ERROR = 1155
+ER_NET_PACKETS_OUT_OF_ORDER = 1156
+ER_NET_UNCOMPRESS_ERROR = 1157
+ER_NET_READ_ERROR = 1158
+ER_NET_READ_INTERRUPTED = 1159
+ER_NET_ERROR_ON_WRITE = 1160
+ER_NET_WRITE_INTERRUPTED = 1161
+ER_TOO_LONG_STRING = 1162
+ER_TABLE_CANT_HANDLE_BLOB = 1163
+ER_TABLE_CANT_HANDLE_AUTO_INCREMENT = 1164
+OBSOLETE_ER_UNUSED3 = 1165
+ER_WRONG_COLUMN_NAME = 1166
+ER_WRONG_KEY_COLUMN = 1167
+ER_WRONG_MRG_TABLE = 1168
+ER_DUP_UNIQUE = 1169
+ER_BLOB_KEY_WITHOUT_LENGTH = 1170
+ER_PRIMARY_CANT_HAVE_NULL = 1171
+ER_TOO_MANY_ROWS = 1172
+ER_REQUIRES_PRIMARY_KEY = 1173
+OBSOLETE_ER_NO_RAID_COMPILED = 1174
+ER_UPDATE_WITHOUT_KEY_IN_SAFE_MODE = 1175
+ER_KEY_DOES_NOT_EXITS = 1176
+ER_CHECK_NO_SUCH_TABLE = 1177
+ER_CHECK_NOT_IMPLEMENTED = 1178
+ER_CANT_DO_THIS_DURING_AN_TRANSACTION = 1179
+ER_ERROR_DURING_COMMIT = 1180
+ER_ERROR_DURING_ROLLBACK = 1181
+ER_ERROR_DURING_FLUSH_LOGS = 1182
+OBSOLETE_ER_ERROR_DURING_CHECKPOINT = 1183
+ER_NEW_ABORTING_CONNECTION = 1184
+OBSOLETE_ER_DUMP_NOT_IMPLEMENTED = 1185
+OBSOLETE_ER_FLUSH_MASTER_BINLOG_CLOSED = 1186
+OBSOLETE_ER_INDEX_REBUILD = 1187
+ER_MASTER = 1188
+ER_MASTER_NET_READ = 1189
+ER_MASTER_NET_WRITE = 1190
+ER_FT_MATCHING_KEY_NOT_FOUND = 1191
+ER_LOCK_OR_ACTIVE_TRANSACTION = 1192
+ER_UNKNOWN_SYSTEM_VARIABLE = 1193
+ER_CRASHED_ON_USAGE = 1194
+ER_CRASHED_ON_REPAIR = 1195
+ER_WARNING_NOT_COMPLETE_ROLLBACK = 1196
+ER_TRANS_CACHE_FULL = 1197
+OBSOLETE_ER_SLAVE_MUST_STOP = 1198
+ER_SLAVE_NOT_RUNNING = 1199
+ER_BAD_SLAVE = 1200
+ER_MASTER_INFO = 1201
+ER_SLAVE_THREAD = 1202
+ER_TOO_MANY_USER_CONNECTIONS = 1203
+ER_SET_CONSTANTS_ONLY = 1204
+ER_LOCK_WAIT_TIMEOUT = 1205
+ER_LOCK_TABLE_FULL = 1206
+ER_READ_ONLY_TRANSACTION = 1207
+OBSOLETE_ER_DROP_DB_WITH_READ_LOCK = 1208
+OBSOLETE_ER_CREATE_DB_WITH_READ_LOCK = 1209
+ER_WRONG_ARGUMENTS = 1210
+ER_NO_PERMISSION_TO_CREATE_USER = 1211
+OBSOLETE_ER_UNION_TABLES_IN_DIFFERENT_DIR = 1212
+ER_LOCK_DEADLOCK = 1213
+ER_TABLE_CANT_HANDLE_FT = 1214
+ER_CANNOT_ADD_FOREIGN = 1215
+ER_NO_REFERENCED_ROW = 1216
+ER_ROW_IS_REFERENCED = 1217
+ER_CONNECT_TO_MASTER = 1218
+OBSOLETE_ER_QUERY_ON_MASTER = 1219
+ER_ERROR_WHEN_EXECUTING_COMMAND = 1220
+ER_WRONG_USAGE = 1221
+ER_WRONG_NUMBER_OF_COLUMNS_IN_SELECT = 1222
+ER_CANT_UPDATE_WITH_READLOCK = 1223
+ER_MIXING_NOT_ALLOWED = 1224
+ER_DUP_ARGUMENT = 1225
+ER_USER_LIMIT_REACHED = 1226
+ER_SPECIFIC_ACCESS_DENIED_ERROR = 1227
+ER_LOCAL_VARIABLE = 1228
+ER_GLOBAL_VARIABLE = 1229
+ER_NO_DEFAULT = 1230
+ER_WRONG_VALUE_FOR_VAR = 1231
+ER_WRONG_TYPE_FOR_VAR = 1232
+ER_VAR_CANT_BE_READ = 1233
+ER_CANT_USE_OPTION_HERE = 1234
+ER_NOT_SUPPORTED_YET = 1235
+ER_MASTER_FATAL_ERROR_READING_BINLOG = 1236
+ER_SLAVE_IGNORED_TABLE = 1237
+ER_INCORRECT_GLOBAL_LOCAL_VAR = 1238
+ER_WRONG_FK_DEF = 1239
+ER_KEY_REF_DO_NOT_MATCH_TABLE_REF = 1240
+ER_OPERAND_COLUMNS = 1241
+ER_SUBQUERY_NO_1_ROW = 1242
+ER_UNKNOWN_STMT_HANDLER = 1243
+ER_CORRUPT_HELP_DB = 1244
+OBSOLETE_ER_CYCLIC_REFERENCE = 1245
+ER_AUTO_CONVERT = 1246
+ER_ILLEGAL_REFERENCE = 1247
+ER_DERIVED_MUST_HAVE_ALIAS = 1248
+ER_SELECT_REDUCED = 1249
+ER_TABLENAME_NOT_ALLOWED_HERE = 1250
+ER_NOT_SUPPORTED_AUTH_MODE = 1251
+ER_SPATIAL_CANT_HAVE_NULL = 1252
+ER_COLLATION_CHARSET_MISMATCH = 1253
+OBSOLETE_ER_SLAVE_WAS_RUNNING = 1254
+OBSOLETE_ER_SLAVE_WAS_NOT_RUNNING = 1255
+ER_TOO_BIG_FOR_UNCOMPRESS = 1256
+ER_ZLIB_Z_MEM_ERROR = 1257
+ER_ZLIB_Z_BUF_ERROR = 1258
+ER_ZLIB_Z_DATA_ERROR = 1259
+ER_CUT_VALUE_GROUP_CONCAT = 1260
+ER_WARN_TOO_FEW_RECORDS = 1261
+ER_WARN_TOO_MANY_RECORDS = 1262
+ER_WARN_NULL_TO_NOTNULL = 1263
+ER_WARN_DATA_OUT_OF_RANGE = 1264
+WARN_DATA_TRUNCATED = 1265
+ER_WARN_USING_OTHER_HANDLER = 1266
+ER_CANT_AGGREGATE_2COLLATIONS = 1267
+OBSOLETE_ER_DROP_USER = 1268
+ER_REVOKE_GRANTS = 1269
+ER_CANT_AGGREGATE_3COLLATIONS = 1270
+ER_CANT_AGGREGATE_NCOLLATIONS = 1271
+ER_VARIABLE_IS_NOT_STRUCT = 1272
+ER_UNKNOWN_COLLATION = 1273
+ER_SLAVE_IGNORED_SSL_PARAMS = 1274
+OBSOLETE_ER_SERVER_IS_IN_SECURE_AUTH_MODE = 1275
+ER_WARN_FIELD_RESOLVED = 1276
+ER_BAD_SLAVE_UNTIL_COND = 1277
+ER_MISSING_SKIP_SLAVE = 1278
+ER_UNTIL_COND_IGNORED = 1279
+ER_WRONG_NAME_FOR_INDEX = 1280
+ER_WRONG_NAME_FOR_CATALOG = 1281
+OBSOLETE_ER_WARN_QC_RESIZE = 1282
+ER_BAD_FT_COLUMN = 1283
+ER_UNKNOWN_KEY_CACHE = 1284
+ER_WARN_HOSTNAME_WONT_WORK = 1285
+ER_UNKNOWN_STORAGE_ENGINE = 1286
+ER_WARN_DEPRECATED_SYNTAX = 1287
+ER_NON_UPDATABLE_TABLE = 1288
+ER_FEATURE_DISABLED = 1289
+ER_OPTION_PREVENTS_STATEMENT = 1290
+ER_DUPLICATED_VALUE_IN_TYPE = 1291
+ER_TRUNCATED_WRONG_VALUE = 1292
+OBSOLETE_ER_TOO_MUCH_AUTO_TIMESTAMP_COLS = 1293
+ER_INVALID_ON_UPDATE = 1294
+ER_UNSUPPORTED_PS = 1295
+ER_GET_ERRMSG = 1296
+ER_GET_TEMPORARY_ERRMSG = 1297
+ER_UNKNOWN_TIME_ZONE = 1298
+ER_WARN_INVALID_TIMESTAMP = 1299
+ER_INVALID_CHARACTER_STRING = 1300
+ER_WARN_ALLOWED_PACKET_OVERFLOWED = 1301
+ER_CONFLICTING_DECLARATIONS = 1302
+ER_SP_NO_RECURSIVE_CREATE = 1303
+ER_SP_ALREADY_EXISTS = 1304
+ER_SP_DOES_NOT_EXIST = 1305
+ER_SP_DROP_FAILED = 1306
+ER_SP_STORE_FAILED = 1307
+ER_SP_LILABEL_MISMATCH = 1308
+ER_SP_LABEL_REDEFINE = 1309
+ER_SP_LABEL_MISMATCH = 1310
+ER_SP_UNINIT_VAR = 1311
+ER_SP_BADSELECT = 1312
+ER_SP_BADRETURN = 1313
+ER_SP_BADSTATEMENT = 1314
+ER_UPDATE_LOG_DEPRECATED_IGNORED = 1315
+ER_UPDATE_LOG_DEPRECATED_TRANSLATED = 1316
+ER_QUERY_INTERRUPTED = 1317
+ER_SP_WRONG_NO_OF_ARGS = 1318
+ER_SP_COND_MISMATCH = 1319
+ER_SP_NORETURN = 1320
+ER_SP_NORETURNEND = 1321
+ER_SP_BAD_CURSOR_QUERY = 1322
+ER_SP_BAD_CURSOR_SELECT = 1323
+ER_SP_CURSOR_MISMATCH = 1324
+ER_SP_CURSOR_ALREADY_OPEN = 1325
+ER_SP_CURSOR_NOT_OPEN = 1326
+ER_SP_UNDECLARED_VAR = 1327
+ER_SP_WRONG_NO_OF_FETCH_ARGS = 1328
+ER_SP_FETCH_NO_DATA = 1329
+ER_SP_DUP_PARAM = 1330
+ER_SP_DUP_VAR = 1331
+ER_SP_DUP_COND = 1332
+ER_SP_DUP_CURS = 1333
+ER_SP_CANT_ALTER = 1334
+ER_SP_SUBSELECT_NYI = 1335
+ER_STMT_NOT_ALLOWED_IN_SF_OR_TRG = 1336
+ER_SP_VARCOND_AFTER_CURSHNDLR = 1337
+ER_SP_CURSOR_AFTER_HANDLER = 1338
+ER_SP_CASE_NOT_FOUND = 1339
+ER_FPARSER_TOO_BIG_FILE = 1340
+ER_FPARSER_BAD_HEADER = 1341
+ER_FPARSER_EOF_IN_COMMENT = 1342
+ER_FPARSER_ERROR_IN_PARAMETER = 1343
+ER_FPARSER_EOF_IN_UNKNOWN_PARAMETER = 1344
+ER_VIEW_NO_EXPLAIN = 1345
+OBSOLETE_ER_FRM_UNKNOWN_TYPE = 1346
+ER_WRONG_OBJECT = 1347
+ER_NONUPDATEABLE_COLUMN = 1348
+OBSOLETE_ER_VIEW_SELECT_DERIVED_UNUSED = 1349
+ER_VIEW_SELECT_CLAUSE = 1350
+ER_VIEW_SELECT_VARIABLE = 1351
+ER_VIEW_SELECT_TMPTABLE = 1352
+ER_VIEW_WRONG_LIST = 1353
+ER_WARN_VIEW_MERGE = 1354
+ER_WARN_VIEW_WITHOUT_KEY = 1355
+ER_VIEW_INVALID = 1356
+ER_SP_NO_DROP_SP = 1357
+OBSOLETE_ER_SP_GOTO_IN_HNDLR = 1358
+ER_TRG_ALREADY_EXISTS = 1359
+ER_TRG_DOES_NOT_EXIST = 1360
+ER_TRG_ON_VIEW_OR_TEMP_TABLE = 1361
+ER_TRG_CANT_CHANGE_ROW = 1362
+ER_TRG_NO_SUCH_ROW_IN_TRG = 1363
+ER_NO_DEFAULT_FOR_FIELD = 1364
+ER_DIVISION_BY_ZERO = 1365
+ER_TRUNCATED_WRONG_VALUE_FOR_FIELD = 1366
+ER_ILLEGAL_VALUE_FOR_TYPE = 1367
+ER_VIEW_NONUPD_CHECK = 1368
+ER_VIEW_CHECK_FAILED = 1369
+ER_PROCACCESS_DENIED_ERROR = 1370
+ER_RELAY_LOG_FAIL = 1371
+OBSOLETE_ER_PASSWD_LENGTH = 1372
+ER_UNKNOWN_TARGET_BINLOG = 1373
+ER_IO_ERR_LOG_INDEX_READ = 1374
+ER_BINLOG_PURGE_PROHIBITED = 1375
+ER_FSEEK_FAIL = 1376
+ER_BINLOG_PURGE_FATAL_ERR = 1377
+ER_LOG_IN_USE = 1378
+ER_LOG_PURGE_UNKNOWN_ERR = 1379
+ER_RELAY_LOG_INIT = 1380
+ER_NO_BINARY_LOGGING = 1381
+ER_RESERVED_SYNTAX = 1382
+OBSOLETE_ER_WSAS_FAILED = 1383
+OBSOLETE_ER_DIFF_GROUPS_PROC = 1384
+OBSOLETE_ER_NO_GROUP_FOR_PROC = 1385
+OBSOLETE_ER_ORDER_WITH_PROC = 1386
+OBSOLETE_ER_LOGGING_PROHIBIT_CHANGING_OF = 1387
+OBSOLETE_ER_NO_FILE_MAPPING = 1388
+OBSOLETE_ER_WRONG_MAGIC = 1389
+ER_PS_MANY_PARAM = 1390
+ER_KEY_PART_0 = 1391
+ER_VIEW_CHECKSUM = 1392
+ER_VIEW_MULTIUPDATE = 1393
+ER_VIEW_NO_INSERT_FIELD_LIST = 1394
+ER_VIEW_DELETE_MERGE_VIEW = 1395
+ER_CANNOT_USER = 1396
+ER_XAER_NOTA = 1397
+ER_XAER_INVAL = 1398
+ER_XAER_RMFAIL = 1399
+ER_XAER_OUTSIDE = 1400
+ER_XAER_RMERR = 1401
+ER_XA_RBROLLBACK = 1402
+ER_NONEXISTING_PROC_GRANT = 1403
+ER_PROC_AUTO_GRANT_FAIL = 1404
+ER_PROC_AUTO_REVOKE_FAIL = 1405
+ER_DATA_TOO_LONG = 1406
+ER_SP_BAD_SQLSTATE = 1407
+ER_STARTUP = 1408
+ER_LOAD_FROM_FIXED_SIZE_ROWS_TO_VAR = 1409
+ER_CANT_CREATE_USER_WITH_GRANT = 1410
+ER_WRONG_VALUE_FOR_TYPE = 1411
+ER_TABLE_DEF_CHANGED = 1412
+ER_SP_DUP_HANDLER = 1413
+ER_SP_NOT_VAR_ARG = 1414
+ER_SP_NO_RETSET = 1415
+ER_CANT_CREATE_GEOMETRY_OBJECT = 1416
+OBSOLETE_ER_FAILED_ROUTINE_BREAK_BINLOG = 1417
+ER_BINLOG_UNSAFE_ROUTINE = 1418
+ER_BINLOG_CREATE_ROUTINE_NEED_SUPER = 1419
+OBSOLETE_ER_EXEC_STMT_WITH_OPEN_CURSOR = 1420
+ER_STMT_HAS_NO_OPEN_CURSOR = 1421
+ER_COMMIT_NOT_ALLOWED_IN_SF_OR_TRG = 1422
+ER_NO_DEFAULT_FOR_VIEW_FIELD = 1423
+ER_SP_NO_RECURSION = 1424
+ER_TOO_BIG_SCALE = 1425
+ER_TOO_BIG_PRECISION = 1426
+ER_M_BIGGER_THAN_D = 1427
+ER_WRONG_LOCK_OF_SYSTEM_TABLE = 1428
+ER_CONNECT_TO_FOREIGN_DATA_SOURCE = 1429
+ER_QUERY_ON_FOREIGN_DATA_SOURCE = 1430
+ER_FOREIGN_DATA_SOURCE_DOESNT_EXIST = 1431
+ER_FOREIGN_DATA_STRING_INVALID_CANT_CREATE = 1432
+ER_FOREIGN_DATA_STRING_INVALID = 1433
+OBSOLETE_ER_CANT_CREATE_FEDERATED_TABLE = 1434
+ER_TRG_IN_WRONG_SCHEMA = 1435
+ER_STACK_OVERRUN_NEED_MORE = 1436
+ER_TOO_LONG_BODY = 1437
+ER_WARN_CANT_DROP_DEFAULT_KEYCACHE = 1438
+ER_TOO_BIG_DISPLAYWIDTH = 1439
+ER_XAER_DUPID = 1440
+ER_DATETIME_FUNCTION_OVERFLOW = 1441
+ER_CANT_UPDATE_USED_TABLE_IN_SF_OR_TRG = 1442
+ER_VIEW_PREVENT_UPDATE = 1443
+ER_PS_NO_RECURSION = 1444
+ER_SP_CANT_SET_AUTOCOMMIT = 1445
+OBSOLETE_ER_MALFORMED_DEFINER = 1446
+ER_VIEW_FRM_NO_USER = 1447
+ER_VIEW_OTHER_USER = 1448
+ER_NO_SUCH_USER = 1449
+ER_FORBID_SCHEMA_CHANGE = 1450
+ER_ROW_IS_REFERENCED_2 = 1451
+ER_NO_REFERENCED_ROW_2 = 1452
+ER_SP_BAD_VAR_SHADOW = 1453
+ER_TRG_NO_DEFINER = 1454
+ER_OLD_FILE_FORMAT = 1455
+ER_SP_RECURSION_LIMIT = 1456
+OBSOLETE_ER_SP_PROC_TABLE_CORRUPT = 1457
+ER_SP_WRONG_NAME = 1458
+ER_TABLE_NEEDS_UPGRADE = 1459
+ER_SP_NO_AGGREGATE = 1460
+ER_MAX_PREPARED_STMT_COUNT_REACHED = 1461
+ER_VIEW_RECURSIVE = 1462
+ER_NON_GROUPING_FIELD_USED = 1463
+ER_TABLE_CANT_HANDLE_SPKEYS = 1464
+ER_NO_TRIGGERS_ON_SYSTEM_SCHEMA = 1465
+ER_REMOVED_SPACES = 1466
+ER_AUTOINC_READ_FAILED = 1467
+ER_USERNAME = 1468
+ER_HOSTNAME = 1469
+ER_WRONG_STRING_LENGTH = 1470
+ER_NON_INSERTABLE_TABLE = 1471
+ER_ADMIN_WRONG_MRG_TABLE = 1472
+ER_TOO_HIGH_LEVEL_OF_NESTING_FOR_SELECT = 1473
+ER_NAME_BECOMES_EMPTY = 1474
+ER_AMBIGUOUS_FIELD_TERM = 1475
+ER_FOREIGN_SERVER_EXISTS = 1476
+ER_FOREIGN_SERVER_DOESNT_EXIST = 1477
+ER_ILLEGAL_HA_CREATE_OPTION = 1478
+ER_PARTITION_REQUIRES_VALUES_ERROR = 1479
+ER_PARTITION_WRONG_VALUES_ERROR = 1480
+ER_PARTITION_MAXVALUE_ERROR = 1481
+OBSOLETE_ER_PARTITION_SUBPARTITION_ERROR = 1482
+OBSOLETE_ER_PARTITION_SUBPART_MIX_ERROR = 1483
+ER_PARTITION_WRONG_NO_PART_ERROR = 1484
+ER_PARTITION_WRONG_NO_SUBPART_ERROR = 1485
+ER_WRONG_EXPR_IN_PARTITION_FUNC_ERROR = 1486
+OBSOLETE_ER_NO_CONST_EXPR_IN_RANGE_OR_LIST_ERROR = 1487
+ER_FIELD_NOT_FOUND_PART_ERROR = 1488
+OBSOLETE_ER_LIST_OF_FIELDS_ONLY_IN_HASH_ERROR = 1489
+ER_INCONSISTENT_PARTITION_INFO_ERROR = 1490
+ER_PARTITION_FUNC_NOT_ALLOWED_ERROR = 1491
+ER_PARTITIONS_MUST_BE_DEFINED_ERROR = 1492
+ER_RANGE_NOT_INCREASING_ERROR = 1493
+ER_INCONSISTENT_TYPE_OF_FUNCTIONS_ERROR = 1494
+ER_MULTIPLE_DEF_CONST_IN_LIST_PART_ERROR = 1495
+ER_PARTITION_ENTRY_ERROR = 1496
+ER_MIX_HANDLER_ERROR = 1497
+ER_PARTITION_NOT_DEFINED_ERROR = 1498
+ER_TOO_MANY_PARTITIONS_ERROR = 1499
+ER_SUBPARTITION_ERROR = 1500
+ER_CANT_CREATE_HANDLER_FILE = 1501
+ER_BLOB_FIELD_IN_PART_FUNC_ERROR = 1502
+ER_UNIQUE_KEY_NEED_ALL_FIELDS_IN_PF = 1503
+ER_NO_PARTS_ERROR = 1504
+ER_PARTITION_MGMT_ON_NONPARTITIONED = 1505
+ER_FOREIGN_KEY_ON_PARTITIONED = 1506
+ER_DROP_PARTITION_NON_EXISTENT = 1507
+ER_DROP_LAST_PARTITION = 1508
+ER_COALESCE_ONLY_ON_HASH_PARTITION = 1509
+ER_REORG_HASH_ONLY_ON_SAME_NO = 1510
+ER_REORG_NO_PARAM_ERROR = 1511
+ER_ONLY_ON_RANGE_LIST_PARTITION = 1512
+ER_ADD_PARTITION_SUBPART_ERROR = 1513
+ER_ADD_PARTITION_NO_NEW_PARTITION = 1514
+ER_COALESCE_PARTITION_NO_PARTITION = 1515
+ER_REORG_PARTITION_NOT_EXIST = 1516
+ER_SAME_NAME_PARTITION = 1517
+ER_NO_BINLOG_ERROR = 1518
+ER_CONSECUTIVE_REORG_PARTITIONS = 1519
+ER_REORG_OUTSIDE_RANGE = 1520
+ER_PARTITION_FUNCTION_FAILURE = 1521
+OBSOLETE_ER_PART_STATE_ERROR = 1522
+ER_LIMITED_PART_RANGE = 1523
+ER_PLUGIN_IS_NOT_LOADED = 1524
+ER_WRONG_VALUE = 1525
+ER_NO_PARTITION_FOR_GIVEN_VALUE = 1526
+ER_FILEGROUP_OPTION_ONLY_ONCE = 1527
+ER_CREATE_FILEGROUP_FAILED = 1528
+ER_DROP_FILEGROUP_FAILED = 1529
+ER_TABLESPACE_AUTO_EXTEND_ERROR = 1530
+ER_WRONG_SIZE_NUMBER = 1531
+ER_SIZE_OVERFLOW_ERROR = 1532
+ER_ALTER_FILEGROUP_FAILED = 1533
+ER_BINLOG_ROW_LOGGING_FAILED = 1534
+OBSOLETE_ER_BINLOG_ROW_WRONG_TABLE_DEF = 1535
+OBSOLETE_ER_BINLOG_ROW_RBR_TO_SBR = 1536
+ER_EVENT_ALREADY_EXISTS = 1537
+OBSOLETE_ER_EVENT_STORE_FAILED = 1538
+ER_EVENT_DOES_NOT_EXIST = 1539
+OBSOLETE_ER_EVENT_CANT_ALTER = 1540
+OBSOLETE_ER_EVENT_DROP_FAILED = 1541
+ER_EVENT_INTERVAL_NOT_POSITIVE_OR_TOO_BIG = 1542
+ER_EVENT_ENDS_BEFORE_STARTS = 1543
+ER_EVENT_EXEC_TIME_IN_THE_PAST = 1544
+OBSOLETE_ER_EVENT_OPEN_TABLE_FAILED = 1545
+OBSOLETE_ER_EVENT_NEITHER_M_EXPR_NOR_M_AT = 1546
+OBSOLETE_ER_COL_COUNT_DOESNT_MATCH_CORRUPTED = 1547
+OBSOLETE_ER_CANNOT_LOAD_FROM_TABLE = 1548
+OBSOLETE_ER_EVENT_CANNOT_DELETE = 1549
+OBSOLETE_ER_EVENT_COMPILE_ERROR = 1550
+ER_EVENT_SAME_NAME = 1551
+OBSOLETE_ER_EVENT_DATA_TOO_LONG = 1552
+ER_DROP_INDEX_FK = 1553
+ER_WARN_DEPRECATED_SYNTAX_WITH_VER = 1554
+OBSOLETE_ER_CANT_WRITE_LOCK_LOG_TABLE = 1555
+ER_CANT_LOCK_LOG_TABLE = 1556
+ER_FOREIGN_DUPLICATE_KEY_OLD_UNUSED = 1557
+ER_COL_COUNT_DOESNT_MATCH_PLEASE_UPDATE = 1558
+OBSOLETE_ER_TEMP_TABLE_PREVENTS_SWITCH_OUT_OF_RBR = 1559
+ER_STORED_FUNCTION_PREVENTS_SWITCH_BINLOG_FORMAT = 1560
+OBSOLETE_ER_NDB_CANT_SWITCH_BINLOG_FORMAT = 1561
+ER_PARTITION_NO_TEMPORARY = 1562
+ER_PARTITION_CONST_DOMAIN_ERROR = 1563
+ER_PARTITION_FUNCTION_IS_NOT_ALLOWED = 1564
+OBSOLETE_ER_DDL_LOG_ERROR_UNUSED = 1565
+ER_NULL_IN_VALUES_LESS_THAN = 1566
+ER_WRONG_PARTITION_NAME = 1567
+ER_CANT_CHANGE_TX_CHARACTERISTICS = 1568
+ER_DUP_ENTRY_AUTOINCREMENT_CASE = 1569
+OBSOLETE_ER_EVENT_MODIFY_QUEUE_ERROR = 1570
+ER_EVENT_SET_VAR_ERROR = 1571
+ER_PARTITION_MERGE_ERROR = 1572
+OBSOLETE_ER_CANT_ACTIVATE_LOG = 1573
+OBSOLETE_ER_RBR_NOT_AVAILABLE = 1574
+ER_BASE64_DECODE_ERROR = 1575
+ER_EVENT_RECURSION_FORBIDDEN = 1576
+OBSOLETE_ER_EVENTS_DB_ERROR = 1577
+ER_ONLY_INTEGERS_ALLOWED = 1578
+ER_UNSUPORTED_LOG_ENGINE = 1579
+ER_BAD_LOG_STATEMENT = 1580
+ER_CANT_RENAME_LOG_TABLE = 1581
+ER_WRONG_PARAMCOUNT_TO_NATIVE_FCT = 1582
+ER_WRONG_PARAMETERS_TO_NATIVE_FCT = 1583
+ER_WRONG_PARAMETERS_TO_STORED_FCT = 1584
+ER_NATIVE_FCT_NAME_COLLISION = 1585
+ER_DUP_ENTRY_WITH_KEY_NAME = 1586
+ER_BINLOG_PURGE_EMFILE = 1587
+ER_EVENT_CANNOT_CREATE_IN_THE_PAST = 1588
+ER_EVENT_CANNOT_ALTER_IN_THE_PAST = 1589
+OBSOLETE_ER_SLAVE_INCIDENT = 1590
+ER_NO_PARTITION_FOR_GIVEN_VALUE_SILENT = 1591
+ER_BINLOG_UNSAFE_STATEMENT = 1592
+ER_BINLOG_FATAL_ERROR = 1593
+OBSOLETE_ER_SLAVE_RELAY_LOG_READ_FAILURE = 1594
+OBSOLETE_ER_SLAVE_RELAY_LOG_WRITE_FAILURE = 1595
+OBSOLETE_ER_SLAVE_CREATE_EVENT_FAILURE = 1596
+OBSOLETE_ER_SLAVE_MASTER_COM_FAILURE = 1597
+ER_BINLOG_LOGGING_IMPOSSIBLE = 1598
+ER_VIEW_NO_CREATION_CTX = 1599
+ER_VIEW_INVALID_CREATION_CTX = 1600
+OBSOLETE_ER_SR_INVALID_CREATION_CTX = 1601
+ER_TRG_CORRUPTED_FILE = 1602
+ER_TRG_NO_CREATION_CTX = 1603
+ER_TRG_INVALID_CREATION_CTX = 1604
+ER_EVENT_INVALID_CREATION_CTX = 1605
+ER_TRG_CANT_OPEN_TABLE = 1606
+OBSOLETE_ER_CANT_CREATE_SROUTINE = 1607
+OBSOLETE_ER_NEVER_USED = 1608
+ER_NO_FORMAT_DESCRIPTION_EVENT_BEFORE_BINLOG_STATEMENT = 1609
+ER_SLAVE_CORRUPT_EVENT = 1610
+OBSOLETE_ER_LOAD_DATA_INVALID_COLUMN_UNUSED = 1611
+ER_LOG_PURGE_NO_FILE = 1612
+ER_XA_RBTIMEOUT = 1613
+ER_XA_RBDEADLOCK = 1614
+ER_NEED_REPREPARE = 1615
+OBSOLETE_ER_DELAYED_NOT_SUPPORTED = 1616
+WARN_NO_MASTER_INFO = 1617
+WARN_OPTION_IGNORED = 1618
+ER_PLUGIN_DELETE_BUILTIN = 1619
+WARN_PLUGIN_BUSY = 1620
+ER_VARIABLE_IS_READONLY = 1621
+ER_WARN_ENGINE_TRANSACTION_ROLLBACK = 1622
+OBSOLETE_ER_SLAVE_HEARTBEAT_FAILURE = 1623
+ER_SLAVE_HEARTBEAT_VALUE_OUT_OF_RANGE = 1624
+ER_NDB_REPLICATION_SCHEMA_ERROR = 1625
+ER_CONFLICT_FN_PARSE_ERROR = 1626
+ER_EXCEPTIONS_WRITE_ERROR = 1627
+ER_TOO_LONG_TABLE_COMMENT = 1628
+ER_TOO_LONG_FIELD_COMMENT = 1629
+ER_FUNC_INEXISTENT_NAME_COLLISION = 1630
+ER_DATABASE_NAME = 1631
+ER_TABLE_NAME = 1632
+ER_PARTITION_NAME = 1633
+ER_SUBPARTITION_NAME = 1634
+ER_TEMPORARY_NAME = 1635
+ER_RENAMED_NAME = 1636
+ER_TOO_MANY_CONCURRENT_TRXS = 1637
+WARN_NON_ASCII_SEPARATOR_NOT_IMPLEMENTED = 1638
+ER_DEBUG_SYNC_TIMEOUT = 1639
+ER_DEBUG_SYNC_HIT_LIMIT = 1640
+ER_DUP_SIGNAL_SET = 1641
+ER_SIGNAL_WARN = 1642
+ER_SIGNAL_NOT_FOUND = 1643
+ER_SIGNAL_EXCEPTION = 1644
+ER_RESIGNAL_WITHOUT_ACTIVE_HANDLER = 1645
+ER_SIGNAL_BAD_CONDITION_TYPE = 1646
+WARN_COND_ITEM_TRUNCATED = 1647
+ER_COND_ITEM_TOO_LONG = 1648
+ER_UNKNOWN_LOCALE = 1649
+ER_SLAVE_IGNORE_SERVER_IDS = 1650
+OBSOLETE_ER_QUERY_CACHE_DISABLED = 1651
+ER_SAME_NAME_PARTITION_FIELD = 1652
+ER_PARTITION_COLUMN_LIST_ERROR = 1653
+ER_WRONG_TYPE_COLUMN_VALUE_ERROR = 1654
+ER_TOO_MANY_PARTITION_FUNC_FIELDS_ERROR = 1655
+ER_MAXVALUE_IN_VALUES_IN = 1656
+ER_TOO_MANY_VALUES_ERROR = 1657
+ER_ROW_SINGLE_PARTITION_FIELD_ERROR = 1658
+ER_FIELD_TYPE_NOT_ALLOWED_AS_PARTITION_FIELD = 1659
+ER_PARTITION_FIELDS_TOO_LONG = 1660
+ER_BINLOG_ROW_ENGINE_AND_STMT_ENGINE = 1661
+ER_BINLOG_ROW_MODE_AND_STMT_ENGINE = 1662
+ER_BINLOG_UNSAFE_AND_STMT_ENGINE = 1663
+ER_BINLOG_ROW_INJECTION_AND_STMT_ENGINE = 1664
+ER_BINLOG_STMT_MODE_AND_ROW_ENGINE = 1665
+ER_BINLOG_ROW_INJECTION_AND_STMT_MODE = 1666
+ER_BINLOG_MULTIPLE_ENGINES_AND_SELF_LOGGING_ENGINE = 1667
+ER_BINLOG_UNSAFE_LIMIT = 1668
+OBSOLETE_ER_UNUSED4 = 1669
+ER_BINLOG_UNSAFE_SYSTEM_TABLE = 1670
+ER_BINLOG_UNSAFE_AUTOINC_COLUMNS = 1671
+ER_BINLOG_UNSAFE_UDF = 1672
+ER_BINLOG_UNSAFE_SYSTEM_VARIABLE = 1673
+ER_BINLOG_UNSAFE_SYSTEM_FUNCTION = 1674
+ER_BINLOG_UNSAFE_NONTRANS_AFTER_TRANS = 1675
+ER_MESSAGE_AND_STATEMENT = 1676
+OBSOLETE_ER_SLAVE_CONVERSION_FAILED = 1677
+ER_SLAVE_CANT_CREATE_CONVERSION = 1678
+ER_INSIDE_TRANSACTION_PREVENTS_SWITCH_BINLOG_FORMAT = 1679
+ER_PATH_LENGTH = 1680
+ER_WARN_DEPRECATED_SYNTAX_NO_REPLACEMENT = 1681
+ER_WRONG_NATIVE_TABLE_STRUCTURE = 1682
+ER_WRONG_PERFSCHEMA_USAGE = 1683
+ER_WARN_I_S_SKIPPED_TABLE = 1684
+ER_INSIDE_TRANSACTION_PREVENTS_SWITCH_BINLOG_DIRECT = 1685
+ER_STORED_FUNCTION_PREVENTS_SWITCH_BINLOG_DIRECT = 1686
+ER_SPATIAL_MUST_HAVE_GEOM_COL = 1687
+ER_TOO_LONG_INDEX_COMMENT = 1688
+ER_LOCK_ABORTED = 1689
+ER_DATA_OUT_OF_RANGE = 1690
+OBSOLETE_ER_WRONG_SPVAR_TYPE_IN_LIMIT = 1691
+ER_BINLOG_UNSAFE_MULTIPLE_ENGINES_AND_SELF_LOGGING_ENGINE = 1692
+ER_BINLOG_UNSAFE_MIXED_STATEMENT = 1693
+ER_INSIDE_TRANSACTION_PREVENTS_SWITCH_SQL_LOG_BIN = 1694
+ER_STORED_FUNCTION_PREVENTS_SWITCH_SQL_LOG_BIN = 1695
+ER_FAILED_READ_FROM_PAR_FILE = 1696
+ER_VALUES_IS_NOT_INT_TYPE_ERROR = 1697
+ER_ACCESS_DENIED_NO_PASSWORD_ERROR = 1698
+ER_SET_PASSWORD_AUTH_PLUGIN = 1699
+OBSOLETE_ER_GRANT_PLUGIN_USER_EXISTS = 1700
+ER_TRUNCATE_ILLEGAL_FK = 1701
+ER_PLUGIN_IS_PERMANENT = 1702
+ER_SLAVE_HEARTBEAT_VALUE_OUT_OF_RANGE_MIN = 1703
+ER_SLAVE_HEARTBEAT_VALUE_OUT_OF_RANGE_MAX = 1704
+ER_STMT_CACHE_FULL = 1705
+ER_MULTI_UPDATE_KEY_CONFLICT = 1706
+ER_TABLE_NEEDS_REBUILD = 1707
+WARN_OPTION_BELOW_LIMIT = 1708
+ER_INDEX_COLUMN_TOO_LONG = 1709
+ER_ERROR_IN_TRIGGER_BODY = 1710
+ER_ERROR_IN_UNKNOWN_TRIGGER_BODY = 1711
+ER_INDEX_CORRUPT = 1712
+ER_UNDO_RECORD_TOO_BIG = 1713
+ER_BINLOG_UNSAFE_INSERT_IGNORE_SELECT = 1714
+ER_BINLOG_UNSAFE_INSERT_SELECT_UPDATE = 1715
+ER_BINLOG_UNSAFE_REPLACE_SELECT = 1716
+ER_BINLOG_UNSAFE_CREATE_IGNORE_SELECT = 1717
+ER_BINLOG_UNSAFE_CREATE_REPLACE_SELECT = 1718
+ER_BINLOG_UNSAFE_UPDATE_IGNORE = 1719
+ER_PLUGIN_NO_UNINSTALL = 1720
+ER_PLUGIN_NO_INSTALL = 1721
+ER_BINLOG_UNSAFE_WRITE_AUTOINC_SELECT = 1722
+ER_BINLOG_UNSAFE_CREATE_SELECT_AUTOINC = 1723
+ER_BINLOG_UNSAFE_INSERT_TWO_KEYS = 1724
+ER_TABLE_IN_FK_CHECK = 1725
+ER_UNSUPPORTED_ENGINE = 1726
+ER_BINLOG_UNSAFE_AUTOINC_NOT_FIRST = 1727
+ER_CANNOT_LOAD_FROM_TABLE_V2 = 1728
+ER_MASTER_DELAY_VALUE_OUT_OF_RANGE = 1729
+ER_ONLY_FD_AND_RBR_EVENTS_ALLOWED_IN_BINLOG_STATEMENT = 1730
+ER_PARTITION_EXCHANGE_DIFFERENT_OPTION = 1731
+ER_PARTITION_EXCHANGE_PART_TABLE = 1732
+ER_PARTITION_EXCHANGE_TEMP_TABLE = 1733
+ER_PARTITION_INSTEAD_OF_SUBPARTITION = 1734
+ER_UNKNOWN_PARTITION = 1735
+ER_TABLES_DIFFERENT_METADATA = 1736
+ER_ROW_DOES_NOT_MATCH_PARTITION = 1737
+ER_BINLOG_CACHE_SIZE_GREATER_THAN_MAX = 1738
+ER_WARN_INDEX_NOT_APPLICABLE = 1739
+ER_PARTITION_EXCHANGE_FOREIGN_KEY = 1740
+OBSOLETE_ER_NO_SUCH_KEY_VALUE = 1741
+ER_RPL_INFO_DATA_TOO_LONG = 1742
+OBSOLETE_ER_NETWORK_READ_EVENT_CHECKSUM_FAILURE = 1743
+OBSOLETE_ER_BINLOG_READ_EVENT_CHECKSUM_FAILURE = 1744
+ER_BINLOG_STMT_CACHE_SIZE_GREATER_THAN_MAX = 1745
+ER_CANT_UPDATE_TABLE_IN_CREATE_TABLE_SELECT = 1746
+ER_PARTITION_CLAUSE_ON_NONPARTITIONED = 1747
+ER_ROW_DOES_NOT_MATCH_GIVEN_PARTITION_SET = 1748
+OBSOLETE_ER_NO_SUCH_PARTITION__UNUSED = 1749
+ER_CHANGE_RPL_INFO_REPOSITORY_FAILURE = 1750
+ER_WARNING_NOT_COMPLETE_ROLLBACK_WITH_CREATED_TEMP_TABLE = 1751
+ER_WARNING_NOT_COMPLETE_ROLLBACK_WITH_DROPPED_TEMP_TABLE = 1752
+ER_MTS_FEATURE_IS_NOT_SUPPORTED = 1753
+ER_MTS_UPDATED_DBS_GREATER_MAX = 1754
+ER_MTS_CANT_PARALLEL = 1755
+ER_MTS_INCONSISTENT_DATA = 1756
+ER_FULLTEXT_NOT_SUPPORTED_WITH_PARTITIONING = 1757
+ER_DA_INVALID_CONDITION_NUMBER = 1758
+ER_INSECURE_PLAIN_TEXT = 1759
+ER_INSECURE_CHANGE_MASTER = 1760
+ER_FOREIGN_DUPLICATE_KEY_WITH_CHILD_INFO = 1761
+ER_FOREIGN_DUPLICATE_KEY_WITHOUT_CHILD_INFO = 1762
+ER_SQLTHREAD_WITH_SECURE_SLAVE = 1763
+ER_TABLE_HAS_NO_FT = 1764
+ER_VARIABLE_NOT_SETTABLE_IN_SF_OR_TRIGGER = 1765
+ER_VARIABLE_NOT_SETTABLE_IN_TRANSACTION = 1766
+OBSOLETE_ER_GTID_NEXT_IS_NOT_IN_GTID_NEXT_LIST = 1767
+OBSOLETE_ER_CANT_CHANGE_GTID_NEXT_IN_TRANSACTION = 1768
+ER_SET_STATEMENT_CANNOT_INVOKE_FUNCTION = 1769
+ER_GTID_NEXT_CANT_BE_AUTOMATIC_IF_GTID_NEXT_LIST_IS_NON_NULL = 1770
+OBSOLETE_ER_SKIPPING_LOGGED_TRANSACTION = 1771
+ER_MALFORMED_GTID_SET_SPECIFICATION = 1772
+ER_MALFORMED_GTID_SET_ENCODING = 1773
+ER_MALFORMED_GTID_SPECIFICATION = 1774
+ER_GNO_EXHAUSTED = 1775
+ER_BAD_SLAVE_AUTO_POSITION = 1776
+ER_AUTO_POSITION_REQUIRES_GTID_MODE_NOT_OFF = 1777
+ER_CANT_DO_IMPLICIT_COMMIT_IN_TRX_WHEN_GTID_NEXT_IS_SET = 1778
+ER_GTID_MODE_ON_REQUIRES_ENFORCE_GTID_CONSISTENCY_ON = 1779
+OBSOLETE_ER_GTID_MODE_REQUIRES_BINLOG = 1780
+ER_CANT_SET_GTID_NEXT_TO_GTID_WHEN_GTID_MODE_IS_OFF = 1781
+ER_CANT_SET_GTID_NEXT_TO_ANONYMOUS_WHEN_GTID_MODE_IS_ON = 1782
+ER_CANT_SET_GTID_NEXT_LIST_TO_NON_NULL_WHEN_GTID_MODE_IS_OFF = 1783
+OBSOLETE_ER_FOUND_GTID_EVENT_WHEN_GTID_MODE_IS_OFF__UNUSED = 1784
+ER_GTID_UNSAFE_NON_TRANSACTIONAL_TABLE = 1785
+ER_GTID_UNSAFE_CREATE_SELECT = 1786
+OBSOLETE_ER_GTID_UNSAFE_CREATE_DROP_TEMP_TABLE_IN_TRANSACTION = 1787
+ER_GTID_MODE_CAN_ONLY_CHANGE_ONE_STEP_AT_A_TIME = 1788
+ER_MASTER_HAS_PURGED_REQUIRED_GTIDS = 1789
+ER_CANT_SET_GTID_NEXT_WHEN_OWNING_GTID = 1790
+ER_UNKNOWN_EXPLAIN_FORMAT = 1791
+ER_CANT_EXECUTE_IN_READ_ONLY_TRANSACTION = 1792
+ER_TOO_LONG_TABLE_PARTITION_COMMENT = 1793
+ER_SLAVE_CONFIGURATION = 1794
+ER_INNODB_FT_LIMIT = 1795
+ER_INNODB_NO_FT_TEMP_TABLE = 1796
+ER_INNODB_FT_WRONG_DOCID_COLUMN = 1797
+ER_INNODB_FT_WRONG_DOCID_INDEX = 1798
+ER_INNODB_ONLINE_LOG_TOO_BIG = 1799
+ER_UNKNOWN_ALTER_ALGORITHM = 1800
+ER_UNKNOWN_ALTER_LOCK = 1801
+ER_MTS_CHANGE_MASTER_CANT_RUN_WITH_GAPS = 1802
+ER_MTS_RECOVERY_FAILURE = 1803
+ER_MTS_RESET_WORKERS = 1804
+ER_COL_COUNT_DOESNT_MATCH_CORRUPTED_V2 = 1805
+ER_SLAVE_SILENT_RETRY_TRANSACTION = 1806
+ER_DISCARD_FK_CHECKS_RUNNING = 1807
+ER_TABLE_SCHEMA_MISMATCH = 1808
+ER_TABLE_IN_SYSTEM_TABLESPACE = 1809
+ER_IO_READ_ERROR = 1810
+ER_IO_WRITE_ERROR = 1811
+ER_TABLESPACE_MISSING = 1812
+ER_TABLESPACE_EXISTS = 1813
+ER_TABLESPACE_DISCARDED = 1814
+ER_INTERNAL_ERROR = 1815
+ER_INNODB_IMPORT_ERROR = 1816
+ER_INNODB_INDEX_CORRUPT = 1817
+ER_INVALID_YEAR_COLUMN_LENGTH = 1818
+ER_NOT_VALID_PASSWORD = 1819
+ER_MUST_CHANGE_PASSWORD = 1820
+ER_FK_NO_INDEX_CHILD = 1821
+ER_FK_NO_INDEX_PARENT = 1822
+ER_FK_FAIL_ADD_SYSTEM = 1823
+ER_FK_CANNOT_OPEN_PARENT = 1824
+ER_FK_INCORRECT_OPTION = 1825
+ER_FK_DUP_NAME = 1826
+ER_PASSWORD_FORMAT = 1827
+ER_FK_COLUMN_CANNOT_DROP = 1828
+ER_FK_COLUMN_CANNOT_DROP_CHILD = 1829
+ER_FK_COLUMN_NOT_NULL = 1830
+ER_DUP_INDEX = 1831
+ER_FK_COLUMN_CANNOT_CHANGE = 1832
+ER_FK_COLUMN_CANNOT_CHANGE_CHILD = 1833
+OBSOLETE_ER_UNUSED5 = 1834
+ER_MALFORMED_PACKET = 1835
+ER_READ_ONLY_MODE = 1836
+ER_GTID_NEXT_TYPE_UNDEFINED_GTID = 1837
+ER_VARIABLE_NOT_SETTABLE_IN_SP = 1838
+OBSOLETE_ER_CANT_SET_GTID_PURGED_WHEN_GTID_MODE_IS_OFF = 1839
+ER_CANT_SET_GTID_PURGED_WHEN_GTID_EXECUTED_IS_NOT_EMPTY = 1840
+ER_CANT_SET_GTID_PURGED_WHEN_OWNED_GTIDS_IS_NOT_EMPTY = 1841
+ER_GTID_PURGED_WAS_CHANGED = 1842
+ER_GTID_EXECUTED_WAS_CHANGED = 1843
+ER_BINLOG_STMT_MODE_AND_NO_REPL_TABLES = 1844
+ER_ALTER_OPERATION_NOT_SUPPORTED = 1845
+ER_ALTER_OPERATION_NOT_SUPPORTED_REASON = 1846
+ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_COPY = 1847
+ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_PARTITION = 1848
+ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_FK_RENAME = 1849
+ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_COLUMN_TYPE = 1850
+ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_FK_CHECK = 1851
+OBSOLETE_ER_UNUSED6 = 1852
+ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_NOPK = 1853
+ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_AUTOINC = 1854
+ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_HIDDEN_FTS = 1855
+ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_CHANGE_FTS = 1856
+ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_FTS = 1857
+OBSOLETE_ER_SQL_REPLICA_SKIP_COUNTER_NOT_SETTABLE_IN_GTID_MODE = 1858
+ER_DUP_UNKNOWN_IN_INDEX = 1859
+ER_IDENT_CAUSES_TOO_LONG_PATH = 1860
+ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_NOT_NULL = 1861
+ER_MUST_CHANGE_PASSWORD_LOGIN = 1862
+ER_ROW_IN_WRONG_PARTITION = 1863
+ER_MTS_EVENT_BIGGER_PENDING_JOBS_SIZE_MAX = 1864
+OBSOLETE_ER_INNODB_NO_FT_USES_PARSER = 1865
+ER_BINLOG_LOGICAL_CORRUPTION = 1866
+ER_WARN_PURGE_LOG_IN_USE = 1867
+ER_WARN_PURGE_LOG_IS_ACTIVE = 1868
+ER_AUTO_INCREMENT_CONFLICT = 1869
+WARN_ON_BLOCKHOLE_IN_RBR = 1870
+ER_SLAVE_MI_INIT_REPOSITORY = 1871
+ER_SLAVE_RLI_INIT_REPOSITORY = 1872
+ER_ACCESS_DENIED_CHANGE_USER_ERROR = 1873
+ER_INNODB_READ_ONLY = 1874
+ER_STOP_SLAVE_SQL_THREAD_TIMEOUT = 1875
+ER_STOP_SLAVE_IO_THREAD_TIMEOUT = 1876
+ER_TABLE_CORRUPT = 1877
+ER_TEMP_FILE_WRITE_FAILURE = 1878
+ER_INNODB_FT_AUX_NOT_HEX_ID = 1879
+ER_OLD_TEMPORALS_UPGRADED = 1880
+ER_INNODB_FORCED_RECOVERY = 1881
+ER_AES_INVALID_IV = 1882
+ER_PLUGIN_CANNOT_BE_UNINSTALLED = 1883
+ER_GTID_UNSAFE_BINLOG_SPLITTABLE_STATEMENT_AND_ASSIGNED_GTID = 1884
+ER_SLAVE_HAS_MORE_GTIDS_THAN_MASTER = 1885
+ER_MISSING_KEY = 1886
+WARN_NAMED_PIPE_ACCESS_EVERYONE = 1887
+ER_FILE_CORRUPT = 3000
+ER_ERROR_ON_MASTER = 3001
+OBSOLETE_ER_INCONSISTENT_ERROR = 3002
+ER_STORAGE_ENGINE_NOT_LOADED = 3003
+ER_GET_STACKED_DA_WITHOUT_ACTIVE_HANDLER = 3004
+ER_WARN_LEGACY_SYNTAX_CONVERTED = 3005
+ER_BINLOG_UNSAFE_FULLTEXT_PLUGIN = 3006
+ER_CANNOT_DISCARD_TEMPORARY_TABLE = 3007
+ER_FK_DEPTH_EXCEEDED = 3008
+ER_COL_COUNT_DOESNT_MATCH_PLEASE_UPDATE_V2 = 3009
+ER_WARN_TRIGGER_DOESNT_HAVE_CREATED = 3010
+ER_REFERENCED_TRG_DOES_NOT_EXIST = 3011
+ER_EXPLAIN_NOT_SUPPORTED = 3012
+ER_INVALID_FIELD_SIZE = 3013
+ER_MISSING_HA_CREATE_OPTION = 3014
+ER_ENGINE_OUT_OF_MEMORY = 3015
+ER_PASSWORD_EXPIRE_ANONYMOUS_USER = 3016
+ER_SLAVE_SQL_THREAD_MUST_STOP = 3017
+ER_NO_FT_MATERIALIZED_SUBQUERY = 3018
+ER_INNODB_UNDO_LOG_FULL = 3019
+ER_INVALID_ARGUMENT_FOR_LOGARITHM = 3020
+ER_SLAVE_CHANNEL_IO_THREAD_MUST_STOP = 3021
+ER_WARN_OPEN_TEMP_TABLES_MUST_BE_ZERO = 3022
+ER_WARN_ONLY_MASTER_LOG_FILE_NO_POS = 3023
+ER_QUERY_TIMEOUT = 3024
+ER_NON_RO_SELECT_DISABLE_TIMER = 3025
+ER_DUP_LIST_ENTRY = 3026
+OBSOLETE_ER_SQL_MODE_NO_EFFECT = 3027
+ER_AGGREGATE_ORDER_FOR_UNION = 3028
+ER_AGGREGATE_ORDER_NON_AGG_QUERY = 3029
+ER_SLAVE_WORKER_STOPPED_PREVIOUS_THD_ERROR = 3030
+ER_DONT_SUPPORT_REPLICA_PRESERVE_COMMIT_ORDER = 3031
+ER_SERVER_OFFLINE_MODE = 3032
+ER_GIS_DIFFERENT_SRIDS = 3033
+ER_GIS_UNSUPPORTED_ARGUMENT = 3034
+ER_GIS_UNKNOWN_ERROR = 3035
+ER_GIS_UNKNOWN_EXCEPTION = 3036
+ER_GIS_INVALID_DATA = 3037
+ER_BOOST_GEOMETRY_EMPTY_INPUT_EXCEPTION = 3038
+ER_BOOST_GEOMETRY_CENTROID_EXCEPTION = 3039
+ER_BOOST_GEOMETRY_OVERLAY_INVALID_INPUT_EXCEPTION = 3040
+ER_BOOST_GEOMETRY_TURN_INFO_EXCEPTION = 3041
+ER_BOOST_GEOMETRY_SELF_INTERSECTION_POINT_EXCEPTION = 3042
+ER_BOOST_GEOMETRY_UNKNOWN_EXCEPTION = 3043
+ER_STD_BAD_ALLOC_ERROR = 3044
+ER_STD_DOMAIN_ERROR = 3045
+ER_STD_LENGTH_ERROR = 3046
+ER_STD_INVALID_ARGUMENT = 3047
+ER_STD_OUT_OF_RANGE_ERROR = 3048
+ER_STD_OVERFLOW_ERROR = 3049
+ER_STD_RANGE_ERROR = 3050
+ER_STD_UNDERFLOW_ERROR = 3051
+ER_STD_LOGIC_ERROR = 3052
+ER_STD_RUNTIME_ERROR = 3053
+ER_STD_UNKNOWN_EXCEPTION = 3054
+ER_GIS_DATA_WRONG_ENDIANESS = 3055
+ER_CHANGE_MASTER_PASSWORD_LENGTH = 3056
+ER_USER_LOCK_WRONG_NAME = 3057
+ER_USER_LOCK_DEADLOCK = 3058
+ER_REPLACE_INACCESSIBLE_ROWS = 3059
+ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_GIS = 3060
+ER_ILLEGAL_USER_VAR = 3061
+ER_GTID_MODE_OFF = 3062
+OBSOLETE_ER_UNSUPPORTED_BY_REPLICATION_THREAD = 3063
+ER_INCORRECT_TYPE = 3064
+ER_FIELD_IN_ORDER_NOT_SELECT = 3065
+ER_AGGREGATE_IN_ORDER_NOT_SELECT = 3066
+ER_INVALID_RPL_WILD_TABLE_FILTER_PATTERN = 3067
+ER_NET_OK_PACKET_TOO_LARGE = 3068
+ER_INVALID_JSON_DATA = 3069
+ER_INVALID_GEOJSON_MISSING_MEMBER = 3070
+ER_INVALID_GEOJSON_WRONG_TYPE = 3071
+ER_INVALID_GEOJSON_UNSPECIFIED = 3072
+ER_DIMENSION_UNSUPPORTED = 3073
+ER_SLAVE_CHANNEL_DOES_NOT_EXIST = 3074
+OBSOLETE_ER_SLAVE_MULTIPLE_CHANNELS_HOST_PORT = 3075
+ER_SLAVE_CHANNEL_NAME_INVALID_OR_TOO_LONG = 3076
+ER_SLAVE_NEW_CHANNEL_WRONG_REPOSITORY = 3077
+OBSOLETE_ER_SLAVE_CHANNEL_DELETE = 3078
+ER_SLAVE_MULTIPLE_CHANNELS_CMD = 3079
+ER_SLAVE_MAX_CHANNELS_EXCEEDED = 3080
+ER_SLAVE_CHANNEL_MUST_STOP = 3081
+ER_SLAVE_CHANNEL_NOT_RUNNING = 3082
+ER_SLAVE_CHANNEL_WAS_RUNNING = 3083
+ER_SLAVE_CHANNEL_WAS_NOT_RUNNING = 3084
+ER_SLAVE_CHANNEL_SQL_THREAD_MUST_STOP = 3085
+ER_SLAVE_CHANNEL_SQL_SKIP_COUNTER = 3086
+ER_WRONG_FIELD_WITH_GROUP_V2 = 3087
+ER_MIX_OF_GROUP_FUNC_AND_FIELDS_V2 = 3088
+ER_WARN_DEPRECATED_SYSVAR_UPDATE = 3089
+ER_WARN_DEPRECATED_SQLMODE = 3090
+ER_CANNOT_LOG_PARTIAL_DROP_DATABASE_WITH_GTID = 3091
+ER_GROUP_REPLICATION_CONFIGURATION = 3092
+ER_GROUP_REPLICATION_RUNNING = 3093
+ER_GROUP_REPLICATION_APPLIER_INIT_ERROR = 3094
+ER_GROUP_REPLICATION_STOP_APPLIER_THREAD_TIMEOUT = 3095
+ER_GROUP_REPLICATION_COMMUNICATION_LAYER_SESSION_ERROR = 3096
+ER_GROUP_REPLICATION_COMMUNICATION_LAYER_JOIN_ERROR = 3097
+ER_BEFORE_DML_VALIDATION_ERROR = 3098
+ER_PREVENTS_VARIABLE_WITHOUT_RBR = 3099
+ER_RUN_HOOK_ERROR = 3100
+ER_TRANSACTION_ROLLBACK_DURING_COMMIT = 3101
+ER_GENERATED_COLUMN_FUNCTION_IS_NOT_ALLOWED = 3102
+ER_UNSUPPORTED_ALTER_INPLACE_ON_VIRTUAL_COLUMN = 3103
+ER_WRONG_FK_OPTION_FOR_GENERATED_COLUMN = 3104
+ER_NON_DEFAULT_VALUE_FOR_GENERATED_COLUMN = 3105
+ER_UNSUPPORTED_ACTION_ON_GENERATED_COLUMN = 3106
+ER_GENERATED_COLUMN_NON_PRIOR = 3107
+ER_DEPENDENT_BY_GENERATED_COLUMN = 3108
+ER_GENERATED_COLUMN_REF_AUTO_INC = 3109
+ER_FEATURE_NOT_AVAILABLE = 3110
+ER_CANT_SET_GTID_MODE = 3111
+ER_CANT_USE_AUTO_POSITION_WITH_GTID_MODE_OFF = 3112
+OBSOLETE_ER_CANT_REPLICATE_ANONYMOUS_WITH_AUTO_POSITION = 3113
+OBSOLETE_ER_CANT_REPLICATE_ANONYMOUS_WITH_GTID_MODE_ON = 3114
+OBSOLETE_ER_CANT_REPLICATE_GTID_WITH_GTID_MODE_OFF = 3115
+ER_CANT_ENFORCE_GTID_CONSISTENCY_WITH_ONGOING_GTID_VIOLATING_TX = 3116
+ER_ENFORCE_GTID_CONSISTENCY_WARN_WITH_ONGOING_GTID_VIOLATING_TX = 3117
+ER_ACCOUNT_HAS_BEEN_LOCKED = 3118
+ER_WRONG_TABLESPACE_NAME = 3119
+ER_TABLESPACE_IS_NOT_EMPTY = 3120
+ER_WRONG_FILE_NAME = 3121
+ER_BOOST_GEOMETRY_INCONSISTENT_TURNS_EXCEPTION = 3122
+ER_WARN_OPTIMIZER_HINT_SYNTAX_ERROR = 3123
+ER_WARN_BAD_MAX_EXECUTION_TIME = 3124
+ER_WARN_UNSUPPORTED_MAX_EXECUTION_TIME = 3125
+ER_WARN_CONFLICTING_HINT = 3126
+ER_WARN_UNKNOWN_QB_NAME = 3127
+ER_UNRESOLVED_HINT_NAME = 3128
+ER_WARN_ON_MODIFYING_GTID_EXECUTED_TABLE = 3129
+ER_PLUGGABLE_PROTOCOL_COMMAND_NOT_SUPPORTED = 3130
+ER_LOCKING_SERVICE_WRONG_NAME = 3131
+ER_LOCKING_SERVICE_DEADLOCK = 3132
+ER_LOCKING_SERVICE_TIMEOUT = 3133
+ER_GIS_MAX_POINTS_IN_GEOMETRY_OVERFLOWED = 3134
+ER_SQL_MODE_MERGED = 3135
+ER_VTOKEN_PLUGIN_TOKEN_MISMATCH = 3136
+ER_VTOKEN_PLUGIN_TOKEN_NOT_FOUND = 3137
+ER_CANT_SET_VARIABLE_WHEN_OWNING_GTID = 3138
+ER_SLAVE_CHANNEL_OPERATION_NOT_ALLOWED = 3139
+ER_INVALID_JSON_TEXT = 3140
+ER_INVALID_JSON_TEXT_IN_PARAM = 3141
+ER_INVALID_JSON_BINARY_DATA = 3142
+ER_INVALID_JSON_PATH = 3143
+ER_INVALID_JSON_CHARSET = 3144
+ER_INVALID_JSON_CHARSET_IN_FUNCTION = 3145
+ER_INVALID_TYPE_FOR_JSON = 3146
+ER_INVALID_CAST_TO_JSON = 3147
+ER_INVALID_JSON_PATH_CHARSET = 3148
+ER_INVALID_JSON_PATH_WILDCARD = 3149
+ER_JSON_VALUE_TOO_BIG = 3150
+ER_JSON_KEY_TOO_BIG = 3151
+ER_JSON_USED_AS_KEY = 3152
+ER_JSON_VACUOUS_PATH = 3153
+ER_JSON_BAD_ONE_OR_ALL_ARG = 3154
+ER_NUMERIC_JSON_VALUE_OUT_OF_RANGE = 3155
+ER_INVALID_JSON_VALUE_FOR_CAST = 3156
+ER_JSON_DOCUMENT_TOO_DEEP = 3157
+ER_JSON_DOCUMENT_NULL_KEY = 3158
+ER_SECURE_TRANSPORT_REQUIRED = 3159
+ER_NO_SECURE_TRANSPORTS_CONFIGURED = 3160
+ER_DISABLED_STORAGE_ENGINE = 3161
+ER_USER_DOES_NOT_EXIST = 3162
+ER_USER_ALREADY_EXISTS = 3163
+ER_AUDIT_API_ABORT = 3164
+ER_INVALID_JSON_PATH_ARRAY_CELL = 3165
+ER_BUFPOOL_RESIZE_INPROGRESS = 3166
+ER_FEATURE_DISABLED_SEE_DOC = 3167
+ER_SERVER_ISNT_AVAILABLE = 3168
+ER_SESSION_WAS_KILLED = 3169
+ER_CAPACITY_EXCEEDED = 3170
+ER_CAPACITY_EXCEEDED_IN_RANGE_OPTIMIZER = 3171
+OBSOLETE_ER_TABLE_NEEDS_UPG_PART = 3172
+ER_CANT_WAIT_FOR_EXECUTED_GTID_SET_WHILE_OWNING_A_GTID = 3173
+ER_CANNOT_ADD_FOREIGN_BASE_COL_VIRTUAL = 3174
+ER_CANNOT_CREATE_VIRTUAL_INDEX_CONSTRAINT = 3175
+ER_ERROR_ON_MODIFYING_GTID_EXECUTED_TABLE = 3176
+ER_LOCK_REFUSED_BY_ENGINE = 3177
+ER_UNSUPPORTED_ALTER_ONLINE_ON_VIRTUAL_COLUMN = 3178
+ER_MASTER_KEY_ROTATION_NOT_SUPPORTED_BY_SE = 3179
+OBSOLETE_ER_MASTER_KEY_ROTATION_ERROR_BY_SE = 3180
+ER_MASTER_KEY_ROTATION_BINLOG_FAILED = 3181
+ER_MASTER_KEY_ROTATION_SE_UNAVAILABLE = 3182
+ER_TABLESPACE_CANNOT_ENCRYPT = 3183
+ER_INVALID_ENCRYPTION_OPTION = 3184
+ER_CANNOT_FIND_KEY_IN_KEYRING = 3185
+ER_CAPACITY_EXCEEDED_IN_PARSER = 3186
+ER_UNSUPPORTED_ALTER_ENCRYPTION_INPLACE = 3187
+ER_KEYRING_UDF_KEYRING_SERVICE_ERROR = 3188
+ER_USER_COLUMN_OLD_LENGTH = 3189
+ER_CANT_RESET_MASTER = 3190
+ER_GROUP_REPLICATION_MAX_GROUP_SIZE = 3191
+ER_CANNOT_ADD_FOREIGN_BASE_COL_STORED = 3192
+ER_TABLE_REFERENCED = 3193
+OBSOLETE_ER_PARTITION_ENGINE_DEPRECATED_FOR_TABLE = 3194
+OBSOLETE_ER_WARN_USING_GEOMFROMWKB_TO_SET_SRID_ZERO = 3195
+OBSOLETE_ER_WARN_USING_GEOMFROMWKB_TO_SET_SRID = 3196
+ER_XA_RETRY = 3197
+ER_KEYRING_AWS_UDF_AWS_KMS_ERROR = 3198
+ER_BINLOG_UNSAFE_XA = 3199
+ER_UDF_ERROR = 3200
+ER_KEYRING_MIGRATION_FAILURE = 3201
+ER_KEYRING_ACCESS_DENIED_ERROR = 3202
+ER_KEYRING_MIGRATION_STATUS = 3203
+OBSOLETE_ER_PLUGIN_FAILED_TO_OPEN_TABLES = 3204
+OBSOLETE_ER_PLUGIN_FAILED_TO_OPEN_TABLE = 3205
+OBSOLETE_ER_AUDIT_LOG_NO_KEYRING_PLUGIN_INSTALLED = 3206
+OBSOLETE_ER_AUDIT_LOG_ENCRYPTION_PASSWORD_HAS_NOT_BEEN_SET = 3207
+OBSOLETE_ER_AUDIT_LOG_COULD_NOT_CREATE_AES_KEY = 3208
+OBSOLETE_ER_AUDIT_LOG_ENCRYPTION_PASSWORD_CANNOT_BE_FETCHED = 3209
+OBSOLETE_ER_AUDIT_LOG_JSON_FILTERING_NOT_ENABLED = 3210
+OBSOLETE_ER_AUDIT_LOG_UDF_INSUFFICIENT_PRIVILEGE = 3211
+OBSOLETE_ER_AUDIT_LOG_SUPER_PRIVILEGE_REQUIRED = 3212
+OBSOLETE_ER_COULD_NOT_REINITIALIZE_AUDIT_LOG_FILTERS = 3213
+OBSOLETE_ER_AUDIT_LOG_UDF_INVALID_ARGUMENT_TYPE = 3214
+OBSOLETE_ER_AUDIT_LOG_UDF_INVALID_ARGUMENT_COUNT = 3215
+OBSOLETE_ER_AUDIT_LOG_HAS_NOT_BEEN_INSTALLED = 3216
+OBSOLETE_ER_AUDIT_LOG_UDF_READ_INVALID_MAX_ARRAY_LENGTH_ARG_TYPE = 3217
+ER_AUDIT_LOG_UDF_READ_INVALID_MAX_ARRAY_LENGTH_ARG_VALUE = 3218
+OBSOLETE_ER_AUDIT_LOG_JSON_FILTER_PARSING_ERROR = 3219
+OBSOLETE_ER_AUDIT_LOG_JSON_FILTER_NAME_CANNOT_BE_EMPTY = 3220
+OBSOLETE_ER_AUDIT_LOG_JSON_USER_NAME_CANNOT_BE_EMPTY = 3221
+OBSOLETE_ER_AUDIT_LOG_JSON_FILTER_DOES_NOT_EXISTS = 3222
+OBSOLETE_ER_AUDIT_LOG_USER_FIRST_CHARACTER_MUST_BE_ALPHANUMERIC = 3223
+OBSOLETE_ER_AUDIT_LOG_USER_NAME_INVALID_CHARACTER = 3224
+OBSOLETE_ER_AUDIT_LOG_HOST_NAME_INVALID_CHARACTER = 3225
+OBSOLETE_ER_XA_REPLICATION_FILTERS = 3226
+OBSOLETE_ER_CANT_OPEN_ERROR_LOG = 3227
+OBSOLETE_ER_GROUPING_ON_TIMESTAMP_IN_DST = 3228
+OBSOLETE_ER_CANT_START_SERVER_NAMED_PIPE = 3229
+ER_WRITE_SET_EXCEEDS_LIMIT = 3230
+ER_UNSUPPORT_COMPRESSED_TEMPORARY_TABLE = 3500
+ER_ACL_OPERATION_FAILED = 3501
+ER_UNSUPPORTED_INDEX_ALGORITHM = 3502
+ER_NO_SUCH_DB = 3503
+ER_TOO_BIG_ENUM = 3504
+ER_TOO_LONG_SET_ENUM_VALUE = 3505
+ER_INVALID_DD_OBJECT = 3506
+ER_UPDATING_DD_TABLE = 3507
+ER_INVALID_DD_OBJECT_ID = 3508
+ER_INVALID_DD_OBJECT_NAME = 3509
+ER_TABLESPACE_MISSING_WITH_NAME = 3510
+ER_TOO_LONG_ROUTINE_COMMENT = 3511
+ER_SP_LOAD_FAILED = 3512
+ER_INVALID_BITWISE_OPERANDS_SIZE = 3513
+ER_INVALID_BITWISE_AGGREGATE_OPERANDS_SIZE = 3514
+ER_WARN_UNSUPPORTED_HINT = 3515
+ER_UNEXPECTED_GEOMETRY_TYPE = 3516
+ER_SRS_PARSE_ERROR = 3517
+ER_SRS_PROJ_PARAMETER_MISSING = 3518
+ER_WARN_SRS_NOT_FOUND = 3519
+ER_SRS_NOT_CARTESIAN = 3520
+ER_SRS_NOT_CARTESIAN_UNDEFINED = 3521
+ER_PK_INDEX_CANT_BE_INVISIBLE = 3522
+ER_UNKNOWN_AUTHID = 3523
+ER_FAILED_ROLE_GRANT = 3524
+ER_OPEN_ROLE_TABLES = 3525
+ER_FAILED_DEFAULT_ROLES = 3526
+ER_COMPONENTS_NO_SCHEME = 3527
+ER_COMPONENTS_NO_SCHEME_SERVICE = 3528
+ER_COMPONENTS_CANT_LOAD = 3529
+ER_ROLE_NOT_GRANTED = 3530
+ER_FAILED_REVOKE_ROLE = 3531
+ER_RENAME_ROLE = 3532
+ER_COMPONENTS_CANT_ACQUIRE_SERVICE_IMPLEMENTATION = 3533
+ER_COMPONENTS_CANT_SATISFY_DEPENDENCY = 3534
+ER_COMPONENTS_LOAD_CANT_REGISTER_SERVICE_IMPLEMENTATION = 3535
+ER_COMPONENTS_LOAD_CANT_INITIALIZE = 3536
+ER_COMPONENTS_UNLOAD_NOT_LOADED = 3537
+ER_COMPONENTS_UNLOAD_CANT_DEINITIALIZE = 3538
+ER_COMPONENTS_CANT_RELEASE_SERVICE = 3539
+ER_COMPONENTS_UNLOAD_CANT_UNREGISTER_SERVICE = 3540
+ER_COMPONENTS_CANT_UNLOAD = 3541
+ER_WARN_UNLOAD_THE_NOT_PERSISTED = 3542
+ER_COMPONENT_TABLE_INCORRECT = 3543
+ER_COMPONENT_MANIPULATE_ROW_FAILED = 3544
+ER_COMPONENTS_UNLOAD_DUPLICATE_IN_GROUP = 3545
+ER_CANT_SET_GTID_PURGED_DUE_SETS_CONSTRAINTS = 3546
+ER_CANNOT_LOCK_USER_MANAGEMENT_CACHES = 3547
+ER_SRS_NOT_FOUND = 3548
+ER_VARIABLE_NOT_PERSISTED = 3549
+ER_IS_QUERY_INVALID_CLAUSE = 3550
+ER_UNABLE_TO_STORE_STATISTICS = 3551
+ER_NO_SYSTEM_SCHEMA_ACCESS = 3552
+ER_NO_SYSTEM_TABLESPACE_ACCESS = 3553
+ER_NO_SYSTEM_TABLE_ACCESS = 3554
+ER_NO_SYSTEM_TABLE_ACCESS_FOR_DICTIONARY_TABLE = 3555
+ER_NO_SYSTEM_TABLE_ACCESS_FOR_SYSTEM_TABLE = 3556
+ER_NO_SYSTEM_TABLE_ACCESS_FOR_TABLE = 3557
+ER_INVALID_OPTION_KEY = 3558
+ER_INVALID_OPTION_VALUE = 3559
+ER_INVALID_OPTION_KEY_VALUE_PAIR = 3560
+ER_INVALID_OPTION_START_CHARACTER = 3561
+ER_INVALID_OPTION_END_CHARACTER = 3562
+ER_INVALID_OPTION_CHARACTERS = 3563
+ER_DUPLICATE_OPTION_KEY = 3564
+ER_WARN_SRS_NOT_FOUND_AXIS_ORDER = 3565
+ER_NO_ACCESS_TO_NATIVE_FCT = 3566
+ER_RESET_MASTER_TO_VALUE_OUT_OF_RANGE = 3567
+ER_UNRESOLVED_TABLE_LOCK = 3568
+ER_DUPLICATE_TABLE_LOCK = 3569
+ER_BINLOG_UNSAFE_SKIP_LOCKED = 3570
+ER_BINLOG_UNSAFE_NOWAIT = 3571
+ER_LOCK_NOWAIT = 3572
+ER_CTE_RECURSIVE_REQUIRES_UNION = 3573
+ER_CTE_RECURSIVE_REQUIRES_NONRECURSIVE_FIRST = 3574
+ER_CTE_RECURSIVE_FORBIDS_AGGREGATION = 3575
+ER_CTE_RECURSIVE_FORBIDDEN_JOIN_ORDER = 3576
+ER_CTE_RECURSIVE_REQUIRES_SINGLE_REFERENCE = 3577
+ER_SWITCH_TMP_ENGINE = 3578
+ER_WINDOW_NO_SUCH_WINDOW = 3579
+ER_WINDOW_CIRCULARITY_IN_WINDOW_GRAPH = 3580
+ER_WINDOW_NO_CHILD_PARTITIONING = 3581
+ER_WINDOW_NO_INHERIT_FRAME = 3582
+ER_WINDOW_NO_REDEFINE_ORDER_BY = 3583
+ER_WINDOW_FRAME_START_ILLEGAL = 3584
+ER_WINDOW_FRAME_END_ILLEGAL = 3585
+ER_WINDOW_FRAME_ILLEGAL = 3586
+ER_WINDOW_RANGE_FRAME_ORDER_TYPE = 3587
+ER_WINDOW_RANGE_FRAME_TEMPORAL_TYPE = 3588
+ER_WINDOW_RANGE_FRAME_NUMERIC_TYPE = 3589
+ER_WINDOW_RANGE_BOUND_NOT_CONSTANT = 3590
+ER_WINDOW_DUPLICATE_NAME = 3591
+ER_WINDOW_ILLEGAL_ORDER_BY = 3592
+ER_WINDOW_INVALID_WINDOW_FUNC_USE = 3593
+ER_WINDOW_INVALID_WINDOW_FUNC_ALIAS_USE = 3594
+ER_WINDOW_NESTED_WINDOW_FUNC_USE_IN_WINDOW_SPEC = 3595
+ER_WINDOW_ROWS_INTERVAL_USE = 3596
+ER_WINDOW_NO_GROUP_ORDER_UNUSED = 3597
+ER_WINDOW_EXPLAIN_JSON = 3598
+ER_WINDOW_FUNCTION_IGNORES_FRAME = 3599
+ER_WL9236_NOW_UNUSED = 3600
+ER_INVALID_NO_OF_ARGS = 3601
+ER_FIELD_IN_GROUPING_NOT_GROUP_BY = 3602
+ER_TOO_LONG_TABLESPACE_COMMENT = 3603
+ER_ENGINE_CANT_DROP_TABLE = 3604
+ER_ENGINE_CANT_DROP_MISSING_TABLE = 3605
+ER_TABLESPACE_DUP_FILENAME = 3606
+ER_DB_DROP_RMDIR2 = 3607
+ER_IMP_NO_FILES_MATCHED = 3608
+ER_IMP_SCHEMA_DOES_NOT_EXIST = 3609
+ER_IMP_TABLE_ALREADY_EXISTS = 3610
+ER_IMP_INCOMPATIBLE_MYSQLD_VERSION = 3611
+ER_IMP_INCOMPATIBLE_DD_VERSION = 3612
+ER_IMP_INCOMPATIBLE_SDI_VERSION = 3613
+ER_WARN_INVALID_HINT = 3614
+ER_VAR_DOES_NOT_EXIST = 3615
+ER_LONGITUDE_OUT_OF_RANGE = 3616
+ER_LATITUDE_OUT_OF_RANGE = 3617
+ER_NOT_IMPLEMENTED_FOR_GEOGRAPHIC_SRS = 3618
+ER_ILLEGAL_PRIVILEGE_LEVEL = 3619
+ER_NO_SYSTEM_VIEW_ACCESS = 3620
+ER_COMPONENT_FILTER_FLABBERGASTED = 3621
+ER_PART_EXPR_TOO_LONG = 3622
+ER_UDF_DROP_DYNAMICALLY_REGISTERED = 3623
+ER_UNABLE_TO_STORE_COLUMN_STATISTICS = 3624
+ER_UNABLE_TO_UPDATE_COLUMN_STATISTICS = 3625
+ER_UNABLE_TO_DROP_COLUMN_STATISTICS = 3626
+ER_UNABLE_TO_BUILD_HISTOGRAM = 3627
+ER_MANDATORY_ROLE = 3628
+ER_MISSING_TABLESPACE_FILE = 3629
+ER_PERSIST_ONLY_ACCESS_DENIED_ERROR = 3630
+ER_CMD_NEED_SUPER = 3631
+ER_PATH_IN_DATADIR = 3632
+ER_CLONE_DDL_IN_PROGRESS = 3633
+ER_CLONE_TOO_MANY_CONCURRENT_CLONES = 3634
+ER_APPLIER_LOG_EVENT_VALIDATION_ERROR = 3635
+ER_CTE_MAX_RECURSION_DEPTH = 3636
+ER_NOT_HINT_UPDATABLE_VARIABLE = 3637
+ER_CREDENTIALS_CONTRADICT_TO_HISTORY = 3638
+ER_WARNING_PASSWORD_HISTORY_CLAUSES_VOID = 3639
+ER_CLIENT_DOES_NOT_SUPPORT = 3640
+ER_I_S_SKIPPED_TABLESPACE = 3641
+ER_TABLESPACE_ENGINE_MISMATCH = 3642
+ER_WRONG_SRID_FOR_COLUMN = 3643
+ER_CANNOT_ALTER_SRID_DUE_TO_INDEX = 3644
+ER_WARN_BINLOG_PARTIAL_UPDATES_DISABLED = 3645
+ER_WARN_BINLOG_V1_ROW_EVENTS_DISABLED = 3646
+ER_WARN_BINLOG_PARTIAL_UPDATES_SUGGESTS_PARTIAL_IMAGES = 3647
+ER_COULD_NOT_APPLY_JSON_DIFF = 3648
+ER_CORRUPTED_JSON_DIFF = 3649
+ER_RESOURCE_GROUP_EXISTS = 3650
+ER_RESOURCE_GROUP_NOT_EXISTS = 3651
+ER_INVALID_VCPU_ID = 3652
+ER_INVALID_VCPU_RANGE = 3653
+ER_INVALID_THREAD_PRIORITY = 3654
+ER_DISALLOWED_OPERATION = 3655
+ER_RESOURCE_GROUP_BUSY = 3656
+ER_RESOURCE_GROUP_DISABLED = 3657
+ER_FEATURE_UNSUPPORTED = 3658
+ER_ATTRIBUTE_IGNORED = 3659
+ER_INVALID_THREAD_ID = 3660
+ER_RESOURCE_GROUP_BIND_FAILED = 3661
+ER_INVALID_USE_OF_FORCE_OPTION = 3662
+ER_GROUP_REPLICATION_COMMAND_FAILURE = 3663
+ER_SDI_OPERATION_FAILED = 3664
+ER_MISSING_JSON_TABLE_VALUE = 3665
+ER_WRONG_JSON_TABLE_VALUE = 3666
+ER_TF_MUST_HAVE_ALIAS = 3667
+ER_TF_FORBIDDEN_JOIN_TYPE = 3668
+ER_JT_VALUE_OUT_OF_RANGE = 3669
+ER_JT_MAX_NESTED_PATH = 3670
+ER_PASSWORD_EXPIRATION_NOT_SUPPORTED_BY_AUTH_METHOD = 3671
+ER_INVALID_GEOJSON_CRS_NOT_TOP_LEVEL = 3672
+ER_BAD_NULL_ERROR_NOT_IGNORED = 3673
+WARN_USELESS_SPATIAL_INDEX = 3674
+ER_DISK_FULL_NOWAIT = 3675
+ER_PARSE_ERROR_IN_DIGEST_FN = 3676
+ER_UNDISCLOSED_PARSE_ERROR_IN_DIGEST_FN = 3677
+ER_SCHEMA_DIR_EXISTS = 3678
+ER_SCHEMA_DIR_MISSING = 3679
+ER_SCHEMA_DIR_CREATE_FAILED = 3680
+ER_SCHEMA_DIR_UNKNOWN = 3681
+ER_ONLY_IMPLEMENTED_FOR_SRID_0_AND_4326 = 3682
+ER_BINLOG_EXPIRE_LOG_DAYS_AND_SECS_USED_TOGETHER = 3683
+ER_REGEXP_BUFFER_OVERFLOW = 3684
+ER_REGEXP_ILLEGAL_ARGUMENT = 3685
+ER_REGEXP_INDEX_OUTOFBOUNDS_ERROR = 3686
+ER_REGEXP_INTERNAL_ERROR = 3687
+ER_REGEXP_RULE_SYNTAX = 3688
+ER_REGEXP_BAD_ESCAPE_SEQUENCE = 3689
+ER_REGEXP_UNIMPLEMENTED = 3690
+ER_REGEXP_MISMATCHED_PAREN = 3691
+ER_REGEXP_BAD_INTERVAL = 3692
+ER_REGEXP_MAX_LT_MIN = 3693
+ER_REGEXP_INVALID_BACK_REF = 3694
+ER_REGEXP_LOOK_BEHIND_LIMIT = 3695
+ER_REGEXP_MISSING_CLOSE_BRACKET = 3696
+ER_REGEXP_INVALID_RANGE = 3697
+ER_REGEXP_STACK_OVERFLOW = 3698
+ER_REGEXP_TIME_OUT = 3699
+ER_REGEXP_PATTERN_TOO_BIG = 3700
+ER_CANT_SET_ERROR_LOG_SERVICE = 3701
+ER_EMPTY_PIPELINE_FOR_ERROR_LOG_SERVICE = 3702
+ER_COMPONENT_FILTER_DIAGNOSTICS = 3703
+ER_NOT_IMPLEMENTED_FOR_CARTESIAN_SRS = 3704
+ER_NOT_IMPLEMENTED_FOR_PROJECTED_SRS = 3705
+ER_NONPOSITIVE_RADIUS = 3706
+ER_RESTART_SERVER_FAILED = 3707
+ER_SRS_MISSING_MANDATORY_ATTRIBUTE = 3708
+ER_SRS_MULTIPLE_ATTRIBUTE_DEFINITIONS = 3709
+ER_SRS_NAME_CANT_BE_EMPTY_OR_WHITESPACE = 3710
+ER_SRS_ORGANIZATION_CANT_BE_EMPTY_OR_WHITESPACE = 3711
+ER_SRS_ID_ALREADY_EXISTS = 3712
+ER_WARN_SRS_ID_ALREADY_EXISTS = 3713
+ER_CANT_MODIFY_SRID_0 = 3714
+ER_WARN_RESERVED_SRID_RANGE = 3715
+ER_CANT_MODIFY_SRS_USED_BY_COLUMN = 3716
+ER_SRS_INVALID_CHARACTER_IN_ATTRIBUTE = 3717
+ER_SRS_ATTRIBUTE_STRING_TOO_LONG = 3718
+ER_DEPRECATED_UTF8_ALIAS = 3719
+ER_DEPRECATED_NATIONAL = 3720
+ER_INVALID_DEFAULT_UTF8MB4_COLLATION = 3721
+ER_UNABLE_TO_COLLECT_LOG_STATUS = 3722
+ER_RESERVED_TABLESPACE_NAME = 3723
+ER_UNABLE_TO_SET_OPTION = 3724
+ER_SLAVE_POSSIBLY_DIVERGED_AFTER_DDL = 3725
+ER_SRS_NOT_GEOGRAPHIC = 3726
+ER_POLYGON_TOO_LARGE = 3727
+ER_SPATIAL_UNIQUE_INDEX = 3728
+ER_INDEX_TYPE_NOT_SUPPORTED_FOR_SPATIAL_INDEX = 3729
+ER_FK_CANNOT_DROP_PARENT = 3730
+ER_GEOMETRY_PARAM_LONGITUDE_OUT_OF_RANGE = 3731
+ER_GEOMETRY_PARAM_LATITUDE_OUT_OF_RANGE = 3732
+ER_FK_CANNOT_USE_VIRTUAL_COLUMN = 3733
+ER_FK_NO_COLUMN_PARENT = 3734
+ER_CANT_SET_ERROR_SUPPRESSION_LIST = 3735
+ER_SRS_GEOGCS_INVALID_AXES = 3736
+ER_SRS_INVALID_SEMI_MAJOR_AXIS = 3737
+ER_SRS_INVALID_INVERSE_FLATTENING = 3738
+ER_SRS_INVALID_ANGULAR_UNIT = 3739
+ER_SRS_INVALID_PRIME_MERIDIAN = 3740
+ER_TRANSFORM_SOURCE_SRS_NOT_SUPPORTED = 3741
+ER_TRANSFORM_TARGET_SRS_NOT_SUPPORTED = 3742
+ER_TRANSFORM_SOURCE_SRS_MISSING_TOWGS84 = 3743
+ER_TRANSFORM_TARGET_SRS_MISSING_TOWGS84 = 3744
+ER_TEMP_TABLE_PREVENTS_SWITCH_SESSION_BINLOG_FORMAT = 3745
+ER_TEMP_TABLE_PREVENTS_SWITCH_GLOBAL_BINLOG_FORMAT = 3746
+ER_RUNNING_APPLIER_PREVENTS_SWITCH_GLOBAL_BINLOG_FORMAT = 3747
+ER_CLIENT_GTID_UNSAFE_CREATE_DROP_TEMP_TABLE_IN_TRX_IN_SBR = 3748
+OBSOLETE_ER_XA_CANT_CREATE_MDL_BACKUP = 3749
+ER_TABLE_WITHOUT_PK = 3750
+ER_WARN_DATA_TRUNCATED_FUNCTIONAL_INDEX = 3751
+ER_WARN_DATA_OUT_OF_RANGE_FUNCTIONAL_INDEX = 3752
+ER_FUNCTIONAL_INDEX_ON_JSON_OR_GEOMETRY_FUNCTION = 3753
+ER_FUNCTIONAL_INDEX_REF_AUTO_INCREMENT = 3754
+ER_CANNOT_DROP_COLUMN_FUNCTIONAL_INDEX = 3755
+ER_FUNCTIONAL_INDEX_PRIMARY_KEY = 3756
+ER_FUNCTIONAL_INDEX_ON_LOB = 3757
+ER_FUNCTIONAL_INDEX_FUNCTION_IS_NOT_ALLOWED = 3758
+ER_FULLTEXT_FUNCTIONAL_INDEX = 3759
+ER_SPATIAL_FUNCTIONAL_INDEX = 3760
+ER_WRONG_KEY_COLUMN_FUNCTIONAL_INDEX = 3761
+ER_FUNCTIONAL_INDEX_ON_FIELD = 3762
+ER_GENERATED_COLUMN_NAMED_FUNCTION_IS_NOT_ALLOWED = 3763
+ER_GENERATED_COLUMN_ROW_VALUE = 3764
+ER_GENERATED_COLUMN_VARIABLES = 3765
+ER_DEPENDENT_BY_DEFAULT_GENERATED_VALUE = 3766
+ER_DEFAULT_VAL_GENERATED_NON_PRIOR = 3767
+ER_DEFAULT_VAL_GENERATED_REF_AUTO_INC = 3768
+ER_DEFAULT_VAL_GENERATED_FUNCTION_IS_NOT_ALLOWED = 3769
+ER_DEFAULT_VAL_GENERATED_NAMED_FUNCTION_IS_NOT_ALLOWED = 3770
+ER_DEFAULT_VAL_GENERATED_ROW_VALUE = 3771
+ER_DEFAULT_VAL_GENERATED_VARIABLES = 3772
+ER_DEFAULT_AS_VAL_GENERATED = 3773
+ER_UNSUPPORTED_ACTION_ON_DEFAULT_VAL_GENERATED = 3774
+ER_GTID_UNSAFE_ALTER_ADD_COL_WITH_DEFAULT_EXPRESSION = 3775
+ER_FK_CANNOT_CHANGE_ENGINE = 3776
+ER_WARN_DEPRECATED_USER_SET_EXPR = 3777
+ER_WARN_DEPRECATED_UTF8MB3_COLLATION = 3778
+ER_WARN_DEPRECATED_NESTED_COMMENT_SYNTAX = 3779
+ER_FK_INCOMPATIBLE_COLUMNS = 3780
+ER_GR_HOLD_WAIT_TIMEOUT = 3781
+ER_GR_HOLD_KILLED = 3782
+ER_GR_HOLD_MEMBER_STATUS_ERROR = 3783
+ER_RPL_ENCRYPTION_FAILED_TO_FETCH_KEY = 3784
+ER_RPL_ENCRYPTION_KEY_NOT_FOUND = 3785
+ER_RPL_ENCRYPTION_KEYRING_INVALID_KEY = 3786
+ER_RPL_ENCRYPTION_HEADER_ERROR = 3787
+ER_RPL_ENCRYPTION_FAILED_TO_ROTATE_LOGS = 3788
+ER_RPL_ENCRYPTION_KEY_EXISTS_UNEXPECTED = 3789
+ER_RPL_ENCRYPTION_FAILED_TO_GENERATE_KEY = 3790
+ER_RPL_ENCRYPTION_FAILED_TO_STORE_KEY = 3791
+ER_RPL_ENCRYPTION_FAILED_TO_REMOVE_KEY = 3792
+ER_RPL_ENCRYPTION_UNABLE_TO_CHANGE_OPTION = 3793
+ER_RPL_ENCRYPTION_MASTER_KEY_RECOVERY_FAILED = 3794
+ER_SLOW_LOG_MODE_IGNORED_WHEN_NOT_LOGGING_TO_FILE = 3795
+ER_GRP_TRX_CONSISTENCY_NOT_ALLOWED = 3796
+ER_GRP_TRX_CONSISTENCY_BEFORE = 3797
+ER_GRP_TRX_CONSISTENCY_AFTER_ON_TRX_BEGIN = 3798
+ER_GRP_TRX_CONSISTENCY_BEGIN_NOT_ALLOWED = 3799
+ER_FUNCTIONAL_INDEX_ROW_VALUE_IS_NOT_ALLOWED = 3800
+ER_RPL_ENCRYPTION_FAILED_TO_ENCRYPT = 3801
+ER_PAGE_TRACKING_NOT_STARTED = 3802
+ER_PAGE_TRACKING_RANGE_NOT_TRACKED = 3803
+ER_PAGE_TRACKING_CANNOT_PURGE = 3804
+ER_RPL_ENCRYPTION_CANNOT_ROTATE_BINLOG_MASTER_KEY = 3805
+ER_BINLOG_MASTER_KEY_RECOVERY_OUT_OF_COMBINATION = 3806
+ER_BINLOG_MASTER_KEY_ROTATION_FAIL_TO_OPERATE_KEY = 3807
+ER_BINLOG_MASTER_KEY_ROTATION_FAIL_TO_ROTATE_LOGS = 3808
+ER_BINLOG_MASTER_KEY_ROTATION_FAIL_TO_REENCRYPT_LOG = 3809
+ER_BINLOG_MASTER_KEY_ROTATION_FAIL_TO_CLEANUP_UNUSED_KEYS = 3810
+ER_BINLOG_MASTER_KEY_ROTATION_FAIL_TO_CLEANUP_AUX_KEY = 3811
+ER_NON_BOOLEAN_EXPR_FOR_CHECK_CONSTRAINT = 3812
+ER_COLUMN_CHECK_CONSTRAINT_REFERENCES_OTHER_COLUMN = 3813
+ER_CHECK_CONSTRAINT_NAMED_FUNCTION_IS_NOT_ALLOWED = 3814
+ER_CHECK_CONSTRAINT_FUNCTION_IS_NOT_ALLOWED = 3815
+ER_CHECK_CONSTRAINT_VARIABLES = 3816
+ER_CHECK_CONSTRAINT_ROW_VALUE = 3817
+ER_CHECK_CONSTRAINT_REFERS_AUTO_INCREMENT_COLUMN = 3818
+ER_CHECK_CONSTRAINT_VIOLATED = 3819
+ER_CHECK_CONSTRAINT_REFERS_UNKNOWN_COLUMN = 3820
+ER_CHECK_CONSTRAINT_NOT_FOUND = 3821
+ER_CHECK_CONSTRAINT_DUP_NAME = 3822
+ER_CHECK_CONSTRAINT_CLAUSE_USING_FK_REFER_ACTION_COLUMN = 3823
+WARN_UNENCRYPTED_TABLE_IN_ENCRYPTED_DB = 3824
+ER_INVALID_ENCRYPTION_REQUEST = 3825
+ER_CANNOT_SET_TABLE_ENCRYPTION = 3826
+ER_CANNOT_SET_DATABASE_ENCRYPTION = 3827
+ER_CANNOT_SET_TABLESPACE_ENCRYPTION = 3828
+ER_TABLESPACE_CANNOT_BE_ENCRYPTED = 3829
+ER_TABLESPACE_CANNOT_BE_DECRYPTED = 3830
+ER_TABLESPACE_TYPE_UNKNOWN = 3831
+ER_TARGET_TABLESPACE_UNENCRYPTED = 3832
+ER_CANNOT_USE_ENCRYPTION_CLAUSE = 3833
+ER_INVALID_MULTIPLE_CLAUSES = 3834
+ER_UNSUPPORTED_USE_OF_GRANT_AS = 3835
+ER_UKNOWN_AUTH_ID_OR_ACCESS_DENIED_FOR_GRANT_AS = 3836
+ER_DEPENDENT_BY_FUNCTIONAL_INDEX = 3837
+ER_PLUGIN_NOT_EARLY = 3838
+ER_INNODB_REDO_LOG_ARCHIVE_START_SUBDIR_PATH = 3839
+ER_INNODB_REDO_LOG_ARCHIVE_START_TIMEOUT = 3840
+ER_INNODB_REDO_LOG_ARCHIVE_DIRS_INVALID = 3841
+ER_INNODB_REDO_LOG_ARCHIVE_LABEL_NOT_FOUND = 3842
+ER_INNODB_REDO_LOG_ARCHIVE_DIR_EMPTY = 3843
+ER_INNODB_REDO_LOG_ARCHIVE_NO_SUCH_DIR = 3844
+ER_INNODB_REDO_LOG_ARCHIVE_DIR_CLASH = 3845
+ER_INNODB_REDO_LOG_ARCHIVE_DIR_PERMISSIONS = 3846
+ER_INNODB_REDO_LOG_ARCHIVE_FILE_CREATE = 3847
+ER_INNODB_REDO_LOG_ARCHIVE_ACTIVE = 3848
+ER_INNODB_REDO_LOG_ARCHIVE_INACTIVE = 3849
+ER_INNODB_REDO_LOG_ARCHIVE_FAILED = 3850
+ER_INNODB_REDO_LOG_ARCHIVE_SESSION = 3851
+ER_STD_REGEX_ERROR = 3852
+ER_INVALID_JSON_TYPE = 3853
+ER_CANNOT_CONVERT_STRING = 3854
+ER_DEPENDENT_BY_PARTITION_FUNC = 3855
+ER_WARN_DEPRECATED_FLOAT_AUTO_INCREMENT = 3856
+ER_RPL_CANT_STOP_SLAVE_WHILE_LOCKED_BACKUP = 3857
+ER_WARN_DEPRECATED_FLOAT_DIGITS = 3858
+ER_WARN_DEPRECATED_FLOAT_UNSIGNED = 3859
+ER_WARN_DEPRECATED_INTEGER_DISPLAY_WIDTH = 3860
+ER_WARN_DEPRECATED_ZEROFILL = 3861
+ER_CLONE_DONOR = 3862
+ER_CLONE_PROTOCOL = 3863
+ER_CLONE_DONOR_VERSION = 3864
+ER_CLONE_OS = 3865
+ER_CLONE_PLATFORM = 3866
+ER_CLONE_CHARSET = 3867
+ER_CLONE_CONFIG = 3868
+ER_CLONE_SYS_CONFIG = 3869
+ER_CLONE_PLUGIN_MATCH = 3870
+ER_CLONE_LOOPBACK = 3871
+ER_CLONE_ENCRYPTION = 3872
+ER_CLONE_DISK_SPACE = 3873
+ER_CLONE_IN_PROGRESS = 3874
+ER_CLONE_DISALLOWED = 3875
+ER_CANNOT_GRANT_ROLES_TO_ANONYMOUS_USER = 3876
+ER_SECONDARY_ENGINE_PLUGIN = 3877
+ER_SECOND_PASSWORD_CANNOT_BE_EMPTY = 3878
+ER_DB_ACCESS_DENIED = 3879
+ER_DA_AUTH_ID_WITH_SYSTEM_USER_PRIV_IN_MANDATORY_ROLES = 3880
+ER_DA_RPL_GTID_TABLE_CANNOT_OPEN = 3881
+ER_GEOMETRY_IN_UNKNOWN_LENGTH_UNIT = 3882
+ER_DA_PLUGIN_INSTALL_ERROR = 3883
+ER_NO_SESSION_TEMP = 3884
+ER_DA_UNKNOWN_ERROR_NUMBER = 3885
+ER_COLUMN_CHANGE_SIZE = 3886
+ER_REGEXP_INVALID_CAPTURE_GROUP_NAME = 3887
+ER_DA_SSL_LIBRARY_ERROR = 3888
+ER_SECONDARY_ENGINE = 3889
+ER_SECONDARY_ENGINE_DDL = 3890
+ER_INCORRECT_CURRENT_PASSWORD = 3891
+ER_MISSING_CURRENT_PASSWORD = 3892
+ER_CURRENT_PASSWORD_NOT_REQUIRED = 3893
+ER_PASSWORD_CANNOT_BE_RETAINED_ON_PLUGIN_CHANGE = 3894
+ER_CURRENT_PASSWORD_CANNOT_BE_RETAINED = 3895
+ER_PARTIAL_REVOKES_EXIST = 3896
+ER_CANNOT_GRANT_SYSTEM_PRIV_TO_MANDATORY_ROLE = 3897
+ER_XA_REPLICATION_FILTERS = 3898
+ER_UNSUPPORTED_SQL_MODE = 3899
+ER_REGEXP_INVALID_FLAG = 3900
+ER_PARTIAL_REVOKE_AND_DB_GRANT_BOTH_EXISTS = 3901
+ER_UNIT_NOT_FOUND = 3902
+ER_INVALID_JSON_VALUE_FOR_FUNC_INDEX = 3903
+ER_JSON_VALUE_OUT_OF_RANGE_FOR_FUNC_INDEX = 3904
+ER_EXCEEDED_MV_KEYS_NUM = 3905
+ER_EXCEEDED_MV_KEYS_SPACE = 3906
+ER_FUNCTIONAL_INDEX_DATA_IS_TOO_LONG = 3907
+ER_WRONG_MVI_VALUE = 3908
+ER_WARN_FUNC_INDEX_NOT_APPLICABLE = 3909
+ER_GRP_RPL_UDF_ERROR = 3910
+ER_UPDATE_GTID_PURGED_WITH_GR = 3911
+ER_GROUPING_ON_TIMESTAMP_IN_DST = 3912
+ER_TABLE_NAME_CAUSES_TOO_LONG_PATH = 3913
+ER_AUDIT_LOG_INSUFFICIENT_PRIVILEGE = 3914
+OBSOLETE_ER_AUDIT_LOG_PASSWORD_HAS_BEEN_COPIED = 3915
+ER_DA_GRP_RPL_STARTED_AUTO_REJOIN = 3916
+ER_SYSVAR_CHANGE_DURING_QUERY = 3917
+ER_GLOBSTAT_CHANGE_DURING_QUERY = 3918
+ER_GRP_RPL_MESSAGE_SERVICE_INIT_FAILURE = 3919
+ER_CHANGE_MASTER_WRONG_COMPRESSION_ALGORITHM_CLIENT = 3920
+ER_CHANGE_MASTER_WRONG_COMPRESSION_LEVEL_CLIENT = 3921
+ER_WRONG_COMPRESSION_ALGORITHM_CLIENT = 3922
+ER_WRONG_COMPRESSION_LEVEL_CLIENT = 3923
+ER_CHANGE_MASTER_WRONG_COMPRESSION_ALGORITHM_LIST_CLIENT = 3924
+ER_CLIENT_PRIVILEGE_CHECKS_USER_CANNOT_BE_ANONYMOUS = 3925
+ER_CLIENT_PRIVILEGE_CHECKS_USER_DOES_NOT_EXIST = 3926
+ER_CLIENT_PRIVILEGE_CHECKS_USER_CORRUPT = 3927
+ER_CLIENT_PRIVILEGE_CHECKS_USER_NEEDS_RPL_APPLIER_PRIV = 3928
+ER_WARN_DA_PRIVILEGE_NOT_REGISTERED = 3929
+ER_CLIENT_KEYRING_UDF_KEY_INVALID = 3930
+ER_CLIENT_KEYRING_UDF_KEY_TYPE_INVALID = 3931
+ER_CLIENT_KEYRING_UDF_KEY_TOO_LONG = 3932
+ER_CLIENT_KEYRING_UDF_KEY_TYPE_TOO_LONG = 3933
+ER_JSON_SCHEMA_VALIDATION_ERROR_WITH_DETAILED_REPORT = 3934
+ER_DA_UDF_INVALID_CHARSET_SPECIFIED = 3935
+ER_DA_UDF_INVALID_CHARSET = 3936
+ER_DA_UDF_INVALID_COLLATION = 3937
+ER_DA_UDF_INVALID_EXTENSION_ARGUMENT_TYPE = 3938
+ER_MULTIPLE_CONSTRAINTS_WITH_SAME_NAME = 3939
+ER_CONSTRAINT_NOT_FOUND = 3940
+ER_ALTER_CONSTRAINT_ENFORCEMENT_NOT_SUPPORTED = 3941
+ER_TABLE_VALUE_CONSTRUCTOR_MUST_HAVE_COLUMNS = 3942
+ER_TABLE_VALUE_CONSTRUCTOR_CANNOT_HAVE_DEFAULT = 3943
+ER_CLIENT_QUERY_FAILURE_INVALID_NON_ROW_FORMAT = 3944
+ER_REQUIRE_ROW_FORMAT_INVALID_VALUE = 3945
+ER_FAILED_TO_DETERMINE_IF_ROLE_IS_MANDATORY = 3946
+ER_FAILED_TO_FETCH_MANDATORY_ROLE_LIST = 3947
+ER_CLIENT_LOCAL_FILES_DISABLED = 3948
+ER_IMP_INCOMPATIBLE_CFG_VERSION = 3949
+ER_DA_OOM = 3950
+ER_DA_UDF_INVALID_ARGUMENT_TO_SET_CHARSET = 3951
+ER_DA_UDF_INVALID_RETURN_TYPE_TO_SET_CHARSET = 3952
+ER_MULTIPLE_INTO_CLAUSES = 3953
+ER_MISPLACED_INTO = 3954
+ER_USER_ACCESS_DENIED_FOR_USER_ACCOUNT_BLOCKED_BY_PASSWORD_LOCK = 3955
+ER_WARN_DEPRECATED_YEAR_UNSIGNED = 3956
+ER_CLONE_NETWORK_PACKET = 3957
+ER_SDI_OPERATION_FAILED_MISSING_RECORD = 3958
+ER_DEPENDENT_BY_CHECK_CONSTRAINT = 3959
+ER_GRP_OPERATION_NOT_ALLOWED_GR_MUST_STOP = 3960
+ER_WARN_DEPRECATED_JSON_TABLE_ON_ERROR_ON_EMPTY = 3961
+ER_WARN_DEPRECATED_INNER_INTO = 3962
+ER_WARN_DEPRECATED_VALUES_FUNCTION_ALWAYS_NULL = 3963
+ER_WARN_DEPRECATED_SQL_CALC_FOUND_ROWS = 3964
+ER_WARN_DEPRECATED_FOUND_ROWS = 3965
+ER_MISSING_JSON_VALUE = 3966
+ER_MULTIPLE_JSON_VALUES = 3967
+ER_HOSTNAME_TOO_LONG = 3968
+ER_WARN_CLIENT_DEPRECATED_PARTITION_PREFIX_KEY = 3969
+ER_GROUP_REPLICATION_USER_EMPTY_MSG = 3970
+ER_GROUP_REPLICATION_USER_MANDATORY_MSG = 3971
+ER_GROUP_REPLICATION_PASSWORD_LENGTH = 3972
+ER_SUBQUERY_TRANSFORM_REJECTED = 3973
+ER_DA_GRP_RPL_RECOVERY_ENDPOINT_FORMAT = 3974
+ER_DA_GRP_RPL_RECOVERY_ENDPOINT_INVALID = 3975
+ER_WRONG_VALUE_FOR_VAR_PLUS_ACTIONABLE_PART = 3976
+ER_STATEMENT_NOT_ALLOWED_AFTER_START_TRANSACTION = 3977
+ER_FOREIGN_KEY_WITH_ATOMIC_CREATE_SELECT = 3978
+ER_NOT_ALLOWED_WITH_START_TRANSACTION = 3979
+ER_INVALID_JSON_ATTRIBUTE = 3980
+ER_ENGINE_ATTRIBUTE_NOT_SUPPORTED = 3981
+ER_INVALID_USER_ATTRIBUTE_JSON = 3982
+ER_INNODB_REDO_DISABLED = 3983
+ER_INNODB_REDO_ARCHIVING_ENABLED = 3984
+ER_MDL_OUT_OF_RESOURCES = 3985
+ER_IMPLICIT_COMPARISON_FOR_JSON = 3986
+ER_FUNCTION_DOES_NOT_SUPPORT_CHARACTER_SET = 3987
+ER_IMPOSSIBLE_STRING_CONVERSION = 3988
+ER_SCHEMA_READ_ONLY = 3989
+ER_RPL_ASYNC_RECONNECT_GTID_MODE_OFF = 3990
+ER_RPL_ASYNC_RECONNECT_AUTO_POSITION_OFF = 3991
+ER_DISABLE_GTID_MODE_REQUIRES_ASYNC_RECONNECT_OFF = 3992
+ER_DISABLE_AUTO_POSITION_REQUIRES_ASYNC_RECONNECT_OFF = 3993
+ER_INVALID_PARAMETER_USE = 3994
+ER_CHARACTER_SET_MISMATCH = 3995
+ER_WARN_VAR_VALUE_CHANGE_NOT_SUPPORTED = 3996
+ER_INVALID_TIME_ZONE_INTERVAL = 3997
+ER_INVALID_CAST = 3998
+ER_HYPERGRAPH_NOT_SUPPORTED_YET = 3999
+ER_WARN_HYPERGRAPH_EXPERIMENTAL = 4000
+ER_DA_NO_ERROR_LOG_PARSER_CONFIGURED = 4001
+ER_DA_ERROR_LOG_TABLE_DISABLED = 4002
+ER_DA_ERROR_LOG_MULTIPLE_FILTERS = 4003
+ER_DA_CANT_OPEN_ERROR_LOG = 4004
+ER_USER_REFERENCED_AS_DEFINER = 4005
+ER_CANNOT_USER_REFERENCED_AS_DEFINER = 4006
+ER_REGEX_NUMBER_TOO_BIG = 4007
+ER_SPVAR_NONINTEGER_TYPE = 4008
+WARN_UNSUPPORTED_ACL_TABLES_READ = 4009
+ER_BINLOG_UNSAFE_ACL_TABLE_READ_IN_DML_DDL = 4010
+ER_STOP_REPLICA_MONITOR_IO_THREAD_TIMEOUT = 4011
+ER_STARTING_REPLICA_MONITOR_IO_THREAD = 4012
+ER_CANT_USE_ANONYMOUS_TO_GTID_WITH_GTID_MODE_NOT_ON = 4013
+ER_CANT_COMBINE_ANONYMOUS_TO_GTID_AND_AUTOPOSITION = 4014
+ER_ASSIGN_GTIDS_TO_ANONYMOUS_TRANSACTIONS_REQUIRES_GTID_MODE_ON = 4015
+ER_SQL_REPLICA_SKIP_COUNTER_USED_WITH_GTID_MODE_ON = 4016
+ER_USING_ASSIGN_GTIDS_TO_ANONYMOUS_TRANSACTIONS_AS_LOCAL_OR_UUID = 4017
+ER_CANT_SET_ANONYMOUS_TO_GTID_AND_WAIT_UNTIL_SQL_THD_AFTER_GTIDS = 4018
+ER_CANT_SET_SQL_AFTER_OR_BEFORE_GTIDS_WITH_ANONYMOUS_TO_GTID = 4019
+ER_ANONYMOUS_TO_GTID_UUID_SAME_AS_GROUP_NAME = 4020
+ER_CANT_USE_SAME_UUID_AS_GROUP_NAME = 4021
+ER_GRP_RPL_RECOVERY_CHANNEL_STILL_RUNNING = 4022
+ER_INNODB_INVALID_AUTOEXTEND_SIZE_VALUE = 4023
+ER_INNODB_INCOMPATIBLE_WITH_TABLESPACE = 4024
+ER_INNODB_AUTOEXTEND_SIZE_OUT_OF_RANGE = 4025
+ER_CANNOT_USE_AUTOEXTEND_SIZE_CLAUSE = 4026
+ER_ROLE_GRANTED_TO_ITSELF = 4027
+ER_TABLE_MUST_HAVE_A_VISIBLE_COLUMN = 4028
+ER_INNODB_COMPRESSION_FAILURE = 4029
+ER_WARN_ASYNC_CONN_FAILOVER_NETWORK_NAMESPACE = 4030
+ER_CLIENT_INTERACTION_TIMEOUT = 4031
+ER_INVALID_CAST_TO_GEOMETRY = 4032
+ER_INVALID_CAST_POLYGON_RING_DIRECTION = 4033
+ER_GIS_DIFFERENT_SRIDS_AGGREGATION = 4034
+ER_RELOAD_KEYRING_FAILURE = 4035
+ER_SDI_GET_KEYS_INVALID_TABLESPACE = 4036
+ER_CHANGE_RPL_SRC_WRONG_COMPRESSION_ALGORITHM_SIZE = 4037
+ER_WARN_DEPRECATED_TLS_VERSION_FOR_CHANNEL_CLI = 4038
+ER_CANT_USE_SAME_UUID_AS_VIEW_CHANGE_UUID = 4039
+ER_ANONYMOUS_TO_GTID_UUID_SAME_AS_VIEW_CHANGE_UUID = 4040
+ER_GRP_RPL_VIEW_CHANGE_UUID_FAIL_GET_VARIABLE = 4041
+ER_WARN_ADUIT_LOG_MAX_SIZE_AND_PRUNE_SECONDS = 4042
+ER_WARN_ADUIT_LOG_MAX_SIZE_CLOSE_TO_ROTATE_ON_SIZE = 4043
+ER_KERBEROS_CREATE_USER = 4044
+ER_INSTALL_PLUGIN_CONFLICT_CLIENT = 4045
+ER_DA_ERROR_LOG_COMPONENT_FLUSH_FAILED = 4046
+ER_WARN_SQL_AFTER_MTS_GAPS_GAP_NOT_CALCULATED = 4047
+ER_INVALID_ASSIGNMENT_TARGET = 4048
+ER_OPERATION_NOT_ALLOWED_ON_GR_SECONDARY = 4049
+ER_GRP_RPL_FAILOVER_CHANNEL_STATUS_PROPAGATION = 4050
+ER_WARN_AUDIT_LOG_FORMAT_UNIX_TIMESTAMP_ONLY_WHEN_JSON = 4051
+ER_INVALID_MFA_PLUGIN_SPECIFIED = 4052
+ER_IDENTIFIED_BY_UNSUPPORTED = 4053
+ER_INVALID_PLUGIN_FOR_REGISTRATION = 4054
+ER_PLUGIN_REQUIRES_REGISTRATION = 4055
+ER_MFA_METHOD_EXISTS = 4056
+ER_MFA_METHOD_NOT_EXISTS = 4057
+ER_AUTHENTICATION_POLICY_MISMATCH = 4058
+ER_PLUGIN_REGISTRATION_DONE = 4059
+ER_INVALID_USER_FOR_REGISTRATION = 4060
+ER_USER_REGISTRATION_FAILED = 4061
+ER_MFA_METHODS_INVALID_ORDER = 4062
+ER_MFA_METHODS_IDENTICAL = 4063
+ER_INVALID_MFA_OPERATIONS_FOR_PASSWORDLESS_USER = 4064
+ER_CHANGE_REPLICATION_SOURCE_NO_OPTIONS_FOR_GTID_ONLY = 4065
+ER_CHANGE_REP_SOURCE_CANT_DISABLE_REQ_ROW_FORMAT_WITH_GTID_ONLY = 4066
+ER_CHANGE_REP_SOURCE_CANT_DISABLE_AUTO_POSITION_WITH_GTID_ONLY = 4067
+ER_CHANGE_REP_SOURCE_CANT_DISABLE_GTID_ONLY_WITHOUT_POSITIONS = 4068
+ER_CHANGE_REP_SOURCE_CANT_DISABLE_AUTO_POS_WITHOUT_POSITIONS = 4069
+ER_CHANGE_REP_SOURCE_GR_CHANNEL_WITH_GTID_MODE_NOT_ON = 4070
+ER_CANT_USE_GTID_ONLY_WITH_GTID_MODE_NOT_ON = 4071
+ER_WARN_C_DISABLE_GTID_ONLY_WITH_SOURCE_AUTO_POS_INVALID_POS = 4072
+ER_DA_SSL_FIPS_MODE_ERROR = 4073
+CR_UNKNOWN_ERROR = 2000
+CR_SOCKET_CREATE_ERROR = 2001
+CR_CONNECTION_ERROR = 2002
+CR_CONN_HOST_ERROR = 2003
+CR_IPSOCK_ERROR = 2004
+CR_UNKNOWN_HOST = 2005
+CR_SERVER_GONE_ERROR = 2006
+CR_VERSION_ERROR = 2007
+CR_OUT_OF_MEMORY = 2008
+CR_WRONG_HOST_INFO = 2009
+CR_LOCALHOST_CONNECTION = 2010
+CR_TCP_CONNECTION = 2011
+CR_SERVER_HANDSHAKE_ERR = 2012
+CR_SERVER_LOST = 2013
+CR_COMMANDS_OUT_OF_SYNC = 2014
+CR_NAMEDPIPE_CONNECTION = 2015
+CR_NAMEDPIPEWAIT_ERROR = 2016
+CR_NAMEDPIPEOPEN_ERROR = 2017
+CR_NAMEDPIPESETSTATE_ERROR = 2018
+CR_CANT_READ_CHARSET = 2019
+CR_NET_PACKET_TOO_LARGE = 2020
+CR_EMBEDDED_CONNECTION = 2021
+CR_PROBE_SLAVE_STATUS = 2022
+CR_PROBE_SLAVE_HOSTS = 2023
+CR_PROBE_SLAVE_CONNECT = 2024
+CR_PROBE_MASTER_CONNECT = 2025
+CR_SSL_CONNECTION_ERROR = 2026
+CR_MALFORMED_PACKET = 2027
+CR_WRONG_LICENSE = 2028
+CR_NULL_POINTER = 2029
+CR_NO_PREPARE_STMT = 2030
+CR_PARAMS_NOT_BOUND = 2031
+CR_DATA_TRUNCATED = 2032
+CR_NO_PARAMETERS_EXISTS = 2033
+CR_INVALID_PARAMETER_NO = 2034
+CR_INVALID_BUFFER_USE = 2035
+CR_UNSUPPORTED_PARAM_TYPE = 2036
+CR_SHARED_MEMORY_CONNECTION = 2037
+CR_SHARED_MEMORY_CONNECT_REQUEST_ERROR = 2038
+CR_SHARED_MEMORY_CONNECT_ANSWER_ERROR = 2039
+CR_SHARED_MEMORY_CONNECT_FILE_MAP_ERROR = 2040
+CR_SHARED_MEMORY_CONNECT_MAP_ERROR = 2041
+CR_SHARED_MEMORY_FILE_MAP_ERROR = 2042
+CR_SHARED_MEMORY_MAP_ERROR = 2043
+CR_SHARED_MEMORY_EVENT_ERROR = 2044
+CR_SHARED_MEMORY_CONNECT_ABANDONED_ERROR = 2045
+CR_SHARED_MEMORY_CONNECT_SET_ERROR = 2046
+CR_CONN_UNKNOW_PROTOCOL = 2047
+CR_INVALID_CONN_HANDLE = 2048
+CR_UNUSED_1 = 2049
+CR_FETCH_CANCELED = 2050
+CR_NO_DATA = 2051
+CR_NO_STMT_METADATA = 2052
+CR_NO_RESULT_SET = 2053
+CR_NOT_IMPLEMENTED = 2054
+CR_SERVER_LOST_EXTENDED = 2055
+CR_STMT_CLOSED = 2056
+CR_NEW_STMT_METADATA = 2057
+CR_ALREADY_CONNECTED = 2058
+CR_AUTH_PLUGIN_CANNOT_LOAD = 2059
+CR_DUPLICATE_CONNECTION_ATTR = 2060
+CR_AUTH_PLUGIN_ERR = 2061
+CR_INSECURE_API_ERR = 2062
+CR_FILE_NAME_TOO_LONG = 2063
+CR_SSL_FIPS_MODE_ERR = 2064
+CR_DEPRECATED_COMPRESSION_NOT_SUPPORTED = 2065
+CR_COMPRESSION_WRONGLY_CONFIGURED = 2066
+CR_KERBEROS_USER_NOT_FOUND = 2067
+CR_LOAD_DATA_LOCAL_INFILE_REJECTED = 2068
+CR_LOAD_DATA_LOCAL_INFILE_REALPATH_FAIL = 2069
+CR_DNS_SRV_LOOKUP_FAILED = 2070
+CR_MANDATORY_TRACKER_NOT_FOUND = 2071
+CR_INVALID_FACTOR_NO = 2072
+# End MySQL Errors
+
+# Start X Plugin Errors
+ER_X_BAD_MESSAGE = 5000
+ER_X_CAPABILITIES_PREPARE_FAILED = 5001
+ER_X_CAPABILITY_NOT_FOUND = 5002
+ER_X_INVALID_PROTOCOL_DATA = 5003
+ER_X_BAD_CONNECTION_SESSION_ATTRIBUTE_VALUE_LENGTH = 5004
+ER_X_BAD_CONNECTION_SESSION_ATTRIBUTE_KEY_LENGTH = 5005
+ER_X_BAD_CONNECTION_SESSION_ATTRIBUTE_EMPTY_KEY = 5006
+ER_X_BAD_CONNECTION_SESSION_ATTRIBUTE_LENGTH = 5007
+ER_X_BAD_CONNECTION_SESSION_ATTRIBUTE_TYPE = 5008
+ER_X_CAPABILITY_SET_NOT_ALLOWED = 5009
+ER_X_SERVICE_ERROR = 5010
+ER_X_SESSION = 5011
+ER_X_INVALID_ARGUMENT = 5012
+ER_X_MISSING_ARGUMENT = 5013
+ER_X_BAD_INSERT_DATA = 5014
+ER_X_CMD_NUM_ARGUMENTS = 5015
+ER_X_CMD_ARGUMENT_TYPE = 5016
+ER_X_CMD_ARGUMENT_VALUE = 5017
+ER_X_BAD_UPSERT_DATA = 5018
+ER_X_DUPLICATED_CAPABILITIES = 5019
+ER_X_CMD_ARGUMENT_OBJECT_EMPTY = 5020
+ER_X_CMD_INVALID_ARGUMENT = 5021
+ER_X_BAD_UPDATE_DATA = 5050
+ER_X_BAD_TYPE_OF_UPDATE = 5051
+ER_X_BAD_COLUMN_TO_UPDATE = 5052
+ER_X_BAD_MEMBER_TO_UPDATE = 5053
+ER_X_BAD_STATEMENT_ID = 5110
+ER_X_BAD_CURSOR_ID = 5111
+ER_X_BAD_SCHEMA = 5112
+ER_X_BAD_TABLE = 5113
+ER_X_BAD_PROJECTION = 5114
+ER_X_DOC_ID_MISSING = 5115
+ER_X_DUPLICATE_ENTRY = 5116
+ER_X_DOC_REQUIRED_FIELD_MISSING = 5117
+ER_X_PROJ_BAD_KEY_NAME = 5120
+ER_X_BAD_DOC_PATH = 5121
+ER_X_CURSOR_EXISTS = 5122
+ER_X_CURSOR_REACHED_EOF = 5123
+ER_X_PREPARED_STATMENT_CAN_HAVE_ONE_CURSOR = 5131
+ER_X_PREPARED_EXECUTE_ARGUMENT_NOT_SUPPORTED = 5133
+ER_X_PREPARED_EXECUTE_ARGUMENT_CONSISTENCY = 5134
+ER_X_EXPR_BAD_OPERATOR = 5150
+ER_X_EXPR_BAD_NUM_ARGS = 5151
+ER_X_EXPR_MISSING_ARG = 5152
+ER_X_EXPR_BAD_TYPE_VALUE = 5153
+ER_X_EXPR_BAD_VALUE = 5154
+ER_X_INVALID_COLLECTION = 5156
+ER_X_INVALID_ADMIN_COMMAND = 5157
+ER_X_EXPECT_NOT_OPEN = 5158
+ER_X_EXPECT_NO_ERROR_FAILED = 5159
+ER_X_EXPECT_BAD_CONDITION = 5160
+ER_X_EXPECT_BAD_CONDITION_VALUE = 5161
+ER_X_INVALID_NAMESPACE = 5162
+ER_X_BAD_NOTICE = 5163
+ER_X_CANNOT_DISABLE_NOTICE = 5164
+ER_X_BAD_CONFIGURATION = 5165
+ER_X_MYSQLX_ACCOUNT_MISSING_PERMISSIONS = 5167
+ER_X_EXPECT_FIELD_EXISTS_FAILED = 5168
+ER_X_BAD_LOCKING = 5169
+ER_X_FRAME_COMPRESSION_DISABLED = 5170
+ER_X_DECOMPRESSION_FAILED = 5171
+ER_X_BAD_COMPRESSED_FRAME = 5174
+ER_X_CAPABILITY_COMPRESSION_INVALID_ALGORITHM = 5175
+ER_X_CAPABILITY_COMPRESSION_INVALID_SERVER_STYLE = 5176
+ER_X_CAPABILITY_COMPRESSION_INVALID_CLIENT_STYLE = 5177
+ER_X_CAPABILITY_COMPRESSION_INVALID_OPTION = 5178
+ER_X_CAPABILITY_COMPRESSION_MISSING_REQUIRED_FIELDS = 5179
+ER_X_DOCUMENT_DOESNT_MATCH_EXPECTED_SCHEMA = 5180
+ER_X_COLLECTION_OPTION_DOESNT_EXISTS = 5181
+ER_X_INVALID_VALIDATION_SCHEMA = 5182
+# End X Plugin Errors
\ No newline at end of file
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/errors.py b/monEnvTP/lib/python3.8/site-packages/mysqlx/errors.py
new file mode 100644
index 0000000000000000000000000000000000000000..1c4d8431f61870ad40872f1048f6bc6f968fec03
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysqlx/errors.py
@@ -0,0 +1,269 @@
+# Copyright (c) 2016, 2020, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""Implementation of the Python Database API Specification v2.0 exceptions."""
+
+from struct import unpack as struct_unpack
+
+from .locales import get_client_error
+
+
+class Error(Exception):
+ """Exception that is base class for all other error exceptions."""
+ def __init__(self, msg=None, errno=None, values=None, sqlstate=None):
+ super(Error, self).__init__()
+ self.msg = msg
+ self._full_msg = self.msg
+ self.errno = errno or -1
+ self.sqlstate = sqlstate
+
+ if not self.msg and (2000 <= self.errno < 3000):
+ self.msg = get_client_error(self.errno)
+ if values is not None:
+ try:
+ self.msg = self.msg % values
+ except TypeError as err:
+ self.msg = "{0} (Warning: {1})".format(self.msg, str(err))
+ elif not self.msg:
+ self._full_msg = self.msg = "Unknown error"
+
+ if self.msg and self.errno != -1:
+ fields = {
+ "errno": self.errno,
+ "msg": self.msg
+ }
+ if self.sqlstate:
+ fmt = "{errno} ({state}): {msg}"
+ fields["state"] = self.sqlstate
+ else:
+ fmt = "{errno}: {msg}"
+ self._full_msg = fmt.format(**fields)
+
+ self.args = (self.errno, self._full_msg, self.sqlstate)
+
+ def __str__(self):
+ return self._full_msg
+
+
+class InterfaceError(Error):
+ """Exception for errors related to the interface."""
+ pass
+
+
+class DatabaseError(Error):
+ """Exception for errors related to the database."""
+ pass
+
+
+class InternalError(DatabaseError):
+ """Exception for errors internal database errors."""
+ pass
+
+
+class OperationalError(DatabaseError):
+ """Exception for errors related to the database's operation."""
+ pass
+
+
+class ProgrammingError(DatabaseError):
+ """Exception for errors programming errors."""
+ pass
+
+
+class IntegrityError(DatabaseError):
+ """Exception for errors regarding relational integrity."""
+ pass
+
+
+class DataError(DatabaseError):
+ """Exception for errors reporting problems with processed data."""
+ pass
+
+
+class NotSupportedError(DatabaseError):
+ """Exception for errors when an unsupported database feature was used."""
+ pass
+
+
+class PoolError(Error):
+ """Exception for errors relating to connection pooling."""
+ pass
+
+# pylint: disable=W0622
+class TimeoutError(Error):
+ """Exception for errors relating to connection timeout."""
+ pass
+
+
+def intread(buf):
+ """Unpacks the given buffer to an integer."""
+ try:
+ if isinstance(buf, int):
+ return buf
+ length = len(buf)
+ if length == 1:
+ return buf[0]
+ elif length <= 4:
+ tmp = buf + b"\x00" * (4 - length)
+ return struct_unpack("<I", tmp)[0]
+ tmp = buf + b"\x00" * (8 - length)
+ return struct_unpack("<Q", tmp)[0]
+ except:
+ raise
+
+
+def read_int(buf, size):
+ """Read an integer from buffer.
+
+ Returns a tuple (truncated buffer, int).
+ """
+
+ try:
+ res = intread(buf[0:size])
+ except:
+ raise
+
+ return (buf[size:], res)
+
+
+def read_bytes(buf, size):
+ """Reads bytes from a buffer.
+
+ Returns a tuple with buffer less the read bytes, and the bytes.
+ """
+ res = buf[0:size]
+ return (buf[size:], res)
+
+
+def get_mysql_exception(errno, msg=None, sqlstate=None):
+ """Get the exception matching the MySQL error.
+
+ This function will return an exception based on the SQLState. The given
+ message will be passed on in the returned exception.
+
+ Returns an Exception.
+ """
+ try:
+ return _ERROR_EXCEPTIONS[errno](msg=msg, errno=errno,
+ sqlstate=sqlstate)
+ except KeyError:
+ # Error was not mapped to particular exception
+ pass
+
+ if not sqlstate:
+ return DatabaseError(msg=msg, errno=errno)
+
+ try:
+ return _SQLSTATE_CLASS_EXCEPTION[sqlstate[0:2]](msg=msg, errno=errno,
+ sqlstate=sqlstate)
+ except KeyError:
+ # Return default InterfaceError
+ return DatabaseError(msg=msg, errno=errno, sqlstate=sqlstate)
+
+
+def get_exception(packet):
+ """Returns an exception object based on the MySQL error.
+
+ Returns an exception object based on the MySQL error in the given
+ packet.
+
+ Returns an Error-Object.
+ """
+ errno = errmsg = None
+
+ try:
+ if packet[4] != 255:
+ raise ValueError("Packet is not an error packet")
+ except IndexError as err:
+ return InterfaceError("Failed getting Error information ({0})"
+ "".format(err))
+
+ sqlstate = None
+ try:
+ packet = packet[5:]
+ packet, errno = read_int(packet, 2)
+ if packet[0] != 35:
+ # Error without SQLState
+ if isinstance(packet, (bytes, bytearray)):
+ errmsg = packet.decode("utf8")
+ else:
+ errmsg = packet
+ else:
+ packet, sqlstate = read_bytes(packet[1:], 5)
+ sqlstate = sqlstate.decode("utf8")
+ errmsg = packet.decode("utf8")
+ except Exception as err: # pylint: disable=W0703
+ return InterfaceError("Failed getting Error information ({0})"
+ "".format(err))
+ else:
+ return get_mysql_exception(errno, errmsg, sqlstate)
+
+
+_SQLSTATE_CLASS_EXCEPTION = {
+ "02": DataError, # no data
+ "07": DatabaseError, # dynamic SQL error
+ "08": OperationalError, # connection exception
+ "0A": NotSupportedError, # feature not supported
+ "21": DataError, # cardinality violation
+ "22": DataError, # data exception
+ "23": IntegrityError, # integrity constraint violation
+ "24": ProgrammingError, # invalid cursor state
+ "25": ProgrammingError, # invalid transaction state
+ "26": ProgrammingError, # invalid SQL statement name
+ "27": ProgrammingError, # triggered data change violation
+ "28": ProgrammingError, # invalid authorization specification
+ "2A": ProgrammingError, # direct SQL syntax error or access rule violation
+ "2B": DatabaseError, # dependent privilege descriptors still exist
+ "2C": ProgrammingError, # invalid character set name
+ "2D": DatabaseError, # invalid transaction termination
+ "2E": DatabaseError, # invalid connection name
+ "33": DatabaseError, # invalid SQL descriptor name
+ "34": ProgrammingError, # invalid cursor name
+ "35": ProgrammingError, # invalid condition number
+ "37": ProgrammingError, # dynamic SQL syntax error or access rule violation
+ "3C": ProgrammingError, # ambiguous cursor name
+ "3D": ProgrammingError, # invalid catalog name
+ "3F": ProgrammingError, # invalid schema name
+ "40": InternalError, # transaction rollback
+ "42": ProgrammingError, # syntax error or access rule violation
+ "44": InternalError, # with check option violation
+ "HZ": OperationalError, # remote database access
+ "XA": IntegrityError,
+ "0K": OperationalError,
+ "HY": DatabaseError, # default when no SQLState provided by MySQL server
+}
+
+_ERROR_EXCEPTIONS = {
+ 1243: ProgrammingError,
+ 1210: ProgrammingError,
+ 2002: InterfaceError,
+ 2013: OperationalError,
+ 2049: NotSupportedError,
+ 2055: OperationalError,
+ 2061: InterfaceError,
+}
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/expr.py b/monEnvTP/lib/python3.8/site-packages/mysqlx/expr.py
new file mode 100644
index 0000000000000000000000000000000000000000..4b54a87b89ab34604b9b9edc1176e8f67239db8d
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysqlx/expr.py
@@ -0,0 +1,1278 @@
+# Copyright (c) 2016, 2019, Oracle and/or its affiliates. All rights reserved.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""Expression Parser."""
+
+from .helpers import BYTE_TYPES, get_item_or_attr
+from .dbdoc import DbDoc
+from .protobuf import Message, mysqlxpb_enum
+
+
+# pylint: disable=C0103,C0111
+class TokenType(object):
+ NOT = 1
+ AND = 2
+ OR = 3
+ XOR = 4
+ IS = 5
+ LPAREN = 6
+ RPAREN = 7
+ LSQBRACKET = 8
+ RSQBRACKET = 9
+ BETWEEN = 10
+ TRUE = 11
+ NULL = 12
+ FALSE = 13
+ IN = 14
+ LIKE = 15
+ INTERVAL = 16
+ REGEXP = 17
+ ESCAPE = 18
+ IDENT = 19
+ LSTRING = 20
+ LNUM = 21
+ DOT = 22
+ DOLLAR = 23
+ COMMA = 24
+ EQ = 25
+ NE = 26
+ GT = 27
+ GE = 28
+ LT = 29
+ LE = 30
+ BITAND = 31
+ BITOR = 32
+ BITXOR = 33
+ LSHIFT = 34
+ RSHIFT = 35
+ PLUS = 36
+ MINUS = 37
+ MUL = 38
+ DIV = 39
+ HEX = 40
+ BIN = 41
+ NEG = 42
+ BANG = 43
+ MICROSECOND = 44
+ SECOND = 45
+ MINUTE = 46
+ HOUR = 47
+ DAY = 48
+ WEEK = 49
+ MONTH = 50
+ QUARTER = 51
+ YEAR = 52
+ EROTEME = 53
+ DOUBLESTAR = 54
+ MOD = 55
+ COLON = 56
+ OROR = 57
+ ANDAND = 58
+ LCURLY = 59
+ RCURLY = 60
+ CAST = 61
+ DOTSTAR = 62
+ ORDERBY_ASC = 63
+ ORDERBY_DESC = 64
+ AS = 65
+ ARROW = 66
+ QUOTE = 67
+ BINARY = 68
+ DATETIME = 69
+ TIME = 70
+ CHAR = 71
+ DATE = 72
+ DECIMAL = 73
+ SIGNED = 74
+ INTEGER = 75
+ UNSIGNED = 76
+ JSON = 77
+ SECOND_MICROSECOND = 78
+ MINUTE_MICROSECOND = 79
+ MINUTE_SECOND = 80
+ HOUR_MICROSECOND = 81
+ HOUR_SECOND = 82
+ HOUR_MINUTE = 83
+ DAY_MICROSECOND = 84
+ DAY_SECOND = 85
+ DAY_MINUTE = 86
+ DAY_HOUR = 87
+ YEAR_MONTH = 88
+ OVERLAPS = 89
+# pylint: enable=C0103
+
+_INTERVAL_UNITS = set([
+ TokenType.MICROSECOND,
+ TokenType.SECOND,
+ TokenType.MINUTE,
+ TokenType.HOUR,
+ TokenType.DAY,
+ TokenType.WEEK,
+ TokenType.MONTH,
+ TokenType.QUARTER,
+ TokenType.YEAR,
+ TokenType.SECOND_MICROSECOND,
+ TokenType.MINUTE_MICROSECOND,
+ TokenType.MINUTE_SECOND,
+ TokenType.HOUR_MICROSECOND,
+ TokenType.HOUR_SECOND,
+ TokenType.HOUR_MINUTE,
+ TokenType.DAY_MICROSECOND,
+ TokenType.DAY_SECOND,
+ TokenType.DAY_MINUTE,
+ TokenType.DAY_HOUR,
+ TokenType.YEAR_MONTH])
+
+# map of reserved word to token type
+_RESERVED_WORDS = {
+ "and": TokenType.AND,
+ "or": TokenType.OR,
+ "xor": TokenType.XOR,
+ "is": TokenType.IS,
+ "not": TokenType.NOT,
+ "like": TokenType.LIKE,
+ "in": TokenType.IN,
+ "overlaps": TokenType.OVERLAPS,
+ "regexp": TokenType.REGEXP,
+ "between": TokenType.BETWEEN,
+ "interval": TokenType.INTERVAL,
+ "escape": TokenType.ESCAPE,
+ "cast": TokenType.CAST,
+ "div": TokenType.DIV,
+ "hex": TokenType.HEX,
+ "bin": TokenType.BIN,
+ "true": TokenType.TRUE,
+ "false": TokenType.FALSE,
+ "null": TokenType.NULL,
+ "second": TokenType.SECOND,
+ "minute": TokenType.MINUTE,
+ "hour": TokenType.HOUR,
+ "day": TokenType.DAY,
+ "week": TokenType.WEEK,
+ "month": TokenType.MONTH,
+ "quarter": TokenType.QUARTER,
+ "year": TokenType.YEAR,
+ "microsecond": TokenType.MICROSECOND,
+ "asc": TokenType.ORDERBY_ASC,
+ "desc": TokenType.ORDERBY_DESC,
+ "as": TokenType.AS,
+ "binary": TokenType.BINARY,
+ "datetime": TokenType.DATETIME,
+ "time": TokenType.TIME,
+ "char": TokenType.CHAR,
+ "date": TokenType.DATE,
+ "decimal": TokenType.DECIMAL,
+ "signed": TokenType.SIGNED,
+ "unsigned": TokenType.UNSIGNED,
+ "integer": TokenType.INTEGER,
+ "json": TokenType.JSON,
+ "second_microsecond": TokenType.SECOND_MICROSECOND,
+ "minute_microsecond": TokenType.MINUTE_MICROSECOND,
+ "minute_second": TokenType.MINUTE_SECOND,
+ "hour_microsecond": TokenType.HOUR_MICROSECOND,
+ "hour_second": TokenType.HOUR_SECOND,
+ "hour_minute": TokenType.HOUR_MINUTE,
+ "day_microsecond": TokenType.DAY_MICROSECOND,
+ "day_second": TokenType.DAY_SECOND,
+ "day_minute": TokenType.DAY_MINUTE,
+ "day_hour": TokenType.DAY_HOUR,
+ "year_month": TokenType.YEAR_MONTH
+}
+
+_SQL_FUNTION_RESERVED_WORDS_COLLISION = {
+ "binary": TokenType.BINARY,
+ "cast": TokenType.CAST,
+ "char": TokenType.CHAR,
+ "date": TokenType.DATE,
+ "decimal": TokenType.DECIMAL,
+ "signed": TokenType.SIGNED,
+ "time": TokenType.TIME,
+ "unsigned": TokenType.UNSIGNED,
+}
+
+_OPERATORS = {
+ "=": "==",
+ "==": "==",
+ "and": "&&",
+ "div": "div",
+ "||": "||",
+ "or": "||",
+ "not": "not",
+ "xor": "xor",
+ "^": "^",
+ "is": "is",
+ "between": "between",
+ "in": "in",
+ "like": "like",
+ "!=": "!=",
+ "<>": "!=",
+ ">": ">",
+ ">=": ">=",
+ "<": "<",
+ "<=": "<=",
+ "&": "&",
+ "&&": "&&",
+ "|": "|",
+ "<<": "<<",
+ ">>": ">>",
+ "+": "+",
+ "-": "-",
+ "*": "*",
+ "/": "/",
+ "~": "~",
+ "%": "%",
+ "cast": "cast",
+ "cont_in": "cont_in",
+ "overlaps": "overlaps"
+}
+
+_UNARY_OPERATORS = {
+ "+": "sign_plus",
+ "-": "sign_minus",
+ "~": "~",
+ "not": "not",
+ "!": "!"
+}
+
+_NEGATION = {
+ "is": "is_not",
+ "between": "not_between",
+ "regexp": "not_regexp",
+ "like": "not_like",
+ "in": "not_in",
+ "cont_in": "not_cont_in",
+ "overlaps": "not_overlaps",
+}
+
+
+class Token(object):
+ def __init__(self, token_type, value, length=1):
+ self.token_type = token_type
+ self.value = value
+ self.length = length
+
+ def __repr__(self):
+ return self.__str__()
+
+ def __str__(self):
+ if self.token_type == TokenType.IDENT or \
+ self.token_type == TokenType.LNUM or \
+ self.token_type == TokenType.LSTRING:
+ return "{0}({1})".format(self.token_type, self.value)
+ return "{0}".format(self.token_type)
+
+
+# static protobuf helper functions
+
+def build_expr(value):
+ msg = Message("Mysqlx.Expr.Expr")
+ if isinstance(value, (Message)):
+ return value
+ elif isinstance(value, (ExprParser)):
+ return value.expr(reparse=True)
+ elif isinstance(value, (dict, DbDoc)):
+ msg["type"] = mysqlxpb_enum("Mysqlx.Expr.Expr.Type.OBJECT")
+ msg["object"] = build_object(value).get_message()
+ elif isinstance(value, (list, tuple)):
+ msg["type"] = mysqlxpb_enum("Mysqlx.Expr.Expr.Type.ARRAY")
+ msg["array"] = build_array(value).get_message()
+ else:
+ msg["type"] = mysqlxpb_enum("Mysqlx.Expr.Expr.Type.LITERAL")
+ msg["literal"] = build_scalar(value).get_message()
+ return msg
+
+
+def build_scalar(value):
+ if isinstance(value, str):
+ return build_string_scalar(value)
+ elif isinstance(value, BYTE_TYPES):
+ return build_bytes_scalar(value)
+ elif isinstance(value, bool):
+ return build_bool_scalar(value)
+ elif isinstance(value, int):
+ return build_int_scalar(value)
+ elif isinstance(value, float):
+ return build_double_scalar(value)
+ elif value is None:
+ return build_null_scalar()
+ raise ValueError("Unsupported data type: {0}.".format(type(value)))
+
+
+def build_object(obj):
+ if isinstance(obj, DbDoc):
+ return build_object(obj.__dict__)
+
+ msg = Message("Mysqlx.Expr.Object")
+ for key, value in obj.items():
+ pair = Message("Mysqlx.Expr.Object.ObjectField")
+ pair["key"] = key.encode() if isinstance(key, str) else key
+ pair["value"] = build_expr(value).get_message()
+ msg["fld"].extend([pair.get_message()])
+ return msg
+
+
+def build_array(array):
+ msg = Message("Mysqlx.Expr.Array")
+ msg["value"].extend([build_expr(value).get_message() for value in array])
+ return msg
+
+
+def build_null_scalar():
+ msg = Message("Mysqlx.Datatypes.Scalar")
+ msg["type"] = mysqlxpb_enum("Mysqlx.Datatypes.Scalar.Type.V_NULL")
+ return msg
+
+
+def build_double_scalar(value):
+ msg = Message("Mysqlx.Datatypes.Scalar")
+ msg["type"] = mysqlxpb_enum("Mysqlx.Datatypes.Scalar.Type.V_DOUBLE")
+ msg["v_double"] = value
+ return msg
+
+
+def build_int_scalar(value):
+ msg = Message("Mysqlx.Datatypes.Scalar")
+ msg["type"] = mysqlxpb_enum("Mysqlx.Datatypes.Scalar.Type.V_SINT")
+ msg["v_signed_int"] = value
+ return msg
+
+def build_unsigned_int_scalar(value):
+ msg = Message("Mysqlx.Datatypes.Scalar")
+ msg["type"] = mysqlxpb_enum("Mysqlx.Datatypes.Scalar.Type.V_UINT")
+ msg["v_unsigned_int"] = value
+ return msg
+
+def build_string_scalar(value):
+ if isinstance(value, str):
+ value = bytes(bytearray(value, "utf-8"))
+ msg = Message("Mysqlx.Datatypes.Scalar")
+ msg["type"] = mysqlxpb_enum("Mysqlx.Datatypes.Scalar.Type.V_STRING")
+ msg["v_string"] = Message("Mysqlx.Datatypes.Scalar.String", value=value)
+ return msg
+
+
+def build_bool_scalar(value):
+ msg = Message("Mysqlx.Datatypes.Scalar")
+ msg["type"] = mysqlxpb_enum("Mysqlx.Datatypes.Scalar.Type.V_BOOL")
+ msg["v_bool"] = value
+ return msg
+
+
+def build_bytes_scalar(value):
+ msg = Message("Mysqlx.Datatypes.Scalar")
+ msg["type"] = mysqlxpb_enum("Mysqlx.Datatypes.Scalar.Type.V_OCTETS")
+ msg["v_octets"] = Message("Mysqlx.Datatypes.Scalar.Octets", value=value)
+ return msg
+
+
+def build_literal_expr(value):
+ msg = Message("Mysqlx.Expr.Expr")
+ msg["type"] = mysqlxpb_enum("Mysqlx.Expr.Expr.Type.LITERAL")
+ msg["literal"] = value
+ return msg
+
+
+def build_unary_op(name, param):
+ operator = Message("Mysqlx.Expr.Operator")
+ operator["name"] = _UNARY_OPERATORS[name]
+ operator["param"] = [param.get_message()]
+ msg = Message("Mysqlx.Expr.Expr")
+ msg["type"] = mysqlxpb_enum("Mysqlx.Expr.Expr.Type.OPERATOR")
+ msg["operator"] = operator.get_message()
+ return msg
+
+
+def escape_literal(string):
+ return string.replace('"', '""')
+
+
+class ExprParser(object):
+ def __init__(self, string, allow_relational=True):
+ self.string = string
+ self.tokens = []
+ self.path_name_queue = []
+ self.pos = 0
+ self._allow_relational_columns = allow_relational
+ self.placeholder_name_to_position = {}
+ self.positional_placeholder_count = 0
+ self.clean_expression()
+ self.lex()
+
+ def __str__(self):
+ return "<mysqlx.ExprParser '{}'>".format(self.string)
+
+ def clean_expression(self):
+ """Removes the keywords that does not form part of the expression.
+
+ Removes the keywords "SELECT" and "WHERE" that does not form part of
+ the expression itself.
+ """
+ if not isinstance(self.string, str):
+ self.string = repr(self.string)
+ self.string = self.string.strip(" ")
+ if len(self.string) > 1 and self.string[-1] == ';':
+ self.string = self.string[:-1]
+ if "SELECT" in self.string[:6].upper():
+ self.string = self.string[6:]
+ if "WHERE" in self.string[:5].upper():
+ self.string = self.string[5:]
+
+ # convencience checker for lexer
+ def next_char_is(self, key, char):
+ return key + 1 < len(self.string) and self.string[key + 1] == char
+
+ def lex_number(self, pos):
+ # numeric literal
+ start = pos
+ found_dot = False
+ while pos < len(self.string) and (self.string[pos].isdigit() or
+ self.string[pos] == "."):
+ if self.string[pos] == ".":
+ if found_dot is True:
+ raise ValueError("Invalid number. Found multiple '.'")
+ found_dot = True
+ # technically we allow more than one "." and let float()'s parsing
+ # complain later
+ pos += 1
+ val = self.string[start:pos]
+ return Token(TokenType.LNUM, val, len(val))
+
+ def lex_alpha(self, i, allow_space=False):
+ start = i
+ while i < len(self.string) and \
+ (self.string[i].isalnum() or self.string[i] == "_" or
+ (self.string[i].isspace() and allow_space)):
+ i += 1
+
+ val = self.string[start:i]
+ try:
+ if i < len(self.string) and self.string[i] == '(' and \
+ val.lower() not in _SQL_FUNTION_RESERVED_WORDS_COLLISION:
+ token = Token(TokenType.IDENT, val, len(val))
+ else:
+ token = Token(_RESERVED_WORDS[val.lower()], val.lower(), len(val))
+ except KeyError:
+ token = Token(TokenType.IDENT, val, len(val))
+ return token
+
+ def lex_quoted_token(self, key):
+ quote_char = self.string[key]
+ val = ""
+ key += 1
+ start = key
+ while key < len(self.string):
+ char = self.string[key]
+ if char == quote_char and key + 1 < len(self.string) and \
+ self.string[key + 1] != quote_char:
+ # break if we have a quote char that's not double
+ break
+ elif char == quote_char or char == "\\":
+ # this quote char has to be doubled
+ if key + 1 >= len(self.string):
+ break
+ key += 1
+ val += self.string[key]
+ else:
+ val += char
+ key += 1
+ if key >= len(self.string) or self.string[key] != quote_char:
+ raise ValueError("Unterminated quoted string starting at {0}"
+ "".format(start))
+ if quote_char == "`":
+ return Token(TokenType.IDENT, val, len(val) + 2)
+ return Token(TokenType.LSTRING, val, len(val) + 2)
+
+ def lex(self):
+ i = 0
+ arrow_last = False
+ inside_arrow = False
+ while i < len(self.string):
+ char = self.string[i]
+ if char.isspace():
+ i += 1
+ continue
+ elif char.isdigit():
+ token = self.lex_number(i)
+ elif char.isalpha() or char == "_":
+ token = self.lex_alpha(i, inside_arrow)
+ elif char == "?":
+ token = Token(TokenType.EROTEME, char)
+ elif char == ":":
+ token = Token(TokenType.COLON, char)
+ elif char == "{":
+ token = Token(TokenType.LCURLY, char)
+ elif char == "}":
+ token = Token(TokenType.RCURLY, char)
+ elif char == "+":
+ token = Token(TokenType.PLUS, char)
+ elif char == "-":
+ if self.next_char_is(i, ">") and not arrow_last:
+ token = Token(TokenType.ARROW, "->", 2)
+ arrow_last = True
+ else:
+ token = Token(TokenType.MINUS, char)
+ elif char == "*":
+ if self.next_char_is(i, "*"):
+ token = Token(TokenType.DOUBLESTAR, "**", 2)
+ else:
+ token = Token(TokenType.MUL, char)
+ elif char == "/":
+ token = Token(TokenType.DIV, char)
+ elif char == "$":
+ token = Token(TokenType.DOLLAR, char)
+ elif char == "%":
+ token = Token(TokenType.MOD, char)
+ elif char == "=":
+ if self.next_char_is(i, "="):
+ token = Token(TokenType.EQ, "==", 2)
+ else:
+ token = Token(TokenType.EQ, "==", 1)
+ elif char == "&":
+ if self.next_char_is(i, "&"):
+ token = Token(TokenType.ANDAND, "&&", 2)
+ else:
+ token = Token(TokenType.BITAND, char)
+ elif char == "^":
+ token = Token(TokenType.BITXOR, char)
+ elif char == "|":
+ if self.next_char_is(i, "|"):
+ token = Token(TokenType.OROR, "||", 2)
+ else:
+ token = Token(TokenType.BITOR, char)
+ elif char == "(":
+ token = Token(TokenType.LPAREN, char)
+ elif char == ")":
+ token = Token(TokenType.RPAREN, char)
+ elif char == "[":
+ token = Token(TokenType.LSQBRACKET, char)
+ elif char == "]":
+ token = Token(TokenType.RSQBRACKET, char)
+ elif char == "~":
+ token = Token(TokenType.NEG, char)
+ elif char == ",":
+ token = Token(TokenType.COMMA, char)
+ elif char == "!":
+ if self.next_char_is(i, "="):
+ token = Token(TokenType.NE, "!=", 2)
+ else:
+ token = Token(TokenType.BANG, char)
+ elif char == "<":
+ if self.next_char_is(i, ">"):
+ token = Token(TokenType.NE, "<>", 2)
+ elif self.next_char_is(i, "<"):
+ token = Token(TokenType.LSHIFT, "<<", 2)
+ elif self.next_char_is(i, "="):
+ token = Token(TokenType.LE, "<=", 2)
+ else:
+ token = Token(TokenType.LT, char)
+ elif char == ">":
+ if self.next_char_is(i, ">"):
+ token = Token(TokenType.RSHIFT, ">>", 2)
+ elif self.next_char_is(i, "="):
+ token = Token(TokenType.GE, ">=", 2)
+ else:
+ token = Token(TokenType.GT, char)
+ elif char == ".":
+ if self.next_char_is(i, "*"):
+ token = Token(TokenType.DOTSTAR, ".*", 2)
+ elif i + 1 < len(self.string) and self.string[i + 1].isdigit():
+ token = self.lex_number(i)
+ else:
+ token = Token(TokenType.DOT, char)
+ elif (char == "'" or char == '"') and arrow_last:
+ token = Token(TokenType.QUOTE, char)
+ if not inside_arrow:
+ inside_arrow = True
+ else:
+ arrow_last = False
+ inside_arrow = False
+ elif char == '"' or char == "'" or char == "`":
+ token = self.lex_quoted_token(i)
+ else:
+ raise ValueError("Unknown character at {0}".format(i))
+ self.tokens.append(token)
+ i += token.length
+
+ def assert_cur_token(self, token_type):
+ if self.pos >= len(self.tokens):
+ raise ValueError("Expected token type {0} at pos {1} but no "
+ "tokens left".format(token_type, self.pos))
+ if self.tokens[self.pos].token_type != token_type:
+ raise ValueError("Expected token type {0} at pos {1} but found "
+ "type {2}, on tokens {3}"
+ "".format(token_type, self.pos,
+ self.tokens[self.pos], self.tokens))
+
+ def cur_token_type_is(self, token_type):
+ return self.pos_token_type_is(self.pos, token_type)
+
+ def cur_token_type_in(self, *types):
+ return self.pos < len(self.tokens) and \
+ self.tokens[self.pos].token_type in types
+
+ def next_token_type_is(self, token_type):
+ return self.pos_token_type_is(self.pos + 1, token_type)
+
+ def next_token_type_in(self, *types):
+ return self.pos < len(self.tokens) and \
+ self.tokens[self.pos + 1].token_type in types
+
+ def pos_token_type_is(self, pos, token_type):
+ return pos < len(self.tokens) and \
+ self.tokens[pos].token_type == token_type
+
+ def consume_token(self, token_type):
+ self.assert_cur_token(token_type)
+ value = self.tokens[self.pos].value
+ self.pos += 1
+ return value
+
+ def paren_expr_list(self):
+ """Parse a paren-bounded expression list for function arguments or IN
+ list and return a list of Expr objects.
+ """
+ exprs = []
+ path_name_added = False
+ self.consume_token(TokenType.LPAREN)
+ if not self.cur_token_type_is(TokenType.RPAREN):
+ msg_expr = self._expr().get_message()
+ if hasattr(msg_expr, "identifier") and msg_expr.identifier.name:
+ self.path_name_queue.insert(0, msg_expr.identifier.name)
+ path_name_added = True
+ elif not hasattr(msg_expr, "identifier") and \
+ "identifier" in msg_expr and "name" in msg_expr["identifier"]:
+ self.path_name_queue.insert(0, msg_expr["identifier"]["name"])
+ path_name_added = True
+ exprs.append(msg_expr)
+ while self.cur_token_type_is(TokenType.COMMA):
+ self.pos += 1
+ exprs.append(self._expr().get_message())
+ self.consume_token(TokenType.RPAREN)
+ if path_name_added:
+ self.path_name_queue.pop()
+ return exprs
+
+ def identifier(self):
+ self.assert_cur_token(TokenType.IDENT)
+ ident = Message("Mysqlx.Expr.Identifier")
+ if self.next_token_type_is(TokenType.DOT):
+ ident["schema_name"] = self.consume_token(TokenType.IDENT)
+ self.consume_token(TokenType.DOT)
+ ident["name"] = self.tokens[self.pos].value
+ self.pos += 1
+ return ident
+
+ def function_call(self):
+ function_call = Message("Mysqlx.Expr.FunctionCall")
+ function_call["name"] = self.identifier()
+ function_call["param"] = self.paren_expr_list()
+ msg_expr = Message("Mysqlx.Expr.Expr")
+ msg_expr["type"] = mysqlxpb_enum("Mysqlx.Expr.Expr.Type.FUNC_CALL")
+ msg_expr["function_call"] = function_call.get_message()
+ return msg_expr
+
+ def docpath_member(self):
+ self.consume_token(TokenType.DOT)
+ token = self.tokens[self.pos]
+
+ if token.token_type == TokenType.IDENT:
+ if token.value.startswith('`') and token.value.endswith('`'):
+ raise ValueError("{0} is not a valid JSON/ECMAScript "
+ "identifier".format(token.value))
+ self.consume_token(TokenType.IDENT)
+ member_name = token.value
+ elif token.token_type == TokenType.LSTRING:
+ self.consume_token(TokenType.LSTRING)
+ member_name = token.value
+ else:
+ raise ValueError("Expected token type IDENT or LSTRING in JSON "
+ "path at token pos {0}".format(self.pos))
+ doc_path_item = Message("Mysqlx.Expr.DocumentPathItem")
+ doc_path_item["type"] = mysqlxpb_enum(
+ "Mysqlx.Expr.DocumentPathItem.Type.MEMBER")
+ doc_path_item["value"] = member_name
+ return doc_path_item
+
+ def docpath_array_loc(self):
+ self.consume_token(TokenType.LSQBRACKET)
+ if self.cur_token_type_is(TokenType.MUL):
+ self.consume_token(TokenType.MUL)
+ self.consume_token(TokenType.RSQBRACKET)
+ doc_path_item = Message("Mysqlx.Expr.DocumentPathItem")
+ doc_path_item["type"] = mysqlxpb_enum(
+ "Mysqlx.Expr.DocumentPathItem.Type.ARRAY_INDEX_ASTERISK")
+ return doc_path_item
+ elif self.cur_token_type_is(TokenType.LNUM):
+ value = int(self.consume_token(TokenType.LNUM))
+ if value < 0:
+ raise IndexError("Array index cannot be negative at {0}"
+ "".format(self.pos))
+ self.consume_token(TokenType.RSQBRACKET)
+ doc_path_item = Message("Mysqlx.Expr.DocumentPathItem")
+ doc_path_item["type"] = mysqlxpb_enum(
+ "Mysqlx.Expr.DocumentPathItem.Type.ARRAY_INDEX")
+ doc_path_item["index"] = value
+ return doc_path_item
+ else:
+ raise ValueError("Exception token type MUL or LNUM in JSON "
+ "path array index at token pos {0}"
+ "".format(self.pos))
+
+ def document_field(self):
+ col_id = Message("Mysqlx.Expr.ColumnIdentifier")
+ if self.cur_token_type_is(TokenType.IDENT):
+ doc_path_item = Message("Mysqlx.Expr.DocumentPathItem")
+ doc_path_item["type"] = mysqlxpb_enum(
+ "Mysqlx.Expr.DocumentPathItem.Type.MEMBER")
+ doc_path_item["value"] = self.consume_token(TokenType.IDENT)
+ col_id["document_path"].extend([doc_path_item.get_message()])
+ col_id["document_path"].extend(self.document_path())
+ if self.path_name_queue:
+ col_id["name"] = self.path_name_queue[0]
+ expr = Message("Mysqlx.Expr.Expr")
+ expr["type"] = mysqlxpb_enum("Mysqlx.Expr.Expr.Type.IDENT")
+ expr["identifier"] = col_id
+ return expr
+
+ def document_path(self):
+ """Parse a JSON-style document path, like WL#7909, but prefix by @.
+ instead of $. We parse this as a string because the protocol doesn't
+ support it. (yet)
+ """
+ doc_path = []
+ while True:
+ if self.cur_token_type_is(TokenType.DOT):
+ doc_path.append(self.docpath_member().get_message())
+ elif self.cur_token_type_is(TokenType.DOTSTAR):
+ self.consume_token(TokenType.DOTSTAR)
+ doc_path_item = Message("Mysqlx.Expr.DocumentPathItem")
+ doc_path_item["type"] = mysqlxpb_enum(
+ "Mysqlx.Expr.DocumentPathItem.Type.MEMBER_ASTERISK")
+ doc_path.append(doc_path_item.get_message())
+ elif self.cur_token_type_is(TokenType.LSQBRACKET):
+ doc_path.append(self.docpath_array_loc().get_message())
+ elif self.cur_token_type_is(TokenType.DOUBLESTAR):
+ self.consume_token(TokenType.DOUBLESTAR)
+ doc_path_item = Message("Mysqlx.Expr.DocumentPathItem")
+ doc_path_item["type"] = mysqlxpb_enum(
+ "Mysqlx.Expr.DocumentPathItem.Type.DOUBLE_ASTERISK")
+ doc_path.append(doc_path_item.get_message())
+ else:
+ break
+ items = len(doc_path)
+ if items > 0 and get_item_or_attr(doc_path[items - 1], "type") == \
+ mysqlxpb_enum("Mysqlx.Expr.DocumentPathItem.Type.DOUBLE_ASTERISK"):
+ raise ValueError("JSON path may not end in '**' at {0}"
+ "".format(self.pos))
+ return doc_path
+
+ def column_identifier(self):
+ parts = []
+ parts.append(self.consume_token(TokenType.IDENT))
+ while self.cur_token_type_is(TokenType.DOT):
+ self.consume_token(TokenType.DOT)
+ parts.append(self.consume_token(TokenType.IDENT))
+ if len(parts) > 3:
+ raise ValueError("Too many parts to identifier at {0}"
+ "".format(self.pos))
+ parts.reverse()
+ col_id = Message("Mysqlx.Expr.ColumnIdentifier")
+ # clever way to apply them to the struct
+ for i in range(0, len(parts)):
+ if i == 0:
+ col_id["name"] = parts[0]
+ elif i == 1:
+ col_id["table_name"] = parts[1]
+ elif i == 2:
+ col_id["schema_name"] = parts[2]
+
+ is_doc = False
+ if self.cur_token_type_is(TokenType.DOLLAR):
+ is_doc = True
+ self.consume_token(TokenType.DOLLAR)
+ col_id["document_path"] = self.document_path()
+ elif self.cur_token_type_is(TokenType.ARROW):
+ is_doc = True
+ self.consume_token(TokenType.ARROW)
+ is_quoted = False
+ if self.cur_token_type_is(TokenType.QUOTE):
+ is_quoted = True
+ self.consume_token(TokenType.QUOTE)
+ self.consume_token(TokenType.DOLLAR)
+ col_id["document_path"] = self.document_path()
+ if is_quoted:
+ self.consume_token(TokenType.QUOTE)
+
+ if is_doc and len(col_id["document_path"]) == 0:
+ doc_path_item = Message("Mysqlx.Expr.DocumentPathItem")
+ doc_path_item["type"] = mysqlxpb_enum(
+ "Mysqlx.Expr.DocumentPathItem.Type.MEMBER")
+ doc_path_item["value"] = ""
+ col_id["document_path"].extend([doc_path_item.get_message()])
+
+ msg_expr = Message("Mysqlx.Expr.Expr")
+ msg_expr["type"] = mysqlxpb_enum("Mysqlx.Expr.Expr.Type.IDENT")
+ msg_expr["identifier"] = col_id
+ return msg_expr
+
+ def next_token(self):
+ if self.pos >= len(self.tokens):
+ raise ValueError("Unexpected end of token stream")
+ token = self.tokens[self.pos]
+ self.pos += 1
+ return token
+
+ def expect_token(self, token_type):
+ token = self.next_token()
+ if token.token_type != token_type:
+ raise ValueError("Expected token type {0}".format(token_type))
+
    def peek_token(self):
        # Return the token at the cursor without consuming it.
        # NOTE(review): unlike next_token(), this raises IndexError (not
        # ValueError) when the stream is exhausted — callers appear to check
        # bounds first; confirm before relying on the exception type.
        return self.tokens[self.pos]
+
+ def consume_any_token(self):
+ value = self.tokens[self.pos].value
+ self.pos += 1
+ return value
+
+ def parse_json_array(self):
+ """
+ jsonArray ::= "[" [ expr ("," expr)* ] "]"
+ """
+ msg = Message("Mysqlx.Expr.Array")
+ while self.pos < len(self.tokens) and \
+ not self.cur_token_type_is(TokenType.RSQBRACKET):
+ msg["value"].extend([self._expr().get_message()])
+ if not self.cur_token_type_is(TokenType.COMMA):
+ break
+ self.consume_token(TokenType.COMMA)
+ self.consume_token(TokenType.RSQBRACKET)
+
+ expr = Message("Mysqlx.Expr.Expr")
+ expr["type"] = mysqlxpb_enum("Mysqlx.Expr.Expr.Type.ARRAY")
+ expr["array"] = msg.get_message()
+ return expr
+
+ def parse_json_doc(self):
+ """
+ jsonDoc ::= "{" [jsonKeyValue ("," jsonKeyValue)*] "}"
+ jsonKeyValue ::= STRING_DQ ":" expr
+ """
+ msg = Message("Mysqlx.Expr.Object")
+ while self.pos < len(self.tokens) and \
+ not self.cur_token_type_is(TokenType.RCURLY):
+ item = Message("Mysqlx.Expr.Object.ObjectField")
+ item["key"] = self.consume_token(TokenType.LSTRING)
+ self.consume_token(TokenType.COLON)
+ item["value"] = self._expr().get_message()
+ msg["fld"].extend([item.get_message()])
+ if not self.cur_token_type_is(TokenType.COMMA):
+ break
+ self.consume_token(TokenType.COMMA)
+ self.consume_token(TokenType.RCURLY)
+
+ expr = Message("Mysqlx.Expr.Expr")
+ expr["type"] = mysqlxpb_enum("Mysqlx.Expr.Expr.Type.OBJECT")
+ expr["object"] = msg.get_message()
+ return expr
+
    def parse_place_holder(self, token):
        """Parse a placeholder (":name", ":<num>" or "?") into a
        PLACEHOLDER Expr message.

        Named placeholders reuse the position assigned the first time the
        name was seen; anonymous "?" placeholders always consume the next
        positional slot (their generated name is the current count).
        """
        place_holder_name = ""
        if self.cur_token_type_is(TokenType.LNUM):
            place_holder_name = self.consume_token(TokenType.LNUM)
        elif self.cur_token_type_is(TokenType.IDENT):
            place_holder_name = self.consume_token(TokenType.IDENT)
        elif token.token_type == TokenType.EROTEME:
            place_holder_name = str(self.positional_placeholder_count)
        else:
            raise ValueError("Invalid placeholder name at token pos {0}"
                             "".format(self.pos))

        msg_expr = Message("Mysqlx.Expr.Expr")
        msg_expr["type"] = mysqlxpb_enum("Mysqlx.Expr.Expr.Type.PLACEHOLDER")
        if place_holder_name in self.placeholder_name_to_position:
            # repeated name: bind to the slot it was given originally
            msg_expr["position"] = \
                self.placeholder_name_to_position[place_holder_name]
        else:
            # first occurrence: allocate the next positional slot
            msg_expr["position"] = self.positional_placeholder_count
            self.placeholder_name_to_position[place_holder_name] = \
                self.positional_placeholder_count
            self.positional_placeholder_count += 1
        return msg_expr
+
+ def cast(self):
+ """ cast ::= CAST LPAREN expr AS cast_data_type RPAREN
+ """
+ operator = Message("Mysqlx.Expr.Operator", name="cast")
+ self.consume_token(TokenType.LPAREN)
+ operator["param"].extend([self._expr().get_message()])
+ self.consume_token(TokenType.AS)
+
+ type_scalar = build_bytes_scalar(str.encode(self.cast_data_type()))
+ operator["param"].extend(
+ [build_literal_expr(type_scalar).get_message()])
+ self.consume_token(TokenType.RPAREN)
+ msg = Message("Mysqlx.Expr.Expr", operator=operator.get_message())
+ msg["type"] = mysqlxpb_enum("Mysqlx.Expr.Expr.Type.OPERATOR")
+ return msg
+
+ def cast_data_type(self):
+ """ cast_data_type ::= ( BINARY dimension? ) |
+ ( CHAR dimension? ) |
+ ( DATE ) |
+ ( DATETIME dimension? ) |
+ ( TIME dimension? ) |
+ ( DECIMAL dimension? ) |
+ ( SIGNED INTEGER? ) |
+ ( UNSIGNED INTEGER? ) |
+ JSON
+ """
+ token = self.next_token()
+ if token.token_type in (TokenType.BINARY, TokenType.CHAR,
+ TokenType.DATETIME, TokenType.TIME,):
+ dimension = self.cast_data_type_dimension()
+ return "{0}{1}".format(token.value, dimension) \
+ if dimension else token.value
+ elif token.token_type is TokenType.DECIMAL:
+ dimension = self.cast_data_type_dimension(True)
+ return "{0}{1}".format(token.value, dimension) \
+ if dimension else token.value
+ elif token.token_type in (TokenType.SIGNED, TokenType.UNSIGNED,):
+ if self.cur_token_type_is(TokenType.INTEGER):
+ self.consume_token(TokenType.INTEGER)
+ return token.value
+ elif token.token_type in (TokenType.INTEGER, TokenType.JSON,
+ TokenType.DATE,):
+ return token.value
+
+ raise ValueError("Unknown token type {0} at position {1} ({2})"
+ "".format(token.token_type, self.pos, token.value))
+
+ def cast_data_type_dimension(self, decimal=False):
+ """ dimension ::= LPAREN LNUM (, LNUM)? RPAREN
+ """
+ if not self.cur_token_type_is(TokenType.LPAREN):
+ return None
+
+ dimension = []
+ self.consume_token(TokenType.LPAREN)
+ dimension.append(self.consume_token(TokenType.LNUM))
+ if decimal and self.cur_token_type_is(TokenType.COMMA):
+ self.consume_token(TokenType.COMMA)
+ dimension.append(self.consume_token(TokenType.LNUM))
+ self.consume_token(TokenType.RPAREN)
+
+ return "({0})".format(dimension[0]) if len(dimension) == 1 else \
+ "({0},{1})".format(*dimension)
+
+ def star_operator(self):
+ msg = Message("Mysqlx.Expr.Expr")
+ msg["type"] = mysqlxpb_enum("Mysqlx.Expr.Expr.Type.OPERATOR")
+ msg["operator"] = Message("Mysqlx.Expr.Operator", name="*")
+ return msg
+
+ def atomic_expr(self):
+ """Parse an atomic expression and return a protobuf Expr object"""
+ token = self.next_token()
+
+ if token.token_type in [TokenType.EROTEME, TokenType.COLON]:
+ return self.parse_place_holder(token)
+ elif token.token_type == TokenType.LCURLY:
+ return self.parse_json_doc()
+ elif token.token_type == TokenType.LSQBRACKET:
+ return self.parse_json_array()
+ elif token.token_type == TokenType.CAST:
+ return self.cast()
+ elif token.token_type == TokenType.LPAREN:
+ expr = self._expr()
+ self.expect_token(TokenType.RPAREN)
+ return expr
+ elif token.token_type in [TokenType.PLUS, TokenType.MINUS]:
+ peek = self.peek_token()
+ if peek.token_type == TokenType.LNUM:
+ self.tokens[self.pos].value = token.value + peek.value
+ return self.atomic_expr()
+ return build_unary_op(token.value, self.atomic_expr())
+ elif token.token_type in [TokenType.NOT, TokenType.NEG, TokenType.BANG]:
+ return build_unary_op(token.value, self.atomic_expr())
+ elif token.token_type == TokenType.LSTRING:
+ return build_literal_expr(build_string_scalar(token.value))
+ elif token.token_type == TokenType.NULL:
+ return build_literal_expr(build_null_scalar())
+ elif token.token_type == TokenType.LNUM:
+ if "." in token.value:
+ return build_literal_expr(
+ build_double_scalar(float(token.value)))
+ return build_literal_expr(build_int_scalar(int(token.value)))
+ elif token.token_type in [TokenType.TRUE, TokenType.FALSE]:
+ return build_literal_expr(
+ build_bool_scalar(token.token_type == TokenType.TRUE))
+ elif token.token_type == TokenType.DOLLAR:
+ return self.document_field()
+ elif token.token_type == TokenType.MUL:
+ return self.star_operator()
+ elif token.token_type == TokenType.IDENT:
+ self.pos = self.pos - 1 # stay on the identifier
+ if self.next_token_type_is(TokenType.LPAREN) or \
+ (self.next_token_type_is(TokenType.DOT) and
+ self.pos_token_type_is(self.pos + 2, TokenType.IDENT) and
+ self.pos_token_type_is(self.pos + 3, TokenType.LPAREN)):
+ # Function call
+ return self.function_call()
+ return (self.document_field()
+ if not self._allow_relational_columns
+ else self.column_identifier())
+
+ raise ValueError("Unknown token type = {0} when expecting atomic "
+ "expression at {1}"
+ "".format(token.token_type, self.pos))
+
+ def parse_left_assoc_binary_op_expr(self, types, inner_parser):
+ """Given a `set' of types and an Expr-returning inner parser function,
+ parse a left associate binary operator expression"""
+ lhs = inner_parser()
+ while (self.pos < len(self.tokens) and
+ self.tokens[self.pos].token_type in types):
+ msg = Message("Mysqlx.Expr.Expr")
+ msg["type"] = mysqlxpb_enum("Mysqlx.Expr.Expr.Type.OPERATOR")
+ operator = Message("Mysqlx.Expr.Operator")
+ operator["name"] = _OPERATORS[self.tokens[self.pos].value]
+ operator["param"] = [lhs.get_message()]
+ self.pos += 1
+ operator["param"].extend([inner_parser().get_message()])
+ msg["operator"] = operator
+ lhs = msg
+ return lhs
+
+ # operator precedence is implemented here
+ def add_sub_interval(self):
+ lhs = self.atomic_expr()
+ if self.cur_token_type_in(TokenType.PLUS, TokenType.MINUS) and \
+ self.next_token_type_is(TokenType.INTERVAL):
+ token = self.next_token()
+
+ operator = Message("Mysqlx.Expr.Operator")
+ operator["param"].extend([lhs.get_message()])
+ operator["name"] = "date_add" if token.token_type is TokenType.PLUS \
+ else "date_sub"
+
+ self.consume_token(TokenType.INTERVAL)
+ operator["param"].extend([self.bit_expr().get_message()])
+
+ if not self.cur_token_type_in(*_INTERVAL_UNITS):
+ raise ValueError("Expected interval type at position {0}"
+ "".format(self.pos))
+
+ token = str.encode(self.consume_any_token().upper())
+ operator["param"].extend([build_literal_expr(
+ build_bytes_scalar(token)).get_message()])
+
+ lhs = Message("Mysqlx.Expr.Expr", operator=operator)
+ lhs["type"] = mysqlxpb_enum("Mysqlx.Expr.Expr.Type.OPERATOR")
+
+ return lhs
+
+ def mul_div_expr(self):
+ return self.parse_left_assoc_binary_op_expr(
+ set([TokenType.MUL, TokenType.DIV, TokenType.MOD]),
+ self.add_sub_interval)
+
+ def add_sub_expr(self):
+ return self.parse_left_assoc_binary_op_expr(
+ set([TokenType.PLUS, TokenType.MINUS]), self.mul_div_expr)
+
+ def shift_expr(self):
+ return self.parse_left_assoc_binary_op_expr(
+ set([TokenType.LSHIFT, TokenType.RSHIFT]), self.add_sub_expr)
+
+ def bit_expr(self):
+ return self.parse_left_assoc_binary_op_expr(
+ set([TokenType.BITAND, TokenType.BITOR, TokenType.BITXOR]),
+ self.shift_expr)
+
+ def comp_expr(self):
+ return self.parse_left_assoc_binary_op_expr(
+ set([TokenType.GE, TokenType.GT, TokenType.LE, TokenType.LT,
+ TokenType.EQ, TokenType.NE]), self.bit_expr)
+
+ def ilri_expr(self):
+ params = []
+ lhs = self.comp_expr()
+ is_not = False
+ if self.cur_token_type_is(TokenType.NOT):
+ is_not = True
+ self.consume_token(TokenType.NOT)
+ if self.pos < len(self.tokens):
+ params.append(lhs.get_message())
+ op_name = self.tokens[self.pos].value
+ if self.cur_token_type_is(TokenType.IS):
+ self.consume_token(TokenType.IS)
+ # for IS, NOT comes AFTER
+ if self.cur_token_type_is(TokenType.NOT):
+ is_not = True
+ self.consume_token(TokenType.NOT)
+ params.append(self.comp_expr().get_message())
+ elif self.cur_token_type_is(TokenType.IN):
+ self.consume_token(TokenType.IN)
+ if self.cur_token_type_is(TokenType.LPAREN):
+ params.extend(self.paren_expr_list())
+ else:
+ op_name = "cont_in"
+ params.append(self.comp_expr().get_message())
+ elif self.cur_token_type_is(TokenType.OVERLAPS):
+ self.consume_token(TokenType.OVERLAPS)
+ params.append(self.comp_expr().get_message())
+
+ elif self.cur_token_type_is(TokenType.LIKE):
+ self.consume_token(TokenType.LIKE)
+ params.append(self.comp_expr().get_message())
+ if self.cur_token_type_is(TokenType.ESCAPE):
+ self.consume_token(TokenType.ESCAPE)
+ params.append(self.comp_expr().get_message())
+ elif self.cur_token_type_is(TokenType.BETWEEN):
+ self.consume_token(TokenType.BETWEEN)
+ params.append(self.comp_expr().get_message())
+ self.consume_token(TokenType.AND)
+ params.append(self.comp_expr().get_message())
+ elif self.cur_token_type_is(TokenType.REGEXP):
+ self.consume_token(TokenType.REGEXP)
+ params.append(self.comp_expr().get_message())
+ else:
+ if is_not:
+ raise ValueError("Unknown token after NOT as pos {0}"
+ "".format(self.pos))
+ op_name = None # not an operator we're interested in
+ if op_name:
+ operator = Message("Mysqlx.Expr.Operator")
+ operator["name"] = _NEGATION[op_name] if is_not else op_name
+ operator["param"] = params
+ msg_expr = Message("Mysqlx.Expr.Expr")
+ msg_expr["type"] = mysqlxpb_enum(
+ "Mysqlx.Expr.Expr.Type.OPERATOR")
+ msg_expr["operator"] = operator.get_message()
+ lhs = msg_expr
+ return lhs
+
+ def and_expr(self):
+ return self.parse_left_assoc_binary_op_expr(
+ set([TokenType.AND, TokenType.ANDAND]), self.ilri_expr)
+
+ def xor_expr(self):
+ return self.parse_left_assoc_binary_op_expr(
+ set([TokenType.XOR]), self.and_expr)
+
+ def or_expr(self):
+ return self.parse_left_assoc_binary_op_expr(
+ set([TokenType.OR, TokenType.OROR]), self.xor_expr)
+
+ def _expr(self, reparse=False):
+ if reparse:
+ self.tokens = []
+ self.pos = 0
+ self.placeholder_name_to_position = {}
+ self.positional_placeholder_count = 0
+ self.lex()
+ return self.or_expr()
+
+ def expr(self, reparse=False):
+ expression = self._expr(reparse)
+ used_tokens = self.pos
+ if self.pos_token_type_is(len(self.tokens) - 2, TokenType.AS):
+ used_tokens += 2
+ if used_tokens < len(self.tokens):
+ raise ValueError("Unused token types {} found in expression at "
+ "position: {}".format(self.tokens[self.pos:],
+ self.pos))
+ return expression
+
+ def parse_table_insert_field(self):
+ return Message("Mysqlx.Crud.Column",
+ name=self.consume_token(TokenType.IDENT))
+
+ def parse_table_update_field(self):
+ return self.column_identifier().identifier
+
+ def _table_fields(self):
+ fields = []
+ temp = self.string.split(",")
+ temp.reverse()
+ while temp:
+ field = temp.pop()
+ while field.count("(") != field.count(")") or \
+ field.count("[") != field.count("]") or \
+ field.count("{") != field.count("}"):
+ field = "{1},{0}".format(temp.pop(), field)
+ fields.append(field.strip())
+ return fields
+
+ def parse_table_select_projection(self):
+ project_expr = []
+ first = True
+ fields = self._table_fields()
+ while self.pos < len(self.tokens):
+ if not first:
+ self.consume_token(TokenType.COMMA)
+ first = False
+ projection = Message("Mysqlx.Crud.Projection", source=self._expr())
+ if self.cur_token_type_is(TokenType.AS):
+ self.consume_token(TokenType.AS)
+ projection["alias"] = self.consume_token(TokenType.IDENT)
+ else:
+ projection["alias"] = fields[len(project_expr)]
+ project_expr.append(projection.get_message())
+
+ return project_expr
+
+ def parse_order_spec(self):
+ order_specs = []
+ first = True
+ while self.pos < len(self.tokens):
+ if not first:
+ self.consume_token(TokenType.COMMA)
+ first = False
+ order = Message("Mysqlx.Crud.Order", expr=self._expr())
+ if self.cur_token_type_is(TokenType.ORDERBY_ASC):
+ order["direction"] = mysqlxpb_enum(
+ "Mysqlx.Crud.Order.Direction.ASC")
+ self.consume_token(TokenType.ORDERBY_ASC)
+ elif self.cur_token_type_is(TokenType.ORDERBY_DESC):
+ order["direction"] = mysqlxpb_enum(
+ "Mysqlx.Crud.Order.Direction.DESC")
+ self.consume_token(TokenType.ORDERBY_DESC)
+ order_specs.append(order.get_message())
+ return order_specs
+
+ def parse_expr_list(self):
+ expr_list = []
+ first = True
+ while self.pos < len(self.tokens):
+ if not first:
+ self.consume_token(TokenType.COMMA)
+ first = False
+ expr_list.append(self._expr().get_message())
+ return expr_list
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/helpers.py b/monEnvTP/lib/python3.8/site-packages/mysqlx/helpers.py
new file mode 100644
index 0000000000000000000000000000000000000000..8f4cabe57d0d2fb6a30d5ff6a4825d55e6874fda
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysqlx/helpers.py
@@ -0,0 +1,213 @@
+# Copyright (c) 2017, 2019, Oracle and/or its affiliates. All rights reserved.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""This module contains helper functions."""
+
+import binascii
+import decimal
+import functools
+import inspect
+import warnings
+
+from .constants import TLS_CIPHER_SUITES, TLS_VERSIONS
+from .errors import InterfaceError
+
+
+BYTE_TYPES = (bytearray, bytes,)
+NUMERIC_TYPES = (int, float, decimal.Decimal,)
+
+
def encode_to_bytes(value, encoding="utf-8"):
    """Returns an encoded version of the string as a bytes object.

    Args:
        value (str): The value to be encoded; returned unchanged if it is
            already a bytes object.
        encoding (str): The encoding.

    Returns:
        bytes: The encoded version of the string as a bytes object.
    """
    return value if isinstance(value, bytes) else value.encode(encoding)
+
+
def decode_from_bytes(value, encoding="utf-8"):
    """Returns a string decoded from the given bytes.

    Args:
        value (bytes): The value to be decoded; returned unchanged if it is
            not a bytes object.
        encoding (str): The encoding.

    Returns:
        str: The value decoded from bytes.
    """
    if isinstance(value, bytes):
        return value.decode(encoding)
    return value
+
+
def get_item_or_attr(obj, key):
    """Get item from dictionary or attribute from object.

    Args:
        obj (object): Dictionary or object.
        key (str): Key.

    Returns:
        object: The object for the provided key.
    """
    if isinstance(obj, dict):
        return obj[key]
    return getattr(obj, key)
+
+
def escape(*args):
    """Escapes special characters as they are expected to be when MySQL
    receives them.
    As found in MySQL source mysys/charset.c

    Args:
        *args: One or more values to be escaped.  None and numeric values
            pass through unchanged; str, bytes and bytearray values are
            escaped.

    Returns:
        object: The escaped value when called with a single argument, or a
        list of escaped values when called with several.
    """
    def _escape(value):
        """Escapes special characters."""
        if value is None:
            return value
        elif isinstance(value, NUMERIC_TYPES):
            return value
        if isinstance(value, (bytes, bytearray)):
            value = value.replace(b'\\', b'\\\\')
            value = value.replace(b'\n', b'\\n')
            value = value.replace(b'\r', b'\\r')
            value = value.replace(b'\047', b'\134\047')  # single quotes
            value = value.replace(b'\042', b'\134\042')  # double quotes
            value = value.replace(b'\032', b'\134\032')  # for Win32
        else:
            value = value.replace('\\', '\\\\')
            value = value.replace('\n', '\\n')
            value = value.replace('\r', '\\r')
            value = value.replace('\047', '\134\047')  # single quotes
            value = value.replace('\042', '\134\042')  # double quotes
            value = value.replace('\032', '\134\032')  # for Win32
        return value
    if len(args) > 1:
        return [_escape(arg) for arg in args]
    return _escape(args[0])
+
+
def quote_identifier(identifier, sql_mode=""):
    """Quote the given identifier with backticks, converting backticks (`)
    in the identifier name with the correct escape sequence (``) unless the
    identifier is quoted (") as in sql_mode set to ANSI_QUOTES.

    Args:
        identifier (str): Identifier to quote.
        sql_mode (str): "ANSI_QUOTES" selects double-quote quoting.

    Returns:
        str: Returns string with the identifier quoted.
    """
    if sql_mode == "ANSI_QUOTES":
        escaped = identifier.replace('"', '""')
        return '"{0}"'.format(escaped)
    escaped = identifier.replace("`", "``")
    return "`{0}`".format(escaped)
+
+
+def deprecated(version=None, reason=None):
+ """This is a decorator used to mark functions as deprecated.
+
+ Args:
+ version (Optional[string]): Version when was deprecated.
+ reason (Optional[string]): Reason or extra information to be shown.
+
+ Usage:
+
+ .. code-block:: python
+
+ from mysqlx.helpers import deprecated
+
+ @deprecated('8.0.12', 'Please use other_function() instead')
+ def deprecated_function(x, y):
+ return x + y
+ """
+ def decorate(func):
+ """Decorate function."""
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ """Wrapper function.
+
+ Args:
+ *args: Variable length argument list.
+ **kwargs: Arbitrary keyword arguments.
+ """
+ message = ["'{}' is deprecated".format(func.__name__)]
+ if version:
+ message.append(" since version {}".format(version))
+ if reason:
+ message.append(". {}".format(reason))
+ frame = inspect.currentframe().f_back
+ warnings.warn_explicit("".join(message),
+ category=DeprecationWarning,
+ filename=inspect.getfile(frame.f_code),
+ lineno=frame.f_lineno)
+ return func(*args, **kwargs)
+ return wrapper
+ return decorate
+
+
def iani_to_openssl_cs_name(tls_version, cipher_suites_names):
    """Translates a cipher suites name list from IANA names to OpenSSL
    names.

    Args:
        tls_version (str): The TLS version to look at for a translation.
        cipher_suites_names (list): A list of cipher suites names.

    Returns:
        list: The translated list of cipher suite names.

    Raises:
        InterfaceError: If a name is not a valid cipher suite.
    """
    translated_names = []
    cipher_suites = {}

    # Accumulate the suites of the given TLS version and all previous
    # versions, since a connection may negotiate any version up to
    # `tls_version`.
    for index in range(TLS_VERSIONS.index(tls_version) + 1):
        cipher_suites.update(TLS_CIPHER_SUITES[TLS_VERSIONS[index]])

    for name in cipher_suites_names:
        if "-" in name:
            # Already an OpenSSL-style name; keep as-is.
            translated_names.append(name)
        elif name in cipher_suites:
            translated_names.append(cipher_suites[name])
        else:
            raise InterfaceError("The '{}' in cipher suites is not a valid "
                                 "cipher suite".format(name))
    return translated_names
+
+
def hexlify(data):
    """Return the hexadecimal representation of the binary data.

    Args:
        data (str): The binary data.

    Returns:
        bytes: The hexadecimal representation of data.
    """
    hex_bytes = binascii.hexlify(data)
    return hex_bytes.decode("utf-8")
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/locales/__init__.py b/monEnvTP/lib/python3.8/site-packages/mysqlx/locales/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..be637639e9bb6521010d1d2db0470c1d538bda20
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysqlx/locales/__init__.py
@@ -0,0 +1,73 @@
+# Copyright (c) 2016, 2017, Oracle and/or its affiliates. All rights reserved.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""Translations"""
+
+__all__ = ["get_client_error"]
+
+from .. import errorcode
+
+
+def get_client_error(error, language="eng"):
+ """Lookup client error
+
+ This function will lookup the client error message based on the given
+ error and return the error message. If the error was not found,
+ None will be returned.
+
+ Error can be either an integer or a string. For example:
+ error: 2000
+ error: CR_UNKNOWN_ERROR
+
+ The language attribute can be used to retrieve a localized message, when
+ available.
+
+ Returns a string or None.
+ """
+ try:
+ tmp = __import__("mysqlx.locales.{0}".format(language),
+ globals(), locals(), ["client_error"])
+ except ImportError:
+ raise ImportError("No localization support for language '{0}'"
+ "".format(language))
+ client_error = tmp.client_error
+
+ if isinstance(error, int):
+ errno = error
+ for key, value in errorcode.__dict__.items():
+ if value == errno:
+ error = key
+ break
+
+ if isinstance(error, (str)):
+ try:
+ return getattr(client_error, error)
+ except AttributeError:
+ return None
+
+ raise ValueError("Error argument needs to be either an integer or string")
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/locales/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysqlx/locales/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..df17c815b72fdcb08f6edbbc2b36558b64b7af17
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysqlx/locales/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/locales/eng/__init__.py b/monEnvTP/lib/python3.8/site-packages/mysqlx/locales/eng/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/locales/eng/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysqlx/locales/eng/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..440b1c2ecf962fd593131e11740970dc7517facb
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysqlx/locales/eng/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/locales/eng/__pycache__/client_error.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysqlx/locales/eng/__pycache__/client_error.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1582a08885d91fff45007a073baa2396d5fed4d4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysqlx/locales/eng/__pycache__/client_error.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/locales/eng/client_error.py b/monEnvTP/lib/python3.8/site-packages/mysqlx/locales/eng/client_error.py
new file mode 100644
index 0000000000000000000000000000000000000000..5927e950b7801b9cc69c488e76d32bf26d4fcfd2
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysqlx/locales/eng/client_error.py
@@ -0,0 +1,110 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2013, 2021, Oracle and/or its affiliates. All rights reserved.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+# This file was auto-generated.
+_GENERATED_ON = '2021-08-11'
+_MYSQL_VERSION = (8, 0, 27)
+
+# Start MySQL Error messages
+CR_UNKNOWN_ERROR = u"Unknown MySQL error"
+CR_SOCKET_CREATE_ERROR = u"Can't create UNIX socket (%s)"
+CR_CONNECTION_ERROR = u"Can't connect to local MySQL server through socket '%-.100s' (%s)"
+CR_CONN_HOST_ERROR = u"Can't connect to MySQL server on '%-.100s:%u' (%s)"
+CR_IPSOCK_ERROR = u"Can't create TCP/IP socket (%s)"
+CR_UNKNOWN_HOST = u"Unknown MySQL server host '%-.100s' (%s)"
+CR_SERVER_GONE_ERROR = u"MySQL server has gone away"
+CR_VERSION_ERROR = u"Protocol mismatch; server version = %s, client version = %s"
+CR_OUT_OF_MEMORY = u"MySQL client ran out of memory"
+CR_WRONG_HOST_INFO = u"Wrong host info"
+CR_LOCALHOST_CONNECTION = u"Localhost via UNIX socket"
+CR_TCP_CONNECTION = u"%-.100s via TCP/IP"
+CR_SERVER_HANDSHAKE_ERR = u"Error in server handshake"
+CR_SERVER_LOST = u"Lost connection to MySQL server during query"
+CR_COMMANDS_OUT_OF_SYNC = u"Commands out of sync; you can't run this command now"
+CR_NAMEDPIPE_CONNECTION = u"Named pipe: %-.32s"
+CR_NAMEDPIPEWAIT_ERROR = u"Can't wait for named pipe to host: %-.64s pipe: %-.32s (%s)"
+CR_NAMEDPIPEOPEN_ERROR = u"Can't open named pipe to host: %-.64s pipe: %-.32s (%s)"
+CR_NAMEDPIPESETSTATE_ERROR = u"Can't set state of named pipe to host: %-.64s pipe: %-.32s (%s)"
+CR_CANT_READ_CHARSET = u"Can't initialize character set %-.32s (path: %-.100s)"
+CR_NET_PACKET_TOO_LARGE = u"Got packet bigger than 'max_allowed_packet' bytes"
+CR_EMBEDDED_CONNECTION = u"Embedded server"
+CR_PROBE_SLAVE_STATUS = u"Error on SHOW SLAVE STATUS:"
+CR_PROBE_SLAVE_HOSTS = u"Error on SHOW SLAVE HOSTS:"
+CR_PROBE_SLAVE_CONNECT = u"Error connecting to slave:"
+CR_PROBE_MASTER_CONNECT = u"Error connecting to master:"
+CR_SSL_CONNECTION_ERROR = u"SSL connection error: %-.100s"
+CR_MALFORMED_PACKET = u"Malformed packet"
+CR_WRONG_LICENSE = u"This client library is licensed only for use with MySQL servers having '%s' license"
+CR_NULL_POINTER = u"Invalid use of null pointer"
+CR_NO_PREPARE_STMT = u"Statement not prepared"
+CR_PARAMS_NOT_BOUND = u"No data supplied for parameters in prepared statement"
+CR_DATA_TRUNCATED = u"Data truncated"
+CR_NO_PARAMETERS_EXISTS = u"No parameters exist in the statement"
+CR_INVALID_PARAMETER_NO = u"Invalid parameter number"
+CR_INVALID_BUFFER_USE = u"Can't send long data for non-string/non-binary data types (parameter: %s)"
+CR_UNSUPPORTED_PARAM_TYPE = u"Using unsupported buffer type: %s (parameter: %s)"
+CR_SHARED_MEMORY_CONNECTION = u"Shared memory: %-.100s"
+CR_SHARED_MEMORY_CONNECT_REQUEST_ERROR = u"Can't open shared memory; client could not create request event (%s)"
+CR_SHARED_MEMORY_CONNECT_ANSWER_ERROR = u"Can't open shared memory; no answer event received from server (%s)"
+CR_SHARED_MEMORY_CONNECT_FILE_MAP_ERROR = u"Can't open shared memory; server could not allocate file mapping (%s)"
+CR_SHARED_MEMORY_CONNECT_MAP_ERROR = u"Can't open shared memory; server could not get pointer to file mapping (%s)"
+CR_SHARED_MEMORY_FILE_MAP_ERROR = u"Can't open shared memory; client could not allocate file mapping (%s)"
+CR_SHARED_MEMORY_MAP_ERROR = u"Can't open shared memory; client could not get pointer to file mapping (%s)"
+CR_SHARED_MEMORY_EVENT_ERROR = u"Can't open shared memory; client could not create %s event (%s)"
+CR_SHARED_MEMORY_CONNECT_ABANDONED_ERROR = u"Can't open shared memory; no answer from server (%s)"
+CR_SHARED_MEMORY_CONNECT_SET_ERROR = u"Can't open shared memory; cannot send request event to server (%s)"
+CR_CONN_UNKNOW_PROTOCOL = u"Wrong or unknown protocol"
+CR_INVALID_CONN_HANDLE = u"Invalid connection handle"
+CR_UNUSED_1 = u"Connection using old (pre-4.1.1) authentication protocol refused (client option 'secure_auth' enabled)"
+CR_FETCH_CANCELED = u"Row retrieval was canceled by mysql_stmt_close() call"
+CR_NO_DATA = u"Attempt to read column without prior row fetch"
+CR_NO_STMT_METADATA = u"Prepared statement contains no metadata"
+CR_NO_RESULT_SET = u"Attempt to read a row while there is no result set associated with the statement"
+CR_NOT_IMPLEMENTED = u"This feature is not implemented yet"
+CR_SERVER_LOST_EXTENDED = u"Lost connection to MySQL server at '%s', system error: %s"
+CR_STMT_CLOSED = u"Statement closed indirectly because of a preceding %s() call"
+CR_NEW_STMT_METADATA = u"The number of columns in the result set differs from the number of bound buffers. You must reset the statement, rebind the result set columns, and execute the statement again"
+CR_ALREADY_CONNECTED = u"This handle is already connected. Use a separate handle for each connection."
+CR_AUTH_PLUGIN_CANNOT_LOAD = u"Authentication plugin '%s' cannot be loaded: %s"
+CR_DUPLICATE_CONNECTION_ATTR = u"There is an attribute with the same name already"
+CR_AUTH_PLUGIN_ERR = u"Authentication plugin '%s' reported error: %s"
+CR_INSECURE_API_ERR = u"Insecure API function call: '%s' Use instead: '%s'"
+CR_FILE_NAME_TOO_LONG = u"File name is too long"
+CR_SSL_FIPS_MODE_ERR = u"Set FIPS mode ON/STRICT failed"
+CR_DEPRECATED_COMPRESSION_NOT_SUPPORTED = u"Compression protocol not supported with asynchronous protocol"
+CR_COMPRESSION_WRONGLY_CONFIGURED = u"Connection failed due to wrongly configured compression algorithm"
+CR_KERBEROS_USER_NOT_FOUND = u"SSO user not found, Please perform SSO authentication using kerberos."
+CR_LOAD_DATA_LOCAL_INFILE_REJECTED = u"LOAD DATA LOCAL INFILE file request rejected due to restrictions on access."
+CR_LOAD_DATA_LOCAL_INFILE_REALPATH_FAIL = u"Determining the real path for '%s' failed with error (%s): %s"
+CR_DNS_SRV_LOOKUP_FAILED = u"DNS SRV lookup failed with error : %s"
+CR_MANDATORY_TRACKER_NOT_FOUND = u"Client does not recognise tracker type %s marked as mandatory by server."
+CR_INVALID_FACTOR_NO = u"Invalid first argument for MYSQL_OPT_USER_PASSWORD option. Valid value should be between 1 and 3 inclusive."
+# End MySQL Error messages
+
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__init__.py b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..67dcf8de566a2514ea73afcc66645cff4b756388
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__init__.py
@@ -0,0 +1,480 @@
+# Copyright (c) 2017, 2020, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""This module contains the implementation of a helper class for MySQL X
+Protobuf messages."""
+
+try:
+ ModuleNotFoundError
+except NameError:
+ ModuleNotFoundError = ImportError
+
+_SERVER_MESSAGES_TUPLES = (
+ ("Mysqlx.ServerMessages.Type.OK",
+ "Mysqlx.Ok"),
+ ("Mysqlx.ServerMessages.Type.ERROR",
+ "Mysqlx.Error"),
+ ("Mysqlx.ServerMessages.Type.CONN_CAPABILITIES",
+ "Mysqlx.Connection.Capabilities"),
+ ("Mysqlx.ServerMessages.Type.SESS_AUTHENTICATE_CONTINUE",
+ "Mysqlx.Session.AuthenticateContinue"),
+ ("Mysqlx.ServerMessages.Type.SESS_AUTHENTICATE_OK",
+ "Mysqlx.Session.AuthenticateOk"),
+ ("Mysqlx.ServerMessages.Type.NOTICE",
+ "Mysqlx.Notice.Frame"),
+ ("Mysqlx.ServerMessages.Type.RESULTSET_COLUMN_META_DATA",
+ "Mysqlx.Resultset.ColumnMetaData"),
+ ("Mysqlx.ServerMessages.Type.RESULTSET_ROW",
+ "Mysqlx.Resultset.Row"),
+ ("Mysqlx.ServerMessages.Type.RESULTSET_FETCH_DONE",
+ "Mysqlx.Resultset.FetchDone"),
+ ("Mysqlx.ServerMessages.Type.RESULTSET_FETCH_SUSPENDED",
+ "Mysqlx.Resultset.FetchSuspended"),
+ ("Mysqlx.ServerMessages.Type.RESULTSET_FETCH_DONE_MORE_RESULTSETS",
+ "Mysqlx.Resultset.FetchDoneMoreResultsets"),
+ ("Mysqlx.ServerMessages.Type.SQL_STMT_EXECUTE_OK",
+ "Mysqlx.Sql.StmtExecuteOk"),
+ ("Mysqlx.ServerMessages.Type.RESULTSET_FETCH_DONE_MORE_OUT_PARAMS",
+ "Mysqlx.Resultset.FetchDoneMoreOutParams"),
+ ("Mysqlx.ServerMessages.Type.COMPRESSION",
+ "Mysqlx.Connection.Compression"),
+)
+
+PROTOBUF_VERSION = None
+PROTOBUF_REPEATED_TYPES = [list]
+
+try:
+ import _mysqlxpb
+ SERVER_MESSAGES = dict([(int(_mysqlxpb.enum_value(key)), val)
+ for key, val in _SERVER_MESSAGES_TUPLES])
+ HAVE_MYSQLXPB_CEXT = True
+except ImportError:
+ HAVE_MYSQLXPB_CEXT = False
+
+from ..helpers import BYTE_TYPES, NUMERIC_TYPES, encode_to_bytes
+
+try:
+ from google import protobuf
+ from google.protobuf import descriptor_database
+ from google.protobuf import descriptor_pb2
+ from google.protobuf import descriptor_pool
+ from google.protobuf import message_factory
+ from google.protobuf.internal.containers import (
+ RepeatedCompositeFieldContainer)
+ try:
+ from google.protobuf.pyext._message import (
+ RepeatedCompositeContainer)
+ PROTOBUF_REPEATED_TYPES.append(RepeatedCompositeContainer)
+ except ImportError:
+ pass
+
+ PROTOBUF_REPEATED_TYPES.append(RepeatedCompositeFieldContainer)
+ if hasattr(protobuf, "__version__"):
+ # Only Protobuf versions >=3.0.0 provide `__version__`
+ PROTOBUF_VERSION = protobuf.__version__
+
+ from . import mysqlx_connection_pb2
+ from . import mysqlx_crud_pb2
+ from . import mysqlx_cursor_pb2
+ from . import mysqlx_datatypes_pb2
+ from . import mysqlx_expect_pb2
+ from . import mysqlx_expr_pb2
+ from . import mysqlx_notice_pb2
+ from . import mysqlx_pb2
+ from . import mysqlx_prepare_pb2
+ from . import mysqlx_resultset_pb2
+ from . import mysqlx_session_pb2
+ from . import mysqlx_sql_pb2
+
+ # Dictionary with all messages descriptors
+ _MESSAGES = {}
+
+ # Mysqlx
+ for key, val in mysqlx_pb2.ClientMessages.Type.items():
+ _MESSAGES["Mysqlx.ClientMessages.Type.{0}".format(key)] = val
+ for key, val in mysqlx_pb2.ServerMessages.Type.items():
+ _MESSAGES["Mysqlx.ServerMessages.Type.{0}".format(key)] = val
+ for key, val in mysqlx_pb2.Error.Severity.items():
+ _MESSAGES["Mysqlx.Error.Severity.{0}".format(key)] = val
+
+ # Mysqlx.Crud
+ for key, val in mysqlx_crud_pb2.DataModel.items():
+ _MESSAGES["Mysqlx.Crud.DataModel.{0}".format(key)] = val
+ for key, val in mysqlx_crud_pb2.Find.RowLock.items():
+ _MESSAGES["Mysqlx.Crud.Find.RowLock.{0}".format(key)] = val
+ for key, val in mysqlx_crud_pb2.Order.Direction.items():
+ _MESSAGES["Mysqlx.Crud.Order.Direction.{0}".format(key)] = val
+ for key, val in mysqlx_crud_pb2.UpdateOperation.UpdateType.items():
+ _MESSAGES["Mysqlx.Crud.UpdateOperation.UpdateType.{0}".format(key)] = val
+
+ # Mysqlx.Datatypes
+ for key, val in mysqlx_datatypes_pb2.Scalar.Type.items():
+ _MESSAGES["Mysqlx.Datatypes.Scalar.Type.{0}".format(key)] = val
+ for key, val in mysqlx_datatypes_pb2.Any.Type.items():
+ _MESSAGES["Mysqlx.Datatypes.Any.Type.{0}".format(key)] = val
+
+ # Mysqlx.Expect
+ for key, val in mysqlx_expect_pb2.Open.Condition.ConditionOperation.items():
+ _MESSAGES["Mysqlx.Expect.Open.Condition.ConditionOperation.{0}"
+ "".format(key)] = val
+ for key, val in mysqlx_expect_pb2.Open.Condition.Key.items():
+ _MESSAGES["Mysqlx.Expect.Open.Condition.Key.{0}"
+ "".format(key)] = val
+ for key, val in mysqlx_expect_pb2.Open.CtxOperation.items():
+ _MESSAGES["Mysqlx.Expect.Open.CtxOperation.{0}".format(key)] = val
+
+ # Mysqlx.Expr
+ for key, val in mysqlx_expr_pb2.Expr.Type.items():
+ _MESSAGES["Mysqlx.Expr.Expr.Type.{0}".format(key)] = val
+ for key, val in mysqlx_expr_pb2.DocumentPathItem.Type.items():
+ _MESSAGES["Mysqlx.Expr.DocumentPathItem.Type.{0}".format(key)] = val
+
+ # Mysqlx.Notice
+ for key, val in mysqlx_notice_pb2.Frame.Scope.items():
+ _MESSAGES["Mysqlx.Notice.Frame.Scope.{0}".format(key)] = val
+ for key, val in mysqlx_notice_pb2.Warning.Level.items():
+ _MESSAGES["Mysqlx.Notice.Warning.Level.{0}".format(key)] = val
+ for key, val in mysqlx_notice_pb2.SessionStateChanged.Parameter.items():
+ _MESSAGES["Mysqlx.Notice.SessionStateChanged.Parameter.{0}"
+ "".format(key)] = val
+
+ # Mysql.Prepare
+ for key, val in mysqlx_prepare_pb2.Prepare.OneOfMessage.Type.items():
+ _MESSAGES["Mysqlx.Prepare.Prepare.OneOfMessage.Type.{0}"
+ "".format(key)] = val
+
+ # Mysql.Resultset
+ for key, val in mysqlx_resultset_pb2.ColumnMetaData.FieldType.items():
+ _MESSAGES["Mysqlx.Resultset.ColumnMetaData.FieldType.{0}".format(key)] = val
+
+ # Add messages to the descriptor pool
+ _DESCRIPTOR_DB = descriptor_database.DescriptorDatabase()
+ _DESCRIPTOR_POOL = descriptor_pool.DescriptorPool(_DESCRIPTOR_DB)
+
+ _DESCRIPTOR_DB.Add(descriptor_pb2.FileDescriptorProto.FromString(
+ mysqlx_connection_pb2.DESCRIPTOR.serialized_pb))
+ _DESCRIPTOR_DB.Add(descriptor_pb2.FileDescriptorProto.FromString(
+ mysqlx_crud_pb2.DESCRIPTOR.serialized_pb))
+ _DESCRIPTOR_DB.Add(descriptor_pb2.FileDescriptorProto.FromString(
+ mysqlx_cursor_pb2.DESCRIPTOR.serialized_pb))
+ _DESCRIPTOR_DB.Add(descriptor_pb2.FileDescriptorProto.FromString(
+ mysqlx_datatypes_pb2.DESCRIPTOR.serialized_pb))
+ _DESCRIPTOR_DB.Add(descriptor_pb2.FileDescriptorProto.FromString(
+ mysqlx_expect_pb2.DESCRIPTOR.serialized_pb))
+ _DESCRIPTOR_DB.Add(descriptor_pb2.FileDescriptorProto.FromString(
+ mysqlx_expr_pb2.DESCRIPTOR.serialized_pb))
+ _DESCRIPTOR_DB.Add(descriptor_pb2.FileDescriptorProto.FromString(
+ mysqlx_notice_pb2.DESCRIPTOR.serialized_pb))
+ _DESCRIPTOR_DB.Add(descriptor_pb2.FileDescriptorProto.FromString(
+ mysqlx_pb2.DESCRIPTOR.serialized_pb))
+ _DESCRIPTOR_DB.Add(descriptor_pb2.FileDescriptorProto.FromString(
+ mysqlx_prepare_pb2.DESCRIPTOR.serialized_pb))
+ _DESCRIPTOR_DB.Add(descriptor_pb2.FileDescriptorProto.FromString(
+ mysqlx_resultset_pb2.DESCRIPTOR.serialized_pb))
+ _DESCRIPTOR_DB.Add(descriptor_pb2.FileDescriptorProto.FromString(
+ mysqlx_session_pb2.DESCRIPTOR.serialized_pb))
+ _DESCRIPTOR_DB.Add(descriptor_pb2.FileDescriptorProto.FromString(
+ mysqlx_sql_pb2.DESCRIPTOR.serialized_pb))
+
+ SERVER_MESSAGES = dict(
+ [(_MESSAGES[key], val) for key, val in _SERVER_MESSAGES_TUPLES]
+ )
+ HAVE_PROTOBUF = True
+ HAVE_PROTOBUF_ERROR = None
+
+ class _mysqlxpb_pure(object):
+ """This class implements the methods in pure Python used by the
+ _mysqlxpb C++ extension."""
+
+ factory = message_factory.MessageFactory()
+
+ @staticmethod
+ def new_message(name):
+ cls = _mysqlxpb_pure.factory.GetPrototype(
+ _DESCRIPTOR_POOL.FindMessageTypeByName(name))
+ return cls()
+
+ @staticmethod
+ def enum_value(key):
+ return _MESSAGES[key]
+
+ @staticmethod
+ def serialize_message(msg):
+ return msg.SerializeToString()
+
+ @staticmethod
+ def serialize_partial_message(msg):
+ return msg.SerializePartialToString()
+
+ @staticmethod
+ def parse_message(msg_type_name, payload):
+ msg = _mysqlxpb_pure.new_message(msg_type_name)
+ msg.ParseFromString(payload)
+ return msg
+
+ @staticmethod
+ def parse_server_message(msg_type, payload):
+ msg_type_name = SERVER_MESSAGES.get(msg_type)
+ if not msg_type_name:
+ raise ValueError("Unknown msg_type: {0}".format(msg_type))
+ msg = _mysqlxpb_pure.new_message(msg_type_name)
+ msg.ParseFromString(payload)
+ return msg
+except (ImportError, ModuleNotFoundError, SyntaxError, TypeError) as err:
+ HAVE_PROTOBUF = False
+ HAVE_PROTOBUF_ERROR = err if PROTOBUF_VERSION is not None \
+ else "Protobuf >=3.0.0 is required"
+ if not HAVE_MYSQLXPB_CEXT:
+ raise ImportError("Protobuf is not available: {}"
+ "".format(HAVE_PROTOBUF_ERROR))
+
+CRUD_PREPARE_MAPPING = {
+ "Mysqlx.ClientMessages.Type.CRUD_FIND": (
+ "Mysqlx.Prepare.Prepare.OneOfMessage.Type.FIND", "find"),
+ "Mysqlx.ClientMessages.Type.CRUD_INSERT": (
+ "Mysqlx.Prepare.Prepare.OneOfMessage.Type.INSERT", "insert"),
+ "Mysqlx.ClientMessages.Type.CRUD_UPDATE": (
+ "Mysqlx.Prepare.Prepare.OneOfMessage.Type.UPDATE", "update"),
+ "Mysqlx.ClientMessages.Type.CRUD_DELETE": (
+ "Mysqlx.Prepare.Prepare.OneOfMessage.Type.DELETE", "delete"),
+ "Mysqlx.ClientMessages.Type.SQL_STMT_EXECUTE": (
+ "Mysqlx.Prepare.Prepare.OneOfMessage.Type.STMT", "stmt_execute")
+}
+
+
+class Protobuf(object):
+ """Protobuf class acts as a container of the Protobuf message class.
+ It allows the switch between the C extension and pure Python implementation
+ message handlers, by patching the `mysqlxpb` class attribute.
+ """
+ mysqlxpb = _mysqlxpb if HAVE_MYSQLXPB_CEXT else _mysqlxpb_pure
+ use_pure = False if HAVE_MYSQLXPB_CEXT else True
+
+ @staticmethod
+ def set_use_pure(use_pure):
+ """Sets whether to use the C extension or pure Python implementation.
+
+ Args:
+ use_pure (bool): `True` to use pure Python implementation.
+ """
+ if use_pure and not HAVE_PROTOBUF:
+ raise ImportError("Protobuf is not available: {}"
+ "".format(HAVE_PROTOBUF_ERROR))
+ elif not use_pure and not HAVE_MYSQLXPB_CEXT:
+ raise ImportError("MySQL X Protobuf C extension is not available")
+ Protobuf.mysqlxpb = _mysqlxpb_pure if use_pure else _mysqlxpb
+ Protobuf.use_pure = use_pure
+
+
+class Message(object):
+ """Helper class for interfacing with the MySQL X Protobuf extension.
+
+ Args:
+ msg_type_name (string): Protobuf type name.
+ **kwargs: Arbitrary keyword arguments with values for the message.
+ """
+ def __init__(self, msg_type_name=None, **kwargs):
+ self.__dict__["_msg"] = Protobuf.mysqlxpb.new_message(msg_type_name) \
+ if msg_type_name else None
+ for key, value in kwargs.items():
+ self.__setattr__(key, value)
+
+ def __setattr__(self, name, value):
+ if Protobuf.use_pure:
+ if isinstance(value, str):
+ setattr(self._msg, name, encode_to_bytes(value))
+ elif isinstance(value, (NUMERIC_TYPES, BYTE_TYPES)):
+ setattr(self._msg, name, value)
+ elif isinstance(value, list):
+ getattr(self._msg, name).extend(value)
+ elif isinstance(value, Message):
+ getattr(self._msg, name).MergeFrom(value.get_message())
+ else:
+ getattr(self._msg, name).MergeFrom(value)
+ else:
+ self._msg[name] = value.get_message() \
+ if isinstance(value, Message) else value
+
+ def __getattr__(self, name):
+ try:
+ return self._msg[name] if not Protobuf.use_pure \
+ else getattr(self._msg, name)
+ except KeyError:
+ raise AttributeError
+
+ def __setitem__(self, name, value):
+ self.__setattr__(name, value)
+
+ def __getitem__(self, name):
+ return self.__getattr__(name)
+
+ def get(self, name, default=None):
+ """Returns the value of an element of the message dictionary.
+
+ Args:
+ name (string): Key name.
+ default (object): The default value if the key does not exists.
+
+ Returns:
+ object: The value of the provided key name.
+ """
+ return self.__dict__["_msg"].get(name, default) \
+ if not Protobuf.use_pure \
+ else getattr(self.__dict__["_msg"], name, default)
+
+ def set_message(self, msg):
+ """Sets the message.
+
+ Args:
+ msg (dict): Dictionary representing a message.
+ """
+ self.__dict__["_msg"] = msg
+
+ def get_message(self):
+ """Returns the dictionary representing a message containing parsed
+ data.
+
+ Returns:
+ dict: The dictionary representing a message containing parsed data.
+ """
+ return self.__dict__["_msg"]
+
+ def serialize_to_string(self):
+ """Serializes a message to a string.
+
+ Returns:
+ str: A string representing a message containing parsed data.
+ """
+ return Protobuf.mysqlxpb.serialize_message(self._msg)
+
+ def serialize_partial_to_string(self):
+ """Serializes the protocol message to a binary string.
+
+ This method is similar to serialize_to_string but doesn't check if the
+ message is initialized.
+
+ Returns:
+ str: A string representation of the partial message.
+ """
+ return Protobuf.mysqlxpb.serialize_partial_message(self._msg)
+
+ @property
+ def type(self):
+ """string: Message type name."""
+ return self._msg["_mysqlxpb_type_name"] if not Protobuf.use_pure \
+ else self._msg.DESCRIPTOR.full_name
+
+ @staticmethod
+ def parse(msg_type_name, payload):
+ """Creates a new message, initialized with parsed data.
+
+ Args:
+ msg_type_name (string): Message type name.
+ payload (string): Serialized message data.
+
+ Returns:
+ dict: The dictionary representing a message containing parsed data.
+
+ .. versionadded:: 8.0.21
+ """
+ return Protobuf.mysqlxpb.parse_message(msg_type_name, payload)
+
+ @staticmethod
+ def byte_size(msg):
+ """Returns the size of the message in bytes.
+
+ Args:
+ msg (mysqlx.protobuf.Message): MySQL X Protobuf Message.
+
+ Returns:
+ int: Size of the message in bytes.
+
+ .. versionadded:: 8.0.21
+ """
+ return msg.ByteSize() if Protobuf.use_pure \
+ else len(encode_to_bytes(msg.serialize_to_string()))
+
+ @staticmethod
+ def parse_from_server(msg_type, payload):
+ """Creates a new server-side message, initialized with parsed data.
+
+ Args:
+ msg_type (int): Message type.
+ payload (string): Serialized message data.
+
+ Returns:
+ dict: The dictionary representing a message containing parsed data.
+ """
+ return Protobuf.mysqlxpb.parse_server_message(msg_type, payload)
+
+ @classmethod
+ def from_message(cls, msg_type_name, payload):
+ """Creates a new message, initialized with parsed data and returns a
+ :class:`mysqlx.protobuf.Message` object.
+
+ Args:
+ msg_type_name (string): Message type name.
+ payload (string): Serialized message data.
+
+ Returns:
+ mysqlx.protobuf.Message: The Message representing a message
+ containing parsed data.
+ """
+ msg = cls()
+ msg.set_message(Protobuf.mysqlxpb.parse_message(msg_type_name, payload))
+ return msg
+
+ @classmethod
+ def from_server_message(cls, msg_type, payload):
+ """Creates a new server-side message, initialized with parsed data and
+ returns a :class:`mysqlx.protobuf.Message` object.
+
+ Args:
+ msg_type (int): Message type.
+ payload (string): Serialized message data.
+
+ Returns:
+ mysqlx.protobuf.Message: The Message representing a message
+ containing parsed data.
+ """
+ msg = cls()
+ msg.set_message(
+ Protobuf.mysqlxpb.parse_server_message(msg_type, payload))
+ return msg
+
+
+def mysqlxpb_enum(name):
+ """Returns the value of a MySQL X Protobuf enumerator.
+
+ Args:
+ name (string): MySQL X Protobuf enumerator name.
+
+ Returns:
+ int: Value of the enumerator.
+ """
+ return Protobuf.mysqlxpb.enum_value(name)
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ec008435b78f8f86237fc69f0b660d254cb73121
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_connection_pb2.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_connection_pb2.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..98fecde32dbc237d026de49455d768bc346b66f5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_connection_pb2.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_crud_pb2.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_crud_pb2.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b539bfda01e7f029f4af4448ffe8e378ba5b6998
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_crud_pb2.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_cursor_pb2.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_cursor_pb2.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..80c3b63f0a581c4f4f70937f83057746fdb47acb
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_cursor_pb2.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_datatypes_pb2.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_datatypes_pb2.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4b3b06441c3e6c2472e960a5965e4cdca175d72d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_datatypes_pb2.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_expect_pb2.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_expect_pb2.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0705cee4f6a5e49dd0177a32056272a965eb5894
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_expect_pb2.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_expr_pb2.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_expr_pb2.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..46a59253bab2f7cd5e46e7c6b5516974ad515207
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_expr_pb2.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_notice_pb2.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_notice_pb2.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..cea02f1fac255ffe078f8520bd675bb0478b8289
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_notice_pb2.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_pb2.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_pb2.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6588b78c686ff53aca2e445afcd6878ad6b0a009
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_pb2.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_prepare_pb2.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_prepare_pb2.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..29a9f2c5b8d08e44491808f39e56a3af34c71973
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_prepare_pb2.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_resultset_pb2.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_resultset_pb2.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a543d9e1e0974cbe8c565a50308c74f3bdcfbce2
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_resultset_pb2.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_session_pb2.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_session_pb2.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..94e78ab073d19870bfe909ff1e4f0daebff3e1bc
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_session_pb2.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_sql_pb2.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_sql_pb2.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6f9798194138ab41ab7a38b5ff9f7916b474664f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/__pycache__/mysqlx_sql_pb2.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_connection_pb2.py b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_connection_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..0ec02820ea7935ead2604ff4969a410e4e6a0603
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_connection_pb2.py
@@ -0,0 +1,316 @@
+# Copyright (c) 2017, 2020, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: mysqlx_connection.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from mysqlx.protobuf import mysqlx_datatypes_pb2 as mysqlx__datatypes__pb2
+from mysqlx.protobuf import mysqlx_pb2 as mysqlx__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name='mysqlx_connection.proto',
+ package='Mysqlx.Connection',
+ syntax='proto2',
+ serialized_pb=_b('\n\x17mysqlx_connection.proto\x12\x11Mysqlx.Connection\x1a\x16mysqlx_datatypes.proto\x1a\x0cmysqlx.proto\"@\n\nCapability\x12\x0c\n\x04name\x18\x01 \x02(\t\x12$\n\x05value\x18\x02 \x02(\x0b\x32\x15.Mysqlx.Datatypes.Any\"C\n\x0c\x43\x61pabilities\x12\x33\n\x0c\x63\x61pabilities\x18\x01 \x03(\x0b\x32\x1d.Mysqlx.Connection.Capability\"\x11\n\x0f\x43\x61pabilitiesGet\"H\n\x0f\x43\x61pabilitiesSet\x12\x35\n\x0c\x63\x61pabilities\x18\x01 \x02(\x0b\x32\x1f.Mysqlx.Connection.Capabilities\"\x07\n\x05\x43lose\"\xa5\x01\n\x0b\x43ompression\x12\x19\n\x11uncompressed_size\x18\x01 \x01(\x04\x12\x34\n\x0fserver_messages\x18\x02 \x01(\x0e\x32\x1b.Mysqlx.ServerMessages.Type\x12\x34\n\x0f\x63lient_messages\x18\x03 \x01(\x0e\x32\x1b.Mysqlx.ClientMessages.Type\x12\x0f\n\x07payload\x18\x04 \x02(\x0c\x42\x1b\n\x17\x63om.mysql.cj.x.protobufH\x03')
+ ,
+ dependencies=[mysqlx__datatypes__pb2.DESCRIPTOR,mysqlx__pb2.DESCRIPTOR,])
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+
+
+
+_CAPABILITY = _descriptor.Descriptor(
+ name='Capability',
+ full_name='Mysqlx.Connection.Capability',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='name', full_name='Mysqlx.Connection.Capability.name', index=0,
+ number=1, type=9, cpp_type=9, label=2,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='value', full_name='Mysqlx.Connection.Capability.value', index=1,
+ number=2, type=11, cpp_type=10, label=2,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=84,
+ serialized_end=148,
+)
+
+
+_CAPABILITIES = _descriptor.Descriptor(
+ name='Capabilities',
+ full_name='Mysqlx.Connection.Capabilities',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='capabilities', full_name='Mysqlx.Connection.Capabilities.capabilities', index=0,
+ number=1, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=150,
+ serialized_end=217,
+)
+
+
+_CAPABILITIESGET = _descriptor.Descriptor(
+ name='CapabilitiesGet',
+ full_name='Mysqlx.Connection.CapabilitiesGet',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=219,
+ serialized_end=236,
+)
+
+
+_CAPABILITIESSET = _descriptor.Descriptor(
+ name='CapabilitiesSet',
+ full_name='Mysqlx.Connection.CapabilitiesSet',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='capabilities', full_name='Mysqlx.Connection.CapabilitiesSet.capabilities', index=0,
+ number=1, type=11, cpp_type=10, label=2,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=238,
+ serialized_end=310,
+)
+
+
+_CLOSE = _descriptor.Descriptor(
+ name='Close',
+ full_name='Mysqlx.Connection.Close',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=312,
+ serialized_end=319,
+)
+
+
+_COMPRESSION = _descriptor.Descriptor(
+ name='Compression',
+ full_name='Mysqlx.Connection.Compression',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='uncompressed_size', full_name='Mysqlx.Connection.Compression.uncompressed_size', index=0,
+ number=1, type=4, cpp_type=4, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='server_messages', full_name='Mysqlx.Connection.Compression.server_messages', index=1,
+ number=2, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='client_messages', full_name='Mysqlx.Connection.Compression.client_messages', index=2,
+ number=3, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=1,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='payload', full_name='Mysqlx.Connection.Compression.payload', index=3,
+ number=4, type=12, cpp_type=9, label=2,
+ has_default_value=False, default_value=_b(""),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=322,
+ serialized_end=487,
+)
+
+_CAPABILITY.fields_by_name['value'].message_type = mysqlx__datatypes__pb2._ANY
+_CAPABILITIES.fields_by_name['capabilities'].message_type = _CAPABILITY
+_CAPABILITIESSET.fields_by_name['capabilities'].message_type = _CAPABILITIES
+_COMPRESSION.fields_by_name['server_messages'].enum_type = mysqlx__pb2._SERVERMESSAGES_TYPE
+_COMPRESSION.fields_by_name['client_messages'].enum_type = mysqlx__pb2._CLIENTMESSAGES_TYPE
+DESCRIPTOR.message_types_by_name['Capability'] = _CAPABILITY
+DESCRIPTOR.message_types_by_name['Capabilities'] = _CAPABILITIES
+DESCRIPTOR.message_types_by_name['CapabilitiesGet'] = _CAPABILITIESGET
+DESCRIPTOR.message_types_by_name['CapabilitiesSet'] = _CAPABILITIESSET
+DESCRIPTOR.message_types_by_name['Close'] = _CLOSE
+DESCRIPTOR.message_types_by_name['Compression'] = _COMPRESSION
+
+Capability = _reflection.GeneratedProtocolMessageType('Capability', (_message.Message,), dict(
+ DESCRIPTOR = _CAPABILITY,
+ __module__ = 'mysqlx_connection_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Connection.Capability)
+ ))
+_sym_db.RegisterMessage(Capability)
+
+Capabilities = _reflection.GeneratedProtocolMessageType('Capabilities', (_message.Message,), dict(
+ DESCRIPTOR = _CAPABILITIES,
+ __module__ = 'mysqlx_connection_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Connection.Capabilities)
+ ))
+_sym_db.RegisterMessage(Capabilities)
+
+CapabilitiesGet = _reflection.GeneratedProtocolMessageType('CapabilitiesGet', (_message.Message,), dict(
+ DESCRIPTOR = _CAPABILITIESGET,
+ __module__ = 'mysqlx_connection_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Connection.CapabilitiesGet)
+ ))
+_sym_db.RegisterMessage(CapabilitiesGet)
+
+CapabilitiesSet = _reflection.GeneratedProtocolMessageType('CapabilitiesSet', (_message.Message,), dict(
+ DESCRIPTOR = _CAPABILITIESSET,
+ __module__ = 'mysqlx_connection_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Connection.CapabilitiesSet)
+ ))
+_sym_db.RegisterMessage(CapabilitiesSet)
+
+Close = _reflection.GeneratedProtocolMessageType('Close', (_message.Message,), dict(
+ DESCRIPTOR = _CLOSE,
+ __module__ = 'mysqlx_connection_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Connection.Close)
+ ))
+_sym_db.RegisterMessage(Close)
+
+Compression = _reflection.GeneratedProtocolMessageType('Compression', (_message.Message,), dict(
+ DESCRIPTOR = _COMPRESSION,
+ __module__ = 'mysqlx_connection_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Connection.Compression)
+ ))
+_sym_db.RegisterMessage(Compression)
+
+
+DESCRIPTOR.has_options = True
+DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.mysql.cj.x.protobufH\003'))
+# @@protoc_insertion_point(module_scope)
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_crud_pb2.py b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_crud_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..a266be55a3162d86d90a09c45c56dcd3d95efa10
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_crud_pb2.py
@@ -0,0 +1,1295 @@
+# Copyright (c) 2017, 2020, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: mysqlx_crud.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from mysqlx.protobuf import mysqlx_expr_pb2 as mysqlx__expr__pb2
+from mysqlx.protobuf import mysqlx_datatypes_pb2 as mysqlx__datatypes__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name='mysqlx_crud.proto',
+ package='Mysqlx.Crud',
+ syntax='proto2',
+ serialized_pb=_b('\n\x11mysqlx_crud.proto\x12\x0bMysqlx.Crud\x1a\x11mysqlx_expr.proto\x1a\x16mysqlx_datatypes.proto\"[\n\x06\x43olumn\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05\x61lias\x18\x02 \x01(\t\x12\x34\n\rdocument_path\x18\x03 \x03(\x0b\x32\x1d.Mysqlx.Expr.DocumentPathItem\">\n\nProjection\x12!\n\x06source\x18\x01 \x02(\x0b\x32\x11.Mysqlx.Expr.Expr\x12\r\n\x05\x61lias\x18\x02 \x01(\t\"*\n\nCollection\x12\x0c\n\x04name\x18\x01 \x02(\t\x12\x0e\n\x06schema\x18\x02 \x01(\t\"*\n\x05Limit\x12\x11\n\trow_count\x18\x01 \x02(\x04\x12\x0e\n\x06offset\x18\x02 \x01(\x04\"T\n\tLimitExpr\x12$\n\trow_count\x18\x01 \x02(\x0b\x32\x11.Mysqlx.Expr.Expr\x12!\n\x06offset\x18\x02 \x01(\x0b\x32\x11.Mysqlx.Expr.Expr\"~\n\x05Order\x12\x1f\n\x04\x65xpr\x18\x01 \x02(\x0b\x32\x11.Mysqlx.Expr.Expr\x12\x34\n\tdirection\x18\x02 \x01(\x0e\x32\x1c.Mysqlx.Crud.Order.Direction:\x03\x41SC\"\x1e\n\tDirection\x12\x07\n\x03\x41SC\x10\x01\x12\x08\n\x04\x44\x45SC\x10\x02\"\xac\x02\n\x0fUpdateOperation\x12-\n\x06source\x18\x01 \x02(\x0b\x32\x1d.Mysqlx.Expr.ColumnIdentifier\x12:\n\toperation\x18\x02 \x02(\x0e\x32\'.Mysqlx.Crud.UpdateOperation.UpdateType\x12 \n\x05value\x18\x03 \x01(\x0b\x32\x11.Mysqlx.Expr.Expr\"\x8b\x01\n\nUpdateType\x12\x07\n\x03SET\x10\x01\x12\x0f\n\x0bITEM_REMOVE\x10\x02\x12\x0c\n\x08ITEM_SET\x10\x03\x12\x10\n\x0cITEM_REPLACE\x10\x04\x12\x0e\n\nITEM_MERGE\x10\x05\x12\x10\n\x0c\x41RRAY_INSERT\x10\x06\x12\x10\n\x0c\x41RRAY_APPEND\x10\x07\x12\x0f\n\x0bMERGE_PATCH\x10\x08\"\xe4\x04\n\x04\x46ind\x12+\n\ncollection\x18\x02 \x02(\x0b\x32\x17.Mysqlx.Crud.Collection\x12*\n\ndata_model\x18\x03 \x01(\x0e\x32\x16.Mysqlx.Crud.DataModel\x12+\n\nprojection\x18\x04 \x03(\x0b\x32\x17.Mysqlx.Crud.Projection\x12&\n\x04\x61rgs\x18\x0b \x03(\x0b\x32\x18.Mysqlx.Datatypes.Scalar\x12#\n\x08\x63riteria\x18\x05 \x01(\x0b\x32\x11.Mysqlx.Expr.Expr\x12!\n\x05limit\x18\x06 \x01(\x0b\x32\x12.Mysqlx.Crud.Limit\x12!\n\x05order\x18\x07 \x03(\x0b\x32\x12.Mysqlx.Crud.Order\x12#\n\x08grouping\x18\x08 
\x03(\x0b\x32\x11.Mysqlx.Expr.Expr\x12,\n\x11grouping_criteria\x18\t \x01(\x0b\x32\x11.Mysqlx.Expr.Expr\x12*\n\x07locking\x18\x0c \x01(\x0e\x32\x19.Mysqlx.Crud.Find.RowLock\x12\x39\n\x0flocking_options\x18\r \x01(\x0e\x32 .Mysqlx.Crud.Find.RowLockOptions\x12*\n\nlimit_expr\x18\x0e \x01(\x0b\x32\x16.Mysqlx.Crud.LimitExpr\".\n\x07RowLock\x12\x0f\n\x0bSHARED_LOCK\x10\x01\x12\x12\n\x0e\x45XCLUSIVE_LOCK\x10\x02\"-\n\x0eRowLockOptions\x12\n\n\x06NOWAIT\x10\x01\x12\x0f\n\x0bSKIP_LOCKED\x10\x02\"\xa2\x02\n\x06Insert\x12+\n\ncollection\x18\x01 \x02(\x0b\x32\x17.Mysqlx.Crud.Collection\x12*\n\ndata_model\x18\x02 \x01(\x0e\x32\x16.Mysqlx.Crud.DataModel\x12\'\n\nprojection\x18\x03 \x03(\x0b\x32\x13.Mysqlx.Crud.Column\x12)\n\x03row\x18\x04 \x03(\x0b\x32\x1c.Mysqlx.Crud.Insert.TypedRow\x12&\n\x04\x61rgs\x18\x05 \x03(\x0b\x32\x18.Mysqlx.Datatypes.Scalar\x12\x15\n\x06upsert\x18\x06 \x01(\x08:\x05\x66\x61lse\x1a,\n\x08TypedRow\x12 \n\x05\x66ield\x18\x01 \x03(\x0b\x32\x11.Mysqlx.Expr.Expr\"\xd1\x02\n\x06Update\x12+\n\ncollection\x18\x02 \x02(\x0b\x32\x17.Mysqlx.Crud.Collection\x12*\n\ndata_model\x18\x03 \x01(\x0e\x32\x16.Mysqlx.Crud.DataModel\x12#\n\x08\x63riteria\x18\x04 \x01(\x0b\x32\x11.Mysqlx.Expr.Expr\x12!\n\x05limit\x18\x05 \x01(\x0b\x32\x12.Mysqlx.Crud.Limit\x12!\n\x05order\x18\x06 \x03(\x0b\x32\x12.Mysqlx.Crud.Order\x12/\n\toperation\x18\x07 \x03(\x0b\x32\x1c.Mysqlx.Crud.UpdateOperation\x12&\n\x04\x61rgs\x18\x08 \x03(\x0b\x32\x18.Mysqlx.Datatypes.Scalar\x12*\n\nlimit_expr\x18\t \x01(\x0b\x32\x16.Mysqlx.Crud.LimitExpr\"\xa0\x02\n\x06\x44\x65lete\x12+\n\ncollection\x18\x01 \x02(\x0b\x32\x17.Mysqlx.Crud.Collection\x12*\n\ndata_model\x18\x02 \x01(\x0e\x32\x16.Mysqlx.Crud.DataModel\x12#\n\x08\x63riteria\x18\x03 \x01(\x0b\x32\x11.Mysqlx.Expr.Expr\x12!\n\x05limit\x18\x04 \x01(\x0b\x32\x12.Mysqlx.Crud.Limit\x12!\n\x05order\x18\x05 \x03(\x0b\x32\x12.Mysqlx.Crud.Order\x12&\n\x04\x61rgs\x18\x06 \x03(\x0b\x32\x18.Mysqlx.Datatypes.Scalar\x12*\n\nlimit_expr\x18\x07 
\x01(\x0b\x32\x16.Mysqlx.Crud.LimitExpr\"\xbc\x02\n\nCreateView\x12+\n\ncollection\x18\x01 \x02(\x0b\x32\x17.Mysqlx.Crud.Collection\x12\x0f\n\x07\x64\x65\x66iner\x18\x02 \x01(\t\x12\x38\n\talgorithm\x18\x03 \x01(\x0e\x32\x1a.Mysqlx.Crud.ViewAlgorithm:\tUNDEFINED\x12\x37\n\x08security\x18\x04 \x01(\x0e\x32\x1c.Mysqlx.Crud.ViewSqlSecurity:\x07\x44\x45\x46INER\x12+\n\x05\x63heck\x18\x05 \x01(\x0e\x32\x1c.Mysqlx.Crud.ViewCheckOption\x12\x0e\n\x06\x63olumn\x18\x06 \x03(\t\x12\x1f\n\x04stmt\x18\x07 \x02(\x0b\x32\x11.Mysqlx.Crud.Find\x12\x1f\n\x10replace_existing\x18\x08 \x01(\x08:\x05\x66\x61lse\"\x87\x02\n\nModifyView\x12+\n\ncollection\x18\x01 \x02(\x0b\x32\x17.Mysqlx.Crud.Collection\x12\x0f\n\x07\x64\x65\x66iner\x18\x02 \x01(\t\x12-\n\talgorithm\x18\x03 \x01(\x0e\x32\x1a.Mysqlx.Crud.ViewAlgorithm\x12.\n\x08security\x18\x04 \x01(\x0e\x32\x1c.Mysqlx.Crud.ViewSqlSecurity\x12+\n\x05\x63heck\x18\x05 \x01(\x0e\x32\x1c.Mysqlx.Crud.ViewCheckOption\x12\x0e\n\x06\x63olumn\x18\x06 \x03(\t\x12\x1f\n\x04stmt\x18\x07 \x01(\x0b\x32\x11.Mysqlx.Crud.Find\"Q\n\x08\x44ropView\x12+\n\ncollection\x18\x01 \x02(\x0b\x32\x17.Mysqlx.Crud.Collection\x12\x18\n\tif_exists\x18\x02 \x01(\x08:\x05\x66\x61lse*$\n\tDataModel\x12\x0c\n\x08\x44OCUMENT\x10\x01\x12\t\n\x05TABLE\x10\x02*8\n\rViewAlgorithm\x12\r\n\tUNDEFINED\x10\x01\x12\t\n\x05MERGE\x10\x02\x12\r\n\tTEMPTABLE\x10\x03*+\n\x0fViewSqlSecurity\x12\x0b\n\x07INVOKER\x10\x01\x12\x0b\n\x07\x44\x45\x46INER\x10\x02**\n\x0fViewCheckOption\x12\t\n\x05LOCAL\x10\x01\x12\x0c\n\x08\x43\x41SCADED\x10\x02\x42\x1b\n\x17\x63om.mysql.cj.x.protobufH\x03')
+ ,
+ dependencies=[mysqlx__expr__pb2.DESCRIPTOR,mysqlx__datatypes__pb2.DESCRIPTOR,])
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+_DATAMODEL = _descriptor.EnumDescriptor(
+ name='DataModel',
+ full_name='Mysqlx.Crud.DataModel',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='DOCUMENT', index=0, number=1,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='TABLE', index=1, number=2,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=3046,
+ serialized_end=3082,
+)
+_sym_db.RegisterEnumDescriptor(_DATAMODEL)
+
+DataModel = enum_type_wrapper.EnumTypeWrapper(_DATAMODEL)
+_VIEWALGORITHM = _descriptor.EnumDescriptor(
+ name='ViewAlgorithm',
+ full_name='Mysqlx.Crud.ViewAlgorithm',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='UNDEFINED', index=0, number=1,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='MERGE', index=1, number=2,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='TEMPTABLE', index=2, number=3,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=3084,
+ serialized_end=3140,
+)
+_sym_db.RegisterEnumDescriptor(_VIEWALGORITHM)
+
+ViewAlgorithm = enum_type_wrapper.EnumTypeWrapper(_VIEWALGORITHM)
+_VIEWSQLSECURITY = _descriptor.EnumDescriptor(
+ name='ViewSqlSecurity',
+ full_name='Mysqlx.Crud.ViewSqlSecurity',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='INVOKER', index=0, number=1,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='DEFINER', index=1, number=2,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=3142,
+ serialized_end=3185,
+)
+_sym_db.RegisterEnumDescriptor(_VIEWSQLSECURITY)
+
+ViewSqlSecurity = enum_type_wrapper.EnumTypeWrapper(_VIEWSQLSECURITY)
+_VIEWCHECKOPTION = _descriptor.EnumDescriptor(
+ name='ViewCheckOption',
+ full_name='Mysqlx.Crud.ViewCheckOption',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='LOCAL', index=0, number=1,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='CASCADED', index=1, number=2,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=3187,
+ serialized_end=3229,
+)
+_sym_db.RegisterEnumDescriptor(_VIEWCHECKOPTION)
+
+ViewCheckOption = enum_type_wrapper.EnumTypeWrapper(_VIEWCHECKOPTION)
+DOCUMENT = 1
+TABLE = 2
+UNDEFINED = 1
+MERGE = 2
+TEMPTABLE = 3
+INVOKER = 1
+DEFINER = 2
+LOCAL = 1
+CASCADED = 2
+
+
+_ORDER_DIRECTION = _descriptor.EnumDescriptor(
+ name='Direction',
+ full_name='Mysqlx.Crud.Order.Direction',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='ASC', index=0, number=1,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='DESC', index=1, number=2,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=504,
+ serialized_end=534,
+)
+_sym_db.RegisterEnumDescriptor(_ORDER_DIRECTION)
+
+_UPDATEOPERATION_UPDATETYPE = _descriptor.EnumDescriptor(
+ name='UpdateType',
+ full_name='Mysqlx.Crud.UpdateOperation.UpdateType',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='SET', index=0, number=1,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='ITEM_REMOVE', index=1, number=2,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='ITEM_SET', index=2, number=3,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='ITEM_REPLACE', index=3, number=4,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='ITEM_MERGE', index=4, number=5,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='ARRAY_INSERT', index=5, number=6,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='ARRAY_APPEND', index=6, number=7,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='MERGE_PATCH', index=7, number=8,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=698,
+ serialized_end=837,
+)
+_sym_db.RegisterEnumDescriptor(_UPDATEOPERATION_UPDATETYPE)
+
+_FIND_ROWLOCK = _descriptor.EnumDescriptor(
+ name='RowLock',
+ full_name='Mysqlx.Crud.Find.RowLock',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='SHARED_LOCK', index=0, number=1,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='EXCLUSIVE_LOCK', index=1, number=2,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=1359,
+ serialized_end=1405,
+)
+_sym_db.RegisterEnumDescriptor(_FIND_ROWLOCK)
+
+_FIND_ROWLOCKOPTIONS = _descriptor.EnumDescriptor(
+ name='RowLockOptions',
+ full_name='Mysqlx.Crud.Find.RowLockOptions',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='NOWAIT', index=0, number=1,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='SKIP_LOCKED', index=1, number=2,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=1407,
+ serialized_end=1452,
+)
+_sym_db.RegisterEnumDescriptor(_FIND_ROWLOCKOPTIONS)
+
+
+_COLUMN = _descriptor.Descriptor(
+ name='Column',
+ full_name='Mysqlx.Crud.Column',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='name', full_name='Mysqlx.Crud.Column.name', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='alias', full_name='Mysqlx.Crud.Column.alias', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='document_path', full_name='Mysqlx.Crud.Column.document_path', index=2,
+ number=3, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=77,
+ serialized_end=168,
+)
+
+
+_PROJECTION = _descriptor.Descriptor(
+ name='Projection',
+ full_name='Mysqlx.Crud.Projection',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='source', full_name='Mysqlx.Crud.Projection.source', index=0,
+ number=1, type=11, cpp_type=10, label=2,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='alias', full_name='Mysqlx.Crud.Projection.alias', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=170,
+ serialized_end=232,
+)
+
+
+_COLLECTION = _descriptor.Descriptor(
+ name='Collection',
+ full_name='Mysqlx.Crud.Collection',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='name', full_name='Mysqlx.Crud.Collection.name', index=0,
+ number=1, type=9, cpp_type=9, label=2,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='schema', full_name='Mysqlx.Crud.Collection.schema', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=234,
+ serialized_end=276,
+)
+
+
+_LIMIT = _descriptor.Descriptor(
+ name='Limit',
+ full_name='Mysqlx.Crud.Limit',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='row_count', full_name='Mysqlx.Crud.Limit.row_count', index=0,
+ number=1, type=4, cpp_type=4, label=2,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='offset', full_name='Mysqlx.Crud.Limit.offset', index=1,
+ number=2, type=4, cpp_type=4, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=278,
+ serialized_end=320,
+)
+
+
+_LIMITEXPR = _descriptor.Descriptor(
+ name='LimitExpr',
+ full_name='Mysqlx.Crud.LimitExpr',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='row_count', full_name='Mysqlx.Crud.LimitExpr.row_count', index=0,
+ number=1, type=11, cpp_type=10, label=2,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='offset', full_name='Mysqlx.Crud.LimitExpr.offset', index=1,
+ number=2, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=322,
+ serialized_end=406,
+)
+
+
+_ORDER = _descriptor.Descriptor(
+ name='Order',
+ full_name='Mysqlx.Crud.Order',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='expr', full_name='Mysqlx.Crud.Order.expr', index=0,
+ number=1, type=11, cpp_type=10, label=2,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='direction', full_name='Mysqlx.Crud.Order.direction', index=1,
+ number=2, type=14, cpp_type=8, label=1,
+ has_default_value=True, default_value=1,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _ORDER_DIRECTION,
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=408,
+ serialized_end=534,
+)
+
+
+_UPDATEOPERATION = _descriptor.Descriptor(
+ name='UpdateOperation',
+ full_name='Mysqlx.Crud.UpdateOperation',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='source', full_name='Mysqlx.Crud.UpdateOperation.source', index=0,
+ number=1, type=11, cpp_type=10, label=2,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='operation', full_name='Mysqlx.Crud.UpdateOperation.operation', index=1,
+ number=2, type=14, cpp_type=8, label=2,
+ has_default_value=False, default_value=1,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='value', full_name='Mysqlx.Crud.UpdateOperation.value', index=2,
+ number=3, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _UPDATEOPERATION_UPDATETYPE,
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=537,
+ serialized_end=837,
+)
+
+
+_FIND = _descriptor.Descriptor(
+ name='Find',
+ full_name='Mysqlx.Crud.Find',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='collection', full_name='Mysqlx.Crud.Find.collection', index=0,
+ number=2, type=11, cpp_type=10, label=2,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='data_model', full_name='Mysqlx.Crud.Find.data_model', index=1,
+ number=3, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=1,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='projection', full_name='Mysqlx.Crud.Find.projection', index=2,
+ number=4, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='args', full_name='Mysqlx.Crud.Find.args', index=3,
+ number=11, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='criteria', full_name='Mysqlx.Crud.Find.criteria', index=4,
+ number=5, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='limit', full_name='Mysqlx.Crud.Find.limit', index=5,
+ number=6, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='order', full_name='Mysqlx.Crud.Find.order', index=6,
+ number=7, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='grouping', full_name='Mysqlx.Crud.Find.grouping', index=7,
+ number=8, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='grouping_criteria', full_name='Mysqlx.Crud.Find.grouping_criteria', index=8,
+ number=9, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='locking', full_name='Mysqlx.Crud.Find.locking', index=9,
+ number=12, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=1,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='locking_options', full_name='Mysqlx.Crud.Find.locking_options', index=10,
+ number=13, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=1,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='limit_expr', full_name='Mysqlx.Crud.Find.limit_expr', index=11,
+ number=14, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _FIND_ROWLOCK,
+ _FIND_ROWLOCKOPTIONS,
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=840,
+ serialized_end=1452,
+)
+
+
+_INSERT_TYPEDROW = _descriptor.Descriptor(
+ name='TypedRow',
+ full_name='Mysqlx.Crud.Insert.TypedRow',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='field', full_name='Mysqlx.Crud.Insert.TypedRow.field', index=0,
+ number=1, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=1701,
+ serialized_end=1745,
+)
+
+_INSERT = _descriptor.Descriptor(
+ name='Insert',
+ full_name='Mysqlx.Crud.Insert',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='collection', full_name='Mysqlx.Crud.Insert.collection', index=0,
+ number=1, type=11, cpp_type=10, label=2,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='data_model', full_name='Mysqlx.Crud.Insert.data_model', index=1,
+ number=2, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=1,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='projection', full_name='Mysqlx.Crud.Insert.projection', index=2,
+ number=3, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='row', full_name='Mysqlx.Crud.Insert.row', index=3,
+ number=4, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='args', full_name='Mysqlx.Crud.Insert.args', index=4,
+ number=5, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='upsert', full_name='Mysqlx.Crud.Insert.upsert', index=5,
+ number=6, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[_INSERT_TYPEDROW, ],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=1455,
+ serialized_end=1745,
+)
+
+
+_UPDATE = _descriptor.Descriptor(
+ name='Update',
+ full_name='Mysqlx.Crud.Update',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='collection', full_name='Mysqlx.Crud.Update.collection', index=0,
+ number=2, type=11, cpp_type=10, label=2,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='data_model', full_name='Mysqlx.Crud.Update.data_model', index=1,
+ number=3, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=1,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='criteria', full_name='Mysqlx.Crud.Update.criteria', index=2,
+ number=4, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='limit', full_name='Mysqlx.Crud.Update.limit', index=3,
+ number=5, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='order', full_name='Mysqlx.Crud.Update.order', index=4,
+ number=6, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='operation', full_name='Mysqlx.Crud.Update.operation', index=5,
+ number=7, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='args', full_name='Mysqlx.Crud.Update.args', index=6,
+ number=8, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='limit_expr', full_name='Mysqlx.Crud.Update.limit_expr', index=7,
+ number=9, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=1748,
+ serialized_end=2085,
+)
+
+
+_DELETE = _descriptor.Descriptor(
+ name='Delete',
+ full_name='Mysqlx.Crud.Delete',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='collection', full_name='Mysqlx.Crud.Delete.collection', index=0,
+ number=1, type=11, cpp_type=10, label=2,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='data_model', full_name='Mysqlx.Crud.Delete.data_model', index=1,
+ number=2, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=1,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='criteria', full_name='Mysqlx.Crud.Delete.criteria', index=2,
+ number=3, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='limit', full_name='Mysqlx.Crud.Delete.limit', index=3,
+ number=4, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='order', full_name='Mysqlx.Crud.Delete.order', index=4,
+ number=5, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='args', full_name='Mysqlx.Crud.Delete.args', index=5,
+ number=6, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='limit_expr', full_name='Mysqlx.Crud.Delete.limit_expr', index=6,
+ number=7, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=2088,
+ serialized_end=2376,
+)
+
+
+_CREATEVIEW = _descriptor.Descriptor(
+ name='CreateView',
+ full_name='Mysqlx.Crud.CreateView',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='collection', full_name='Mysqlx.Crud.CreateView.collection', index=0,
+ number=1, type=11, cpp_type=10, label=2,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='definer', full_name='Mysqlx.Crud.CreateView.definer', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='algorithm', full_name='Mysqlx.Crud.CreateView.algorithm', index=2,
+ number=3, type=14, cpp_type=8, label=1,
+ has_default_value=True, default_value=1,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='security', full_name='Mysqlx.Crud.CreateView.security', index=3,
+ number=4, type=14, cpp_type=8, label=1,
+ has_default_value=True, default_value=2,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='check', full_name='Mysqlx.Crud.CreateView.check', index=4,
+ number=5, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=1,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='column', full_name='Mysqlx.Crud.CreateView.column', index=5,
+ number=6, type=9, cpp_type=9, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='stmt', full_name='Mysqlx.Crud.CreateView.stmt', index=6,
+ number=7, type=11, cpp_type=10, label=2,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='replace_existing', full_name='Mysqlx.Crud.CreateView.replace_existing', index=7,
+ number=8, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=2379,
+ serialized_end=2695,
+)
+
+
+_MODIFYVIEW = _descriptor.Descriptor(
+ name='ModifyView',
+ full_name='Mysqlx.Crud.ModifyView',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='collection', full_name='Mysqlx.Crud.ModifyView.collection', index=0,
+ number=1, type=11, cpp_type=10, label=2,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='definer', full_name='Mysqlx.Crud.ModifyView.definer', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='algorithm', full_name='Mysqlx.Crud.ModifyView.algorithm', index=2,
+ number=3, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=1,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='security', full_name='Mysqlx.Crud.ModifyView.security', index=3,
+ number=4, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=1,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='check', full_name='Mysqlx.Crud.ModifyView.check', index=4,
+ number=5, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=1,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='column', full_name='Mysqlx.Crud.ModifyView.column', index=5,
+ number=6, type=9, cpp_type=9, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='stmt', full_name='Mysqlx.Crud.ModifyView.stmt', index=6,
+ number=7, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=2698,
+ serialized_end=2961,
+)
+
+
+_DROPVIEW = _descriptor.Descriptor(
+ name='DropView',
+ full_name='Mysqlx.Crud.DropView',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='collection', full_name='Mysqlx.Crud.DropView.collection', index=0,
+ number=1, type=11, cpp_type=10, label=2,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='if_exists', full_name='Mysqlx.Crud.DropView.if_exists', index=1,
+ number=2, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=2963,
+ serialized_end=3044,
+)
+
+_COLUMN.fields_by_name['document_path'].message_type = mysqlx__expr__pb2._DOCUMENTPATHITEM
+_PROJECTION.fields_by_name['source'].message_type = mysqlx__expr__pb2._EXPR
+_LIMITEXPR.fields_by_name['row_count'].message_type = mysqlx__expr__pb2._EXPR
+_LIMITEXPR.fields_by_name['offset'].message_type = mysqlx__expr__pb2._EXPR
+_ORDER.fields_by_name['expr'].message_type = mysqlx__expr__pb2._EXPR
+_ORDER.fields_by_name['direction'].enum_type = _ORDER_DIRECTION
+_ORDER_DIRECTION.containing_type = _ORDER
+_UPDATEOPERATION.fields_by_name['source'].message_type = mysqlx__expr__pb2._COLUMNIDENTIFIER
+_UPDATEOPERATION.fields_by_name['operation'].enum_type = _UPDATEOPERATION_UPDATETYPE
+_UPDATEOPERATION.fields_by_name['value'].message_type = mysqlx__expr__pb2._EXPR
+_UPDATEOPERATION_UPDATETYPE.containing_type = _UPDATEOPERATION
+_FIND.fields_by_name['collection'].message_type = _COLLECTION
+_FIND.fields_by_name['data_model'].enum_type = _DATAMODEL
+_FIND.fields_by_name['projection'].message_type = _PROJECTION
+_FIND.fields_by_name['args'].message_type = mysqlx__datatypes__pb2._SCALAR
+_FIND.fields_by_name['criteria'].message_type = mysqlx__expr__pb2._EXPR
+_FIND.fields_by_name['limit'].message_type = _LIMIT
+_FIND.fields_by_name['order'].message_type = _ORDER
+_FIND.fields_by_name['grouping'].message_type = mysqlx__expr__pb2._EXPR
+_FIND.fields_by_name['grouping_criteria'].message_type = mysqlx__expr__pb2._EXPR
+_FIND.fields_by_name['locking'].enum_type = _FIND_ROWLOCK
+_FIND.fields_by_name['locking_options'].enum_type = _FIND_ROWLOCKOPTIONS
+_FIND.fields_by_name['limit_expr'].message_type = _LIMITEXPR
+_FIND_ROWLOCK.containing_type = _FIND
+_FIND_ROWLOCKOPTIONS.containing_type = _FIND
+_INSERT_TYPEDROW.fields_by_name['field'].message_type = mysqlx__expr__pb2._EXPR
+_INSERT_TYPEDROW.containing_type = _INSERT
+_INSERT.fields_by_name['collection'].message_type = _COLLECTION
+_INSERT.fields_by_name['data_model'].enum_type = _DATAMODEL
+_INSERT.fields_by_name['projection'].message_type = _COLUMN
+_INSERT.fields_by_name['row'].message_type = _INSERT_TYPEDROW
+_INSERT.fields_by_name['args'].message_type = mysqlx__datatypes__pb2._SCALAR
+_UPDATE.fields_by_name['collection'].message_type = _COLLECTION
+_UPDATE.fields_by_name['data_model'].enum_type = _DATAMODEL
+_UPDATE.fields_by_name['criteria'].message_type = mysqlx__expr__pb2._EXPR
+_UPDATE.fields_by_name['limit'].message_type = _LIMIT
+_UPDATE.fields_by_name['order'].message_type = _ORDER
+_UPDATE.fields_by_name['operation'].message_type = _UPDATEOPERATION
+_UPDATE.fields_by_name['args'].message_type = mysqlx__datatypes__pb2._SCALAR
+_UPDATE.fields_by_name['limit_expr'].message_type = _LIMITEXPR
+_DELETE.fields_by_name['collection'].message_type = _COLLECTION
+_DELETE.fields_by_name['data_model'].enum_type = _DATAMODEL
+_DELETE.fields_by_name['criteria'].message_type = mysqlx__expr__pb2._EXPR
+_DELETE.fields_by_name['limit'].message_type = _LIMIT
+_DELETE.fields_by_name['order'].message_type = _ORDER
+_DELETE.fields_by_name['args'].message_type = mysqlx__datatypes__pb2._SCALAR
+_DELETE.fields_by_name['limit_expr'].message_type = _LIMITEXPR
+_CREATEVIEW.fields_by_name['collection'].message_type = _COLLECTION
+_CREATEVIEW.fields_by_name['algorithm'].enum_type = _VIEWALGORITHM
+_CREATEVIEW.fields_by_name['security'].enum_type = _VIEWSQLSECURITY
+_CREATEVIEW.fields_by_name['check'].enum_type = _VIEWCHECKOPTION
+_CREATEVIEW.fields_by_name['stmt'].message_type = _FIND
+_MODIFYVIEW.fields_by_name['collection'].message_type = _COLLECTION
+_MODIFYVIEW.fields_by_name['algorithm'].enum_type = _VIEWALGORITHM
+_MODIFYVIEW.fields_by_name['security'].enum_type = _VIEWSQLSECURITY
+_MODIFYVIEW.fields_by_name['check'].enum_type = _VIEWCHECKOPTION
+_MODIFYVIEW.fields_by_name['stmt'].message_type = _FIND
+_DROPVIEW.fields_by_name['collection'].message_type = _COLLECTION
+DESCRIPTOR.message_types_by_name['Column'] = _COLUMN
+DESCRIPTOR.message_types_by_name['Projection'] = _PROJECTION
+DESCRIPTOR.message_types_by_name['Collection'] = _COLLECTION
+DESCRIPTOR.message_types_by_name['Limit'] = _LIMIT
+DESCRIPTOR.message_types_by_name['LimitExpr'] = _LIMITEXPR
+DESCRIPTOR.message_types_by_name['Order'] = _ORDER
+DESCRIPTOR.message_types_by_name['UpdateOperation'] = _UPDATEOPERATION
+DESCRIPTOR.message_types_by_name['Find'] = _FIND
+DESCRIPTOR.message_types_by_name['Insert'] = _INSERT
+DESCRIPTOR.message_types_by_name['Update'] = _UPDATE
+DESCRIPTOR.message_types_by_name['Delete'] = _DELETE
+DESCRIPTOR.message_types_by_name['CreateView'] = _CREATEVIEW
+DESCRIPTOR.message_types_by_name['ModifyView'] = _MODIFYVIEW
+DESCRIPTOR.message_types_by_name['DropView'] = _DROPVIEW
+DESCRIPTOR.enum_types_by_name['DataModel'] = _DATAMODEL
+DESCRIPTOR.enum_types_by_name['ViewAlgorithm'] = _VIEWALGORITHM
+DESCRIPTOR.enum_types_by_name['ViewSqlSecurity'] = _VIEWSQLSECURITY
+DESCRIPTOR.enum_types_by_name['ViewCheckOption'] = _VIEWCHECKOPTION
+
+Column = _reflection.GeneratedProtocolMessageType('Column', (_message.Message,), dict(
+ DESCRIPTOR = _COLUMN,
+ __module__ = 'mysqlx_crud_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Crud.Column)
+ ))
+_sym_db.RegisterMessage(Column)
+
+Projection = _reflection.GeneratedProtocolMessageType('Projection', (_message.Message,), dict(
+ DESCRIPTOR = _PROJECTION,
+ __module__ = 'mysqlx_crud_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Crud.Projection)
+ ))
+_sym_db.RegisterMessage(Projection)
+
+Collection = _reflection.GeneratedProtocolMessageType('Collection', (_message.Message,), dict(
+ DESCRIPTOR = _COLLECTION,
+ __module__ = 'mysqlx_crud_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Crud.Collection)
+ ))
+_sym_db.RegisterMessage(Collection)
+
+Limit = _reflection.GeneratedProtocolMessageType('Limit', (_message.Message,), dict(
+ DESCRIPTOR = _LIMIT,
+ __module__ = 'mysqlx_crud_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Crud.Limit)
+ ))
+_sym_db.RegisterMessage(Limit)
+
+LimitExpr = _reflection.GeneratedProtocolMessageType('LimitExpr', (_message.Message,), dict(
+ DESCRIPTOR = _LIMITEXPR,
+ __module__ = 'mysqlx_crud_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Crud.LimitExpr)
+ ))
+_sym_db.RegisterMessage(LimitExpr)
+
+Order = _reflection.GeneratedProtocolMessageType('Order', (_message.Message,), dict(
+ DESCRIPTOR = _ORDER,
+ __module__ = 'mysqlx_crud_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Crud.Order)
+ ))
+_sym_db.RegisterMessage(Order)
+
+UpdateOperation = _reflection.GeneratedProtocolMessageType('UpdateOperation', (_message.Message,), dict(
+ DESCRIPTOR = _UPDATEOPERATION,
+ __module__ = 'mysqlx_crud_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Crud.UpdateOperation)
+ ))
+_sym_db.RegisterMessage(UpdateOperation)
+
+Find = _reflection.GeneratedProtocolMessageType('Find', (_message.Message,), dict(
+ DESCRIPTOR = _FIND,
+ __module__ = 'mysqlx_crud_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Crud.Find)
+ ))
+_sym_db.RegisterMessage(Find)
+
+Insert = _reflection.GeneratedProtocolMessageType('Insert', (_message.Message,), dict(
+
+ TypedRow = _reflection.GeneratedProtocolMessageType('TypedRow', (_message.Message,), dict(
+ DESCRIPTOR = _INSERT_TYPEDROW,
+ __module__ = 'mysqlx_crud_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Crud.Insert.TypedRow)
+ ))
+ ,
+ DESCRIPTOR = _INSERT,
+ __module__ = 'mysqlx_crud_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Crud.Insert)
+ ))
+_sym_db.RegisterMessage(Insert)
+_sym_db.RegisterMessage(Insert.TypedRow)
+
+Update = _reflection.GeneratedProtocolMessageType('Update', (_message.Message,), dict(
+ DESCRIPTOR = _UPDATE,
+ __module__ = 'mysqlx_crud_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Crud.Update)
+ ))
+_sym_db.RegisterMessage(Update)
+
+Delete = _reflection.GeneratedProtocolMessageType('Delete', (_message.Message,), dict(
+ DESCRIPTOR = _DELETE,
+ __module__ = 'mysqlx_crud_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Crud.Delete)
+ ))
+_sym_db.RegisterMessage(Delete)
+
+CreateView = _reflection.GeneratedProtocolMessageType('CreateView', (_message.Message,), dict(
+ DESCRIPTOR = _CREATEVIEW,
+ __module__ = 'mysqlx_crud_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Crud.CreateView)
+ ))
+_sym_db.RegisterMessage(CreateView)
+
+ModifyView = _reflection.GeneratedProtocolMessageType('ModifyView', (_message.Message,), dict(
+ DESCRIPTOR = _MODIFYVIEW,
+ __module__ = 'mysqlx_crud_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Crud.ModifyView)
+ ))
+_sym_db.RegisterMessage(ModifyView)
+
+DropView = _reflection.GeneratedProtocolMessageType('DropView', (_message.Message,), dict(
+ DESCRIPTOR = _DROPVIEW,
+ __module__ = 'mysqlx_crud_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Crud.DropView)
+ ))
+_sym_db.RegisterMessage(DropView)
+
+
+DESCRIPTOR.has_options = True
+DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.mysql.cj.x.protobufH\003'))
+# @@protoc_insertion_point(module_scope)
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_cursor_pb2.py b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_cursor_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..e74d25c6d78117d2d1084baf8f92885f380585bd
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_cursor_pb2.py
@@ -0,0 +1,269 @@
+# Copyright (c) 2017, 2020, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: mysqlx_cursor.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from mysqlx.protobuf import mysqlx_prepare_pb2 as mysqlx__prepare__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name='mysqlx_cursor.proto',
+ package='Mysqlx.Cursor',
+ syntax='proto2',
+ serialized_pb=_b('\n\x13mysqlx_cursor.proto\x12\rMysqlx.Cursor\x1a\x14mysqlx_prepare.proto\"\xf2\x01\n\x04Open\x12\x11\n\tcursor_id\x18\x01 \x02(\r\x12.\n\x04stmt\x18\x04 \x02(\x0b\x32 .Mysqlx.Cursor.Open.OneOfMessage\x12\x12\n\nfetch_rows\x18\x05 \x01(\x04\x1a\x92\x01\n\x0cOneOfMessage\x12\x33\n\x04type\x18\x01 \x02(\x0e\x32%.Mysqlx.Cursor.Open.OneOfMessage.Type\x12\x30\n\x0fprepare_execute\x18\x02 \x01(\x0b\x32\x17.Mysqlx.Prepare.Execute\"\x1b\n\x04Type\x12\x13\n\x0fPREPARE_EXECUTE\x10\x00\".\n\x05\x46\x65tch\x12\x11\n\tcursor_id\x18\x01 \x02(\r\x12\x12\n\nfetch_rows\x18\x05 \x01(\x04\"\x1a\n\x05\x43lose\x12\x11\n\tcursor_id\x18\x01 \x02(\rB\x1b\n\x17\x63om.mysql.cj.x.protobufH\x03')
+ ,
+ dependencies=[mysqlx__prepare__pb2.DESCRIPTOR,])
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+
+
+_OPEN_ONEOFMESSAGE_TYPE = _descriptor.EnumDescriptor(
+ name='Type',
+ full_name='Mysqlx.Cursor.Open.OneOfMessage.Type',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='PREPARE_EXECUTE', index=0, number=0,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=276,
+ serialized_end=303,
+)
+_sym_db.RegisterEnumDescriptor(_OPEN_ONEOFMESSAGE_TYPE)
+
+
+_OPEN_ONEOFMESSAGE = _descriptor.Descriptor(
+ name='OneOfMessage',
+ full_name='Mysqlx.Cursor.Open.OneOfMessage',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='type', full_name='Mysqlx.Cursor.Open.OneOfMessage.type', index=0,
+ number=1, type=14, cpp_type=8, label=2,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='prepare_execute', full_name='Mysqlx.Cursor.Open.OneOfMessage.prepare_execute', index=1,
+ number=2, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _OPEN_ONEOFMESSAGE_TYPE,
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=157,
+ serialized_end=303,
+)
+
+_OPEN = _descriptor.Descriptor(
+ name='Open',
+ full_name='Mysqlx.Cursor.Open',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='cursor_id', full_name='Mysqlx.Cursor.Open.cursor_id', index=0,
+ number=1, type=13, cpp_type=3, label=2,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='stmt', full_name='Mysqlx.Cursor.Open.stmt', index=1,
+ number=4, type=11, cpp_type=10, label=2,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='fetch_rows', full_name='Mysqlx.Cursor.Open.fetch_rows', index=2,
+ number=5, type=4, cpp_type=4, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[_OPEN_ONEOFMESSAGE, ],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=61,
+ serialized_end=303,
+)
+
+
+_FETCH = _descriptor.Descriptor(
+ name='Fetch',
+ full_name='Mysqlx.Cursor.Fetch',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='cursor_id', full_name='Mysqlx.Cursor.Fetch.cursor_id', index=0,
+ number=1, type=13, cpp_type=3, label=2,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='fetch_rows', full_name='Mysqlx.Cursor.Fetch.fetch_rows', index=1,
+ number=5, type=4, cpp_type=4, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=305,
+ serialized_end=351,
+)
+
+
+_CLOSE = _descriptor.Descriptor(
+ name='Close',
+ full_name='Mysqlx.Cursor.Close',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='cursor_id', full_name='Mysqlx.Cursor.Close.cursor_id', index=0,
+ number=1, type=13, cpp_type=3, label=2,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=353,
+ serialized_end=379,
+)
+
+_OPEN_ONEOFMESSAGE.fields_by_name['type'].enum_type = _OPEN_ONEOFMESSAGE_TYPE
+_OPEN_ONEOFMESSAGE.fields_by_name['prepare_execute'].message_type = mysqlx__prepare__pb2._EXECUTE
+_OPEN_ONEOFMESSAGE.containing_type = _OPEN
+_OPEN_ONEOFMESSAGE_TYPE.containing_type = _OPEN_ONEOFMESSAGE
+_OPEN.fields_by_name['stmt'].message_type = _OPEN_ONEOFMESSAGE
+DESCRIPTOR.message_types_by_name['Open'] = _OPEN
+DESCRIPTOR.message_types_by_name['Fetch'] = _FETCH
+DESCRIPTOR.message_types_by_name['Close'] = _CLOSE
+
+Open = _reflection.GeneratedProtocolMessageType('Open', (_message.Message,), dict(
+
+ OneOfMessage = _reflection.GeneratedProtocolMessageType('OneOfMessage', (_message.Message,), dict(
+ DESCRIPTOR = _OPEN_ONEOFMESSAGE,
+ __module__ = 'mysqlx_cursor_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Cursor.Open.OneOfMessage)
+ ))
+ ,
+ DESCRIPTOR = _OPEN,
+ __module__ = 'mysqlx_cursor_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Cursor.Open)
+ ))
+_sym_db.RegisterMessage(Open)
+_sym_db.RegisterMessage(Open.OneOfMessage)
+
+Fetch = _reflection.GeneratedProtocolMessageType('Fetch', (_message.Message,), dict(
+ DESCRIPTOR = _FETCH,
+ __module__ = 'mysqlx_cursor_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Cursor.Fetch)
+ ))
+_sym_db.RegisterMessage(Fetch)
+
+Close = _reflection.GeneratedProtocolMessageType('Close', (_message.Message,), dict(
+ DESCRIPTOR = _CLOSE,
+ __module__ = 'mysqlx_cursor_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Cursor.Close)
+ ))
+_sym_db.RegisterMessage(Close)
+
+
+DESCRIPTOR.has_options = True
+DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.mysql.cj.x.protobufH\003'))
+# @@protoc_insertion_point(module_scope)
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_datatypes_pb2.py b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_datatypes_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..d343bcab54cfe4e278d47596543d27570f8a15b9
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_datatypes_pb2.py
@@ -0,0 +1,510 @@
+# Copyright (c) 2017, 2020, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: mysqlx_datatypes.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name='mysqlx_datatypes.proto',
+ package='Mysqlx.Datatypes',
+ syntax='proto2',
+ serialized_pb=_b('\n\x16mysqlx_datatypes.proto\x12\x10Mysqlx.Datatypes\"\xc6\x03\n\x06Scalar\x12+\n\x04type\x18\x01 \x02(\x0e\x32\x1d.Mysqlx.Datatypes.Scalar.Type\x12\x14\n\x0cv_signed_int\x18\x02 \x01(\x12\x12\x16\n\x0ev_unsigned_int\x18\x03 \x01(\x04\x12\x31\n\x08v_octets\x18\x05 \x01(\x0b\x32\x1f.Mysqlx.Datatypes.Scalar.Octets\x12\x10\n\x08v_double\x18\x06 \x01(\x01\x12\x0f\n\x07v_float\x18\x07 \x01(\x02\x12\x0e\n\x06v_bool\x18\x08 \x01(\x08\x12\x31\n\x08v_string\x18\t \x01(\x0b\x32\x1f.Mysqlx.Datatypes.Scalar.String\x1a*\n\x06String\x12\r\n\x05value\x18\x01 \x02(\x0c\x12\x11\n\tcollation\x18\x02 \x01(\x04\x1a-\n\x06Octets\x12\r\n\x05value\x18\x01 \x02(\x0c\x12\x14\n\x0c\x63ontent_type\x18\x02 \x01(\r\"m\n\x04Type\x12\n\n\x06V_SINT\x10\x01\x12\n\n\x06V_UINT\x10\x02\x12\n\n\x06V_NULL\x10\x03\x12\x0c\n\x08V_OCTETS\x10\x04\x12\x0c\n\x08V_DOUBLE\x10\x05\x12\x0b\n\x07V_FLOAT\x10\x06\x12\n\n\x06V_BOOL\x10\x07\x12\x0c\n\x08V_STRING\x10\x08\"}\n\x06Object\x12\x31\n\x03\x66ld\x18\x01 \x03(\x0b\x32$.Mysqlx.Datatypes.Object.ObjectField\x1a@\n\x0bObjectField\x12\x0b\n\x03key\x18\x01 \x02(\t\x12$\n\x05value\x18\x02 \x02(\x0b\x32\x15.Mysqlx.Datatypes.Any\"-\n\x05\x41rray\x12$\n\x05value\x18\x01 \x03(\x0b\x32\x15.Mysqlx.Datatypes.Any\"\xd3\x01\n\x03\x41ny\x12(\n\x04type\x18\x01 \x02(\x0e\x32\x1a.Mysqlx.Datatypes.Any.Type\x12(\n\x06scalar\x18\x02 \x01(\x0b\x32\x18.Mysqlx.Datatypes.Scalar\x12%\n\x03obj\x18\x03 \x01(\x0b\x32\x18.Mysqlx.Datatypes.Object\x12&\n\x05\x61rray\x18\x04 \x01(\x0b\x32\x17.Mysqlx.Datatypes.Array\")\n\x04Type\x12\n\n\x06SCALAR\x10\x01\x12\n\n\x06OBJECT\x10\x02\x12\t\n\x05\x41RRAY\x10\x03\x42\x1b\n\x17\x63om.mysql.cj.x.protobufH\x03')
+)
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+
+
+_SCALAR_TYPE = _descriptor.EnumDescriptor(
+ name='Type',
+ full_name='Mysqlx.Datatypes.Scalar.Type',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='V_SINT', index=0, number=1,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='V_UINT', index=1, number=2,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='V_NULL', index=2, number=3,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='V_OCTETS', index=3, number=4,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='V_DOUBLE', index=4, number=5,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='V_FLOAT', index=5, number=6,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='V_BOOL', index=6, number=7,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='V_STRING', index=7, number=8,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=390,
+ serialized_end=499,
+)
+_sym_db.RegisterEnumDescriptor(_SCALAR_TYPE)
+
+_ANY_TYPE = _descriptor.EnumDescriptor(
+ name='Type',
+ full_name='Mysqlx.Datatypes.Any.Type',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='SCALAR', index=0, number=1,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='OBJECT', index=1, number=2,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='ARRAY', index=2, number=3,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=846,
+ serialized_end=887,
+)
+_sym_db.RegisterEnumDescriptor(_ANY_TYPE)
+
+
+_SCALAR_STRING = _descriptor.Descriptor(
+ name='String',
+ full_name='Mysqlx.Datatypes.Scalar.String',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='value', full_name='Mysqlx.Datatypes.Scalar.String.value', index=0,
+ number=1, type=12, cpp_type=9, label=2,
+ has_default_value=False, default_value=_b(""),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='collation', full_name='Mysqlx.Datatypes.Scalar.String.collation', index=1,
+ number=2, type=4, cpp_type=4, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=299,
+ serialized_end=341,
+)
+
+_SCALAR_OCTETS = _descriptor.Descriptor(
+ name='Octets',
+ full_name='Mysqlx.Datatypes.Scalar.Octets',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='value', full_name='Mysqlx.Datatypes.Scalar.Octets.value', index=0,
+ number=1, type=12, cpp_type=9, label=2,
+ has_default_value=False, default_value=_b(""),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='content_type', full_name='Mysqlx.Datatypes.Scalar.Octets.content_type', index=1,
+ number=2, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=343,
+ serialized_end=388,
+)
+
+_SCALAR = _descriptor.Descriptor(
+ name='Scalar',
+ full_name='Mysqlx.Datatypes.Scalar',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='type', full_name='Mysqlx.Datatypes.Scalar.type', index=0,
+ number=1, type=14, cpp_type=8, label=2,
+ has_default_value=False, default_value=1,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='v_signed_int', full_name='Mysqlx.Datatypes.Scalar.v_signed_int', index=1,
+ number=2, type=18, cpp_type=2, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='v_unsigned_int', full_name='Mysqlx.Datatypes.Scalar.v_unsigned_int', index=2,
+ number=3, type=4, cpp_type=4, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='v_octets', full_name='Mysqlx.Datatypes.Scalar.v_octets', index=3,
+ number=5, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='v_double', full_name='Mysqlx.Datatypes.Scalar.v_double', index=4,
+ number=6, type=1, cpp_type=5, label=1,
+ has_default_value=False, default_value=float(0),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='v_float', full_name='Mysqlx.Datatypes.Scalar.v_float', index=5,
+ number=7, type=2, cpp_type=6, label=1,
+ has_default_value=False, default_value=float(0),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='v_bool', full_name='Mysqlx.Datatypes.Scalar.v_bool', index=6,
+ number=8, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='v_string', full_name='Mysqlx.Datatypes.Scalar.v_string', index=7,
+ number=9, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[_SCALAR_STRING, _SCALAR_OCTETS, ],
+ enum_types=[
+ _SCALAR_TYPE,
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=45,
+ serialized_end=499,
+)
+
+
+_OBJECT_OBJECTFIELD = _descriptor.Descriptor(
+ name='ObjectField',
+ full_name='Mysqlx.Datatypes.Object.ObjectField',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='key', full_name='Mysqlx.Datatypes.Object.ObjectField.key', index=0,
+ number=1, type=9, cpp_type=9, label=2,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='value', full_name='Mysqlx.Datatypes.Object.ObjectField.value', index=1,
+ number=2, type=11, cpp_type=10, label=2,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=562,
+ serialized_end=626,
+)
+
+_OBJECT = _descriptor.Descriptor(
+ name='Object',
+ full_name='Mysqlx.Datatypes.Object',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='fld', full_name='Mysqlx.Datatypes.Object.fld', index=0,
+ number=1, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[_OBJECT_OBJECTFIELD, ],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=501,
+ serialized_end=626,
+)
+
+
+_ARRAY = _descriptor.Descriptor(
+ name='Array',
+ full_name='Mysqlx.Datatypes.Array',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='value', full_name='Mysqlx.Datatypes.Array.value', index=0,
+ number=1, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=628,
+ serialized_end=673,
+)
+
+
+_ANY = _descriptor.Descriptor(
+ name='Any',
+ full_name='Mysqlx.Datatypes.Any',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='type', full_name='Mysqlx.Datatypes.Any.type', index=0,
+ number=1, type=14, cpp_type=8, label=2,
+ has_default_value=False, default_value=1,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='scalar', full_name='Mysqlx.Datatypes.Any.scalar', index=1,
+ number=2, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='obj', full_name='Mysqlx.Datatypes.Any.obj', index=2,
+ number=3, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='array', full_name='Mysqlx.Datatypes.Any.array', index=3,
+ number=4, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _ANY_TYPE,
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=676,
+ serialized_end=887,
+)
+
+_SCALAR_STRING.containing_type = _SCALAR
+_SCALAR_OCTETS.containing_type = _SCALAR
+_SCALAR.fields_by_name['type'].enum_type = _SCALAR_TYPE
+_SCALAR.fields_by_name['v_octets'].message_type = _SCALAR_OCTETS
+_SCALAR.fields_by_name['v_string'].message_type = _SCALAR_STRING
+_SCALAR_TYPE.containing_type = _SCALAR
+_OBJECT_OBJECTFIELD.fields_by_name['value'].message_type = _ANY
+_OBJECT_OBJECTFIELD.containing_type = _OBJECT
+_OBJECT.fields_by_name['fld'].message_type = _OBJECT_OBJECTFIELD
+_ARRAY.fields_by_name['value'].message_type = _ANY
+_ANY.fields_by_name['type'].enum_type = _ANY_TYPE
+_ANY.fields_by_name['scalar'].message_type = _SCALAR
+_ANY.fields_by_name['obj'].message_type = _OBJECT
+_ANY.fields_by_name['array'].message_type = _ARRAY
+_ANY_TYPE.containing_type = _ANY
+DESCRIPTOR.message_types_by_name['Scalar'] = _SCALAR
+DESCRIPTOR.message_types_by_name['Object'] = _OBJECT
+DESCRIPTOR.message_types_by_name['Array'] = _ARRAY
+DESCRIPTOR.message_types_by_name['Any'] = _ANY
+
+Scalar = _reflection.GeneratedProtocolMessageType('Scalar', (_message.Message,), dict(
+
+ String = _reflection.GeneratedProtocolMessageType('String', (_message.Message,), dict(
+ DESCRIPTOR = _SCALAR_STRING,
+ __module__ = 'mysqlx_datatypes_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Datatypes.Scalar.String)
+ ))
+ ,
+
+ Octets = _reflection.GeneratedProtocolMessageType('Octets', (_message.Message,), dict(
+ DESCRIPTOR = _SCALAR_OCTETS,
+ __module__ = 'mysqlx_datatypes_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Datatypes.Scalar.Octets)
+ ))
+ ,
+ DESCRIPTOR = _SCALAR,
+ __module__ = 'mysqlx_datatypes_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Datatypes.Scalar)
+ ))
+_sym_db.RegisterMessage(Scalar)
+_sym_db.RegisterMessage(Scalar.String)
+_sym_db.RegisterMessage(Scalar.Octets)
+
+Object = _reflection.GeneratedProtocolMessageType('Object', (_message.Message,), dict(
+
+ ObjectField = _reflection.GeneratedProtocolMessageType('ObjectField', (_message.Message,), dict(
+ DESCRIPTOR = _OBJECT_OBJECTFIELD,
+ __module__ = 'mysqlx_datatypes_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Datatypes.Object.ObjectField)
+ ))
+ ,
+ DESCRIPTOR = _OBJECT,
+ __module__ = 'mysqlx_datatypes_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Datatypes.Object)
+ ))
+_sym_db.RegisterMessage(Object)
+_sym_db.RegisterMessage(Object.ObjectField)
+
+Array = _reflection.GeneratedProtocolMessageType('Array', (_message.Message,), dict(
+ DESCRIPTOR = _ARRAY,
+ __module__ = 'mysqlx_datatypes_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Datatypes.Array)
+ ))
+_sym_db.RegisterMessage(Array)
+
+Any = _reflection.GeneratedProtocolMessageType('Any', (_message.Message,), dict(
+ DESCRIPTOR = _ANY,
+ __module__ = 'mysqlx_datatypes_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Datatypes.Any)
+ ))
+_sym_db.RegisterMessage(Any)
+
+
+DESCRIPTOR.has_options = True
+DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.mysql.cj.x.protobufH\003'))
+# @@protoc_insertion_point(module_scope)
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_expect_pb2.py b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_expect_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..ca26aec4a72f6c555af87251a9f42a7a42bc5ee1
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_expect_pb2.py
@@ -0,0 +1,270 @@
+# Copyright (c) 2017, 2020, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: mysqlx_expect.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name='mysqlx_expect.proto',
+ package='Mysqlx.Expect',
+ syntax='proto2',
+ serialized_pb=_b('\n\x13mysqlx_expect.proto\x12\rMysqlx.Expect\"\xd0\x03\n\x04Open\x12\x42\n\x02op\x18\x01 \x01(\x0e\x32 .Mysqlx.Expect.Open.CtxOperation:\x14\x45XPECT_CTX_COPY_PREV\x12+\n\x04\x63ond\x18\x02 \x03(\x0b\x32\x1d.Mysqlx.Expect.Open.Condition\x1a\x96\x02\n\tCondition\x12\x15\n\rcondition_key\x18\x01 \x02(\r\x12\x17\n\x0f\x63ondition_value\x18\x02 \x01(\x0c\x12K\n\x02op\x18\x03 \x01(\x0e\x32\x30.Mysqlx.Expect.Open.Condition.ConditionOperation:\rEXPECT_OP_SET\"N\n\x03Key\x12\x13\n\x0f\x45XPECT_NO_ERROR\x10\x01\x12\x16\n\x12\x45XPECT_FIELD_EXIST\x10\x02\x12\x1a\n\x16\x45XPECT_DOCID_GENERATED\x10\x03\"<\n\x12\x43onditionOperation\x12\x11\n\rEXPECT_OP_SET\x10\x00\x12\x13\n\x0f\x45XPECT_OP_UNSET\x10\x01\">\n\x0c\x43txOperation\x12\x18\n\x14\x45XPECT_CTX_COPY_PREV\x10\x00\x12\x14\n\x10\x45XPECT_CTX_EMPTY\x10\x01\"\x07\n\x05\x43loseB\x1b\n\x17\x63om.mysql.cj.x.protobufH\x03')
+)
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+
+
+_OPEN_CONDITION_KEY = _descriptor.EnumDescriptor(
+ name='Key',
+ full_name='Mysqlx.Expect.Open.Condition.Key',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='EXPECT_NO_ERROR', index=0, number=1,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='EXPECT_FIELD_EXIST', index=1, number=2,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='EXPECT_DOCID_GENERATED', index=2, number=3,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=299,
+ serialized_end=377,
+)
+_sym_db.RegisterEnumDescriptor(_OPEN_CONDITION_KEY)
+
+_OPEN_CONDITION_CONDITIONOPERATION = _descriptor.EnumDescriptor(
+ name='ConditionOperation',
+ full_name='Mysqlx.Expect.Open.Condition.ConditionOperation',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='EXPECT_OP_SET', index=0, number=0,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='EXPECT_OP_UNSET', index=1, number=1,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=379,
+ serialized_end=439,
+)
+_sym_db.RegisterEnumDescriptor(_OPEN_CONDITION_CONDITIONOPERATION)
+
+_OPEN_CTXOPERATION = _descriptor.EnumDescriptor(
+ name='CtxOperation',
+ full_name='Mysqlx.Expect.Open.CtxOperation',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='EXPECT_CTX_COPY_PREV', index=0, number=0,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='EXPECT_CTX_EMPTY', index=1, number=1,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=441,
+ serialized_end=503,
+)
+_sym_db.RegisterEnumDescriptor(_OPEN_CTXOPERATION)
+
+
+_OPEN_CONDITION = _descriptor.Descriptor(
+ name='Condition',
+ full_name='Mysqlx.Expect.Open.Condition',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='condition_key', full_name='Mysqlx.Expect.Open.Condition.condition_key', index=0,
+ number=1, type=13, cpp_type=3, label=2,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='condition_value', full_name='Mysqlx.Expect.Open.Condition.condition_value', index=1,
+ number=2, type=12, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b(""),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='op', full_name='Mysqlx.Expect.Open.Condition.op', index=2,
+ number=3, type=14, cpp_type=8, label=1,
+ has_default_value=True, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _OPEN_CONDITION_KEY,
+ _OPEN_CONDITION_CONDITIONOPERATION,
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=161,
+ serialized_end=439,
+)
+
+_OPEN = _descriptor.Descriptor(
+ name='Open',
+ full_name='Mysqlx.Expect.Open',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='op', full_name='Mysqlx.Expect.Open.op', index=0,
+ number=1, type=14, cpp_type=8, label=1,
+ has_default_value=True, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='cond', full_name='Mysqlx.Expect.Open.cond', index=1,
+ number=2, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[_OPEN_CONDITION, ],
+ enum_types=[
+ _OPEN_CTXOPERATION,
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=39,
+ serialized_end=503,
+)
+
+
+_CLOSE = _descriptor.Descriptor(
+ name='Close',
+ full_name='Mysqlx.Expect.Close',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=505,
+ serialized_end=512,
+)
+
+_OPEN_CONDITION.fields_by_name['op'].enum_type = _OPEN_CONDITION_CONDITIONOPERATION
+_OPEN_CONDITION.containing_type = _OPEN
+_OPEN_CONDITION_KEY.containing_type = _OPEN_CONDITION
+_OPEN_CONDITION_CONDITIONOPERATION.containing_type = _OPEN_CONDITION
+_OPEN.fields_by_name['op'].enum_type = _OPEN_CTXOPERATION
+_OPEN.fields_by_name['cond'].message_type = _OPEN_CONDITION
+_OPEN_CTXOPERATION.containing_type = _OPEN
+DESCRIPTOR.message_types_by_name['Open'] = _OPEN
+DESCRIPTOR.message_types_by_name['Close'] = _CLOSE
+
+Open = _reflection.GeneratedProtocolMessageType('Open', (_message.Message,), dict(
+
+ Condition = _reflection.GeneratedProtocolMessageType('Condition', (_message.Message,), dict(
+ DESCRIPTOR = _OPEN_CONDITION,
+ __module__ = 'mysqlx_expect_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Expect.Open.Condition)
+ ))
+ ,
+ DESCRIPTOR = _OPEN,
+ __module__ = 'mysqlx_expect_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Expect.Open)
+ ))
+_sym_db.RegisterMessage(Open)
+_sym_db.RegisterMessage(Open.Condition)
+
+Close = _reflection.GeneratedProtocolMessageType('Close', (_message.Message,), dict(
+ DESCRIPTOR = _CLOSE,
+ __module__ = 'mysqlx_expect_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Expect.Close)
+ ))
+_sym_db.RegisterMessage(Close)
+
+
+DESCRIPTOR.has_options = True
+DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.mysql.cj.x.protobufH\003'))
+# @@protoc_insertion_point(module_scope)
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_expr_pb2.py b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_expr_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..27181a1818f90943b3945802233c431da443d089
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_expr_pb2.py
@@ -0,0 +1,631 @@
+# Copyright (c) 2017, 2020, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: mysqlx_expr.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from mysqlx.protobuf import mysqlx_datatypes_pb2 as mysqlx__datatypes__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name='mysqlx_expr.proto',
+ package='Mysqlx.Expr',
+ syntax='proto2',
+ serialized_pb=_b('\n\x11mysqlx_expr.proto\x12\x0bMysqlx.Expr\x1a\x16mysqlx_datatypes.proto\"\xc4\x03\n\x04\x45xpr\x12$\n\x04type\x18\x01 \x02(\x0e\x32\x16.Mysqlx.Expr.Expr.Type\x12\x31\n\nidentifier\x18\x02 \x01(\x0b\x32\x1d.Mysqlx.Expr.ColumnIdentifier\x12\x10\n\x08variable\x18\x03 \x01(\t\x12)\n\x07literal\x18\x04 \x01(\x0b\x32\x18.Mysqlx.Datatypes.Scalar\x12\x30\n\rfunction_call\x18\x05 \x01(\x0b\x32\x19.Mysqlx.Expr.FunctionCall\x12\'\n\x08operator\x18\x06 \x01(\x0b\x32\x15.Mysqlx.Expr.Operator\x12\x10\n\x08position\x18\x07 \x01(\r\x12#\n\x06object\x18\x08 \x01(\x0b\x32\x13.Mysqlx.Expr.Object\x12!\n\x05\x61rray\x18\t \x01(\x0b\x32\x12.Mysqlx.Expr.Array\"q\n\x04Type\x12\t\n\x05IDENT\x10\x01\x12\x0b\n\x07LITERAL\x10\x02\x12\x0c\n\x08VARIABLE\x10\x03\x12\r\n\tFUNC_CALL\x10\x04\x12\x0c\n\x08OPERATOR\x10\x05\x12\x0f\n\x0bPLACEHOLDER\x10\x06\x12\n\n\x06OBJECT\x10\x07\x12\t\n\x05\x41RRAY\x10\x08\"/\n\nIdentifier\x12\x0c\n\x04name\x18\x01 \x02(\t\x12\x13\n\x0bschema_name\x18\x02 \x01(\t\"\xcb\x01\n\x10\x44ocumentPathItem\x12\x30\n\x04type\x18\x01 \x02(\x0e\x32\".Mysqlx.Expr.DocumentPathItem.Type\x12\r\n\x05value\x18\x02 \x01(\t\x12\r\n\x05index\x18\x03 \x01(\r\"g\n\x04Type\x12\n\n\x06MEMBER\x10\x01\x12\x13\n\x0fMEMBER_ASTERISK\x10\x02\x12\x0f\n\x0b\x41RRAY_INDEX\x10\x03\x12\x18\n\x14\x41RRAY_INDEX_ASTERISK\x10\x04\x12\x13\n\x0f\x44OUBLE_ASTERISK\x10\x05\"\x7f\n\x10\x43olumnIdentifier\x12\x34\n\rdocument_path\x18\x01 \x03(\x0b\x32\x1d.Mysqlx.Expr.DocumentPathItem\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x12\n\ntable_name\x18\x03 \x01(\t\x12\x13\n\x0bschema_name\x18\x04 \x01(\t\"W\n\x0c\x46unctionCall\x12%\n\x04name\x18\x01 \x02(\x0b\x32\x17.Mysqlx.Expr.Identifier\x12 \n\x05param\x18\x02 \x03(\x0b\x32\x11.Mysqlx.Expr.Expr\":\n\x08Operator\x12\x0c\n\x04name\x18\x01 \x02(\t\x12 \n\x05param\x18\x02 \x03(\x0b\x32\x11.Mysqlx.Expr.Expr\"t\n\x06Object\x12,\n\x03\x66ld\x18\x01 \x03(\x0b\x32\x1f.Mysqlx.Expr.Object.ObjectField\x1a<\n\x0bObjectField\x12\x0b\n\x03key\x18\x01 
\x02(\t\x12 \n\x05value\x18\x02 \x02(\x0b\x32\x11.Mysqlx.Expr.Expr\")\n\x05\x41rray\x12 \n\x05value\x18\x01 \x03(\x0b\x32\x11.Mysqlx.Expr.ExprB\x1b\n\x17\x63om.mysql.cj.x.protobufH\x03')
+ ,
+ dependencies=[mysqlx__datatypes__pb2.DESCRIPTOR,])
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+
+
+_EXPR_TYPE = _descriptor.EnumDescriptor(
+ name='Type',
+ full_name='Mysqlx.Expr.Expr.Type',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='IDENT', index=0, number=1,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='LITERAL', index=1, number=2,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='VARIABLE', index=2, number=3,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='FUNC_CALL', index=3, number=4,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='OPERATOR', index=4, number=5,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='PLACEHOLDER', index=5, number=6,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='OBJECT', index=6, number=7,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='ARRAY', index=7, number=8,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=398,
+ serialized_end=511,
+)
+_sym_db.RegisterEnumDescriptor(_EXPR_TYPE)
+
+_DOCUMENTPATHITEM_TYPE = _descriptor.EnumDescriptor(
+ name='Type',
+ full_name='Mysqlx.Expr.DocumentPathItem.Type',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='MEMBER', index=0, number=1,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='MEMBER_ASTERISK', index=1, number=2,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='ARRAY_INDEX', index=2, number=3,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='ARRAY_INDEX_ASTERISK', index=3, number=4,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='DOUBLE_ASTERISK', index=4, number=5,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=663,
+ serialized_end=766,
+)
+_sym_db.RegisterEnumDescriptor(_DOCUMENTPATHITEM_TYPE)
+
+
+_EXPR = _descriptor.Descriptor(
+ name='Expr',
+ full_name='Mysqlx.Expr.Expr',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='type', full_name='Mysqlx.Expr.Expr.type', index=0,
+ number=1, type=14, cpp_type=8, label=2,
+ has_default_value=False, default_value=1,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='identifier', full_name='Mysqlx.Expr.Expr.identifier', index=1,
+ number=2, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='variable', full_name='Mysqlx.Expr.Expr.variable', index=2,
+ number=3, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='literal', full_name='Mysqlx.Expr.Expr.literal', index=3,
+ number=4, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='function_call', full_name='Mysqlx.Expr.Expr.function_call', index=4,
+ number=5, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='operator', full_name='Mysqlx.Expr.Expr.operator', index=5,
+ number=6, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='position', full_name='Mysqlx.Expr.Expr.position', index=6,
+ number=7, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='object', full_name='Mysqlx.Expr.Expr.object', index=7,
+ number=8, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='array', full_name='Mysqlx.Expr.Expr.array', index=8,
+ number=9, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _EXPR_TYPE,
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=59,
+ serialized_end=511,
+)
+
+
+_IDENTIFIER = _descriptor.Descriptor(
+ name='Identifier',
+ full_name='Mysqlx.Expr.Identifier',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='name', full_name='Mysqlx.Expr.Identifier.name', index=0,
+ number=1, type=9, cpp_type=9, label=2,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='schema_name', full_name='Mysqlx.Expr.Identifier.schema_name', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=513,
+ serialized_end=560,
+)
+
+
+_DOCUMENTPATHITEM = _descriptor.Descriptor(
+ name='DocumentPathItem',
+ full_name='Mysqlx.Expr.DocumentPathItem',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='type', full_name='Mysqlx.Expr.DocumentPathItem.type', index=0,
+ number=1, type=14, cpp_type=8, label=2,
+ has_default_value=False, default_value=1,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='value', full_name='Mysqlx.Expr.DocumentPathItem.value', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='index', full_name='Mysqlx.Expr.DocumentPathItem.index', index=2,
+ number=3, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _DOCUMENTPATHITEM_TYPE,
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=563,
+ serialized_end=766,
+)
+
+
+_COLUMNIDENTIFIER = _descriptor.Descriptor(
+ name='ColumnIdentifier',
+ full_name='Mysqlx.Expr.ColumnIdentifier',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='document_path', full_name='Mysqlx.Expr.ColumnIdentifier.document_path', index=0,
+ number=1, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='name', full_name='Mysqlx.Expr.ColumnIdentifier.name', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='table_name', full_name='Mysqlx.Expr.ColumnIdentifier.table_name', index=2,
+ number=3, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='schema_name', full_name='Mysqlx.Expr.ColumnIdentifier.schema_name', index=3,
+ number=4, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=768,
+ serialized_end=895,
+)
+
+
+_FUNCTIONCALL = _descriptor.Descriptor(
+ name='FunctionCall',
+ full_name='Mysqlx.Expr.FunctionCall',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='name', full_name='Mysqlx.Expr.FunctionCall.name', index=0,
+ number=1, type=11, cpp_type=10, label=2,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='param', full_name='Mysqlx.Expr.FunctionCall.param', index=1,
+ number=2, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=897,
+ serialized_end=984,
+)
+
+
+_OPERATOR = _descriptor.Descriptor(
+ name='Operator',
+ full_name='Mysqlx.Expr.Operator',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='name', full_name='Mysqlx.Expr.Operator.name', index=0,
+ number=1, type=9, cpp_type=9, label=2,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='param', full_name='Mysqlx.Expr.Operator.param', index=1,
+ number=2, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=986,
+ serialized_end=1044,
+)
+
+
+_OBJECT_OBJECTFIELD = _descriptor.Descriptor(
+ name='ObjectField',
+ full_name='Mysqlx.Expr.Object.ObjectField',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='key', full_name='Mysqlx.Expr.Object.ObjectField.key', index=0,
+ number=1, type=9, cpp_type=9, label=2,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='value', full_name='Mysqlx.Expr.Object.ObjectField.value', index=1,
+ number=2, type=11, cpp_type=10, label=2,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=1102,
+ serialized_end=1162,
+)
+
+_OBJECT = _descriptor.Descriptor(
+ name='Object',
+ full_name='Mysqlx.Expr.Object',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='fld', full_name='Mysqlx.Expr.Object.fld', index=0,
+ number=1, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[_OBJECT_OBJECTFIELD, ],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=1046,
+ serialized_end=1162,
+)
+
+
+_ARRAY = _descriptor.Descriptor(
+ name='Array',
+ full_name='Mysqlx.Expr.Array',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='value', full_name='Mysqlx.Expr.Array.value', index=0,
+ number=1, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=1164,
+ serialized_end=1205,
+)
+
+_EXPR.fields_by_name['type'].enum_type = _EXPR_TYPE
+_EXPR.fields_by_name['identifier'].message_type = _COLUMNIDENTIFIER
+_EXPR.fields_by_name['literal'].message_type = mysqlx__datatypes__pb2._SCALAR
+_EXPR.fields_by_name['function_call'].message_type = _FUNCTIONCALL
+_EXPR.fields_by_name['operator'].message_type = _OPERATOR
+_EXPR.fields_by_name['object'].message_type = _OBJECT
+_EXPR.fields_by_name['array'].message_type = _ARRAY
+_EXPR_TYPE.containing_type = _EXPR
+_DOCUMENTPATHITEM.fields_by_name['type'].enum_type = _DOCUMENTPATHITEM_TYPE
+_DOCUMENTPATHITEM_TYPE.containing_type = _DOCUMENTPATHITEM
+_COLUMNIDENTIFIER.fields_by_name['document_path'].message_type = _DOCUMENTPATHITEM
+_FUNCTIONCALL.fields_by_name['name'].message_type = _IDENTIFIER
+_FUNCTIONCALL.fields_by_name['param'].message_type = _EXPR
+_OPERATOR.fields_by_name['param'].message_type = _EXPR
+_OBJECT_OBJECTFIELD.fields_by_name['value'].message_type = _EXPR
+_OBJECT_OBJECTFIELD.containing_type = _OBJECT
+_OBJECT.fields_by_name['fld'].message_type = _OBJECT_OBJECTFIELD
+_ARRAY.fields_by_name['value'].message_type = _EXPR
+DESCRIPTOR.message_types_by_name['Expr'] = _EXPR
+DESCRIPTOR.message_types_by_name['Identifier'] = _IDENTIFIER
+DESCRIPTOR.message_types_by_name['DocumentPathItem'] = _DOCUMENTPATHITEM
+DESCRIPTOR.message_types_by_name['ColumnIdentifier'] = _COLUMNIDENTIFIER
+DESCRIPTOR.message_types_by_name['FunctionCall'] = _FUNCTIONCALL
+DESCRIPTOR.message_types_by_name['Operator'] = _OPERATOR
+DESCRIPTOR.message_types_by_name['Object'] = _OBJECT
+DESCRIPTOR.message_types_by_name['Array'] = _ARRAY
+
+Expr = _reflection.GeneratedProtocolMessageType('Expr', (_message.Message,), dict(
+ DESCRIPTOR = _EXPR,
+ __module__ = 'mysqlx_expr_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Expr.Expr)
+ ))
+_sym_db.RegisterMessage(Expr)
+
+Identifier = _reflection.GeneratedProtocolMessageType('Identifier', (_message.Message,), dict(
+ DESCRIPTOR = _IDENTIFIER,
+ __module__ = 'mysqlx_expr_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Expr.Identifier)
+ ))
+_sym_db.RegisterMessage(Identifier)
+
+DocumentPathItem = _reflection.GeneratedProtocolMessageType('DocumentPathItem', (_message.Message,), dict(
+ DESCRIPTOR = _DOCUMENTPATHITEM,
+ __module__ = 'mysqlx_expr_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Expr.DocumentPathItem)
+ ))
+_sym_db.RegisterMessage(DocumentPathItem)
+
+ColumnIdentifier = _reflection.GeneratedProtocolMessageType('ColumnIdentifier', (_message.Message,), dict(
+ DESCRIPTOR = _COLUMNIDENTIFIER,
+ __module__ = 'mysqlx_expr_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Expr.ColumnIdentifier)
+ ))
+_sym_db.RegisterMessage(ColumnIdentifier)
+
+FunctionCall = _reflection.GeneratedProtocolMessageType('FunctionCall', (_message.Message,), dict(
+ DESCRIPTOR = _FUNCTIONCALL,
+ __module__ = 'mysqlx_expr_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Expr.FunctionCall)
+ ))
+_sym_db.RegisterMessage(FunctionCall)
+
+Operator = _reflection.GeneratedProtocolMessageType('Operator', (_message.Message,), dict(
+ DESCRIPTOR = _OPERATOR,
+ __module__ = 'mysqlx_expr_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Expr.Operator)
+ ))
+_sym_db.RegisterMessage(Operator)
+
+Object = _reflection.GeneratedProtocolMessageType('Object', (_message.Message,), dict(
+
+ ObjectField = _reflection.GeneratedProtocolMessageType('ObjectField', (_message.Message,), dict(
+ DESCRIPTOR = _OBJECT_OBJECTFIELD,
+ __module__ = 'mysqlx_expr_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Expr.Object.ObjectField)
+ ))
+ ,
+ DESCRIPTOR = _OBJECT,
+ __module__ = 'mysqlx_expr_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Expr.Object)
+ ))
+_sym_db.RegisterMessage(Object)
+_sym_db.RegisterMessage(Object.ObjectField)
+
+Array = _reflection.GeneratedProtocolMessageType('Array', (_message.Message,), dict(
+ DESCRIPTOR = _ARRAY,
+ __module__ = 'mysqlx_expr_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Expr.Array)
+ ))
+_sym_db.RegisterMessage(Array)
+
+
+DESCRIPTOR.has_options = True
+DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.mysql.cj.x.protobufH\003'))
+# @@protoc_insertion_point(module_scope)
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_notice_pb2.py b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_notice_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..a4624cee29f8b52fbc9f8f36ac7000ceee5519ed
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_notice_pb2.py
@@ -0,0 +1,523 @@
+# Copyright (c) 2017, 2020, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: mysqlx_notice.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from mysqlx.protobuf import mysqlx_datatypes_pb2 as mysqlx__datatypes__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name='mysqlx_notice.proto',
+ package='Mysqlx.Notice',
+ syntax='proto2',
+ serialized_pb=_b('\n\x13mysqlx_notice.proto\x12\rMysqlx.Notice\x1a\x16mysqlx_datatypes.proto\"\xff\x01\n\x05\x46rame\x12\x0c\n\x04type\x18\x01 \x02(\r\x12\x31\n\x05scope\x18\x02 \x01(\x0e\x32\x1a.Mysqlx.Notice.Frame.Scope:\x06GLOBAL\x12\x0f\n\x07payload\x18\x03 \x01(\x0c\"\x1e\n\x05Scope\x12\n\n\x06GLOBAL\x10\x01\x12\t\n\x05LOCAL\x10\x02\"\x83\x01\n\x04Type\x12\x0b\n\x07WARNING\x10\x01\x12\x1c\n\x18SESSION_VARIABLE_CHANGED\x10\x02\x12\x19\n\x15SESSION_STATE_CHANGED\x10\x03\x12#\n\x1fGROUP_REPLICATION_STATE_CHANGED\x10\x04\x12\x10\n\x0cSERVER_HELLO\x10\x05\"\x85\x01\n\x07Warning\x12\x34\n\x05level\x18\x01 \x01(\x0e\x32\x1c.Mysqlx.Notice.Warning.Level:\x07WARNING\x12\x0c\n\x04\x63ode\x18\x02 \x02(\r\x12\x0b\n\x03msg\x18\x03 \x02(\t\")\n\x05Level\x12\x08\n\x04NOTE\x10\x01\x12\x0b\n\x07WARNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03\"P\n\x16SessionVariableChanged\x12\r\n\x05param\x18\x01 \x02(\t\x12\'\n\x05value\x18\x02 \x01(\x0b\x32\x18.Mysqlx.Datatypes.Scalar\"\xf1\x02\n\x13SessionStateChanged\x12;\n\x05param\x18\x01 \x02(\x0e\x32,.Mysqlx.Notice.SessionStateChanged.Parameter\x12\'\n\x05value\x18\x02 \x03(\x0b\x32\x18.Mysqlx.Datatypes.Scalar\"\xf3\x01\n\tParameter\x12\x12\n\x0e\x43URRENT_SCHEMA\x10\x01\x12\x13\n\x0f\x41\x43\x43OUNT_EXPIRED\x10\x02\x12\x17\n\x13GENERATED_INSERT_ID\x10\x03\x12\x11\n\rROWS_AFFECTED\x10\x04\x12\x0e\n\nROWS_FOUND\x10\x05\x12\x10\n\x0cROWS_MATCHED\x10\x06\x12\x11\n\rTRX_COMMITTED\x10\x07\x12\x12\n\x0eTRX_ROLLEDBACK\x10\t\x12\x14\n\x10PRODUCED_MESSAGE\x10\n\x12\x16\n\x12\x43LIENT_ID_ASSIGNED\x10\x0b\x12\x1a\n\x16GENERATED_DOCUMENT_IDS\x10\x0c\"\xae\x01\n\x1cGroupReplicationStateChanged\x12\x0c\n\x04type\x18\x01 \x02(\r\x12\x0f\n\x07view_id\x18\x02 \x01(\t\"o\n\x04Type\x12\x1a\n\x16MEMBERSHIP_QUORUM_LOSS\x10\x01\x12\x1a\n\x16MEMBERSHIP_VIEW_CHANGE\x10\x02\x12\x16\n\x12MEMBER_ROLE_CHANGE\x10\x03\x12\x17\n\x13MEMBER_STATE_CHANGE\x10\x04\"\r\n\x0bServerHelloB\x1b\n\x17\x63om.mysql.cj.x.protobufH\x03')
+ ,
+ dependencies=[mysqlx__datatypes__pb2.DESCRIPTOR,])
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+
+
+_FRAME_SCOPE = _descriptor.EnumDescriptor(
+ name='Scope',
+ full_name='Mysqlx.Notice.Frame.Scope',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='GLOBAL', index=0, number=1,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='LOCAL', index=1, number=2,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=154,
+ serialized_end=184,
+)
+_sym_db.RegisterEnumDescriptor(_FRAME_SCOPE)
+
+_FRAME_TYPE = _descriptor.EnumDescriptor(
+ name='Type',
+ full_name='Mysqlx.Notice.Frame.Type',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='WARNING', index=0, number=1,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='SESSION_VARIABLE_CHANGED', index=1, number=2,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='SESSION_STATE_CHANGED', index=2, number=3,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='GROUP_REPLICATION_STATE_CHANGED', index=3, number=4,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='SERVER_HELLO', index=4, number=5,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=187,
+ serialized_end=318,
+)
+_sym_db.RegisterEnumDescriptor(_FRAME_TYPE)
+
+_WARNING_LEVEL = _descriptor.EnumDescriptor(
+ name='Level',
+ full_name='Mysqlx.Notice.Warning.Level',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='NOTE', index=0, number=1,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='WARNING', index=1, number=2,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='ERROR', index=2, number=3,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=413,
+ serialized_end=454,
+)
+_sym_db.RegisterEnumDescriptor(_WARNING_LEVEL)
+
+_SESSIONSTATECHANGED_PARAMETER = _descriptor.EnumDescriptor(
+ name='Parameter',
+ full_name='Mysqlx.Notice.SessionStateChanged.Parameter',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='CURRENT_SCHEMA', index=0, number=1,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='ACCOUNT_EXPIRED', index=1, number=2,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='GENERATED_INSERT_ID', index=2, number=3,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='ROWS_AFFECTED', index=3, number=4,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='ROWS_FOUND', index=4, number=5,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='ROWS_MATCHED', index=5, number=6,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='TRX_COMMITTED', index=6, number=7,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='TRX_ROLLEDBACK', index=7, number=9,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='PRODUCED_MESSAGE', index=8, number=10,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='CLIENT_ID_ASSIGNED', index=9, number=11,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='GENERATED_DOCUMENT_IDS', index=10, number=12,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=665,
+ serialized_end=908,
+)
+_sym_db.RegisterEnumDescriptor(_SESSIONSTATECHANGED_PARAMETER)
+
+_GROUPREPLICATIONSTATECHANGED_TYPE = _descriptor.EnumDescriptor(
+ name='Type',
+ full_name='Mysqlx.Notice.GroupReplicationStateChanged.Type',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='MEMBERSHIP_QUORUM_LOSS', index=0, number=1,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='MEMBERSHIP_VIEW_CHANGE', index=1, number=2,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='MEMBER_ROLE_CHANGE', index=2, number=3,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='MEMBER_STATE_CHANGE', index=3, number=4,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=974,
+ serialized_end=1085,
+)
+_sym_db.RegisterEnumDescriptor(_GROUPREPLICATIONSTATECHANGED_TYPE)
+
+
+_FRAME = _descriptor.Descriptor(
+ name='Frame',
+ full_name='Mysqlx.Notice.Frame',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='type', full_name='Mysqlx.Notice.Frame.type', index=0,
+ number=1, type=13, cpp_type=3, label=2,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='scope', full_name='Mysqlx.Notice.Frame.scope', index=1,
+ number=2, type=14, cpp_type=8, label=1,
+ has_default_value=True, default_value=1,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='payload', full_name='Mysqlx.Notice.Frame.payload', index=2,
+ number=3, type=12, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b(""),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _FRAME_SCOPE,
+ _FRAME_TYPE,
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=63,
+ serialized_end=318,
+)
+
+
+_WARNING = _descriptor.Descriptor(
+ name='Warning',
+ full_name='Mysqlx.Notice.Warning',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='level', full_name='Mysqlx.Notice.Warning.level', index=0,
+ number=1, type=14, cpp_type=8, label=1,
+ has_default_value=True, default_value=2,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='code', full_name='Mysqlx.Notice.Warning.code', index=1,
+ number=2, type=13, cpp_type=3, label=2,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='msg', full_name='Mysqlx.Notice.Warning.msg', index=2,
+ number=3, type=9, cpp_type=9, label=2,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _WARNING_LEVEL,
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=321,
+ serialized_end=454,
+)
+
+
+_SESSIONVARIABLECHANGED = _descriptor.Descriptor(
+ name='SessionVariableChanged',
+ full_name='Mysqlx.Notice.SessionVariableChanged',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='param', full_name='Mysqlx.Notice.SessionVariableChanged.param', index=0,
+ number=1, type=9, cpp_type=9, label=2,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='value', full_name='Mysqlx.Notice.SessionVariableChanged.value', index=1,
+ number=2, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=456,
+ serialized_end=536,
+)
+
+
+_SESSIONSTATECHANGED = _descriptor.Descriptor(
+ name='SessionStateChanged',
+ full_name='Mysqlx.Notice.SessionStateChanged',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='param', full_name='Mysqlx.Notice.SessionStateChanged.param', index=0,
+ number=1, type=14, cpp_type=8, label=2,
+ has_default_value=False, default_value=1,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='value', full_name='Mysqlx.Notice.SessionStateChanged.value', index=1,
+ number=2, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _SESSIONSTATECHANGED_PARAMETER,
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=539,
+ serialized_end=908,
+)
+
+
+_GROUPREPLICATIONSTATECHANGED = _descriptor.Descriptor(
+ name='GroupReplicationStateChanged',
+ full_name='Mysqlx.Notice.GroupReplicationStateChanged',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='type', full_name='Mysqlx.Notice.GroupReplicationStateChanged.type', index=0,
+ number=1, type=13, cpp_type=3, label=2,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='view_id', full_name='Mysqlx.Notice.GroupReplicationStateChanged.view_id', index=1,
+ number=2, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _GROUPREPLICATIONSTATECHANGED_TYPE,
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=911,
+ serialized_end=1085,
+)
+
+
+_SERVERHELLO = _descriptor.Descriptor(
+ name='ServerHello',
+ full_name='Mysqlx.Notice.ServerHello',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=1087,
+ serialized_end=1100,
+)
+
+_FRAME.fields_by_name['scope'].enum_type = _FRAME_SCOPE
+_FRAME_SCOPE.containing_type = _FRAME
+_FRAME_TYPE.containing_type = _FRAME
+_WARNING.fields_by_name['level'].enum_type = _WARNING_LEVEL
+_WARNING_LEVEL.containing_type = _WARNING
+_SESSIONVARIABLECHANGED.fields_by_name['value'].message_type = mysqlx__datatypes__pb2._SCALAR
+_SESSIONSTATECHANGED.fields_by_name['param'].enum_type = _SESSIONSTATECHANGED_PARAMETER
+_SESSIONSTATECHANGED.fields_by_name['value'].message_type = mysqlx__datatypes__pb2._SCALAR
+_SESSIONSTATECHANGED_PARAMETER.containing_type = _SESSIONSTATECHANGED
+_GROUPREPLICATIONSTATECHANGED_TYPE.containing_type = _GROUPREPLICATIONSTATECHANGED
+DESCRIPTOR.message_types_by_name['Frame'] = _FRAME
+DESCRIPTOR.message_types_by_name['Warning'] = _WARNING
+DESCRIPTOR.message_types_by_name['SessionVariableChanged'] = _SESSIONVARIABLECHANGED
+DESCRIPTOR.message_types_by_name['SessionStateChanged'] = _SESSIONSTATECHANGED
+DESCRIPTOR.message_types_by_name['GroupReplicationStateChanged'] = _GROUPREPLICATIONSTATECHANGED
+DESCRIPTOR.message_types_by_name['ServerHello'] = _SERVERHELLO
+
+Frame = _reflection.GeneratedProtocolMessageType('Frame', (_message.Message,), dict(
+ DESCRIPTOR = _FRAME,
+ __module__ = 'mysqlx_notice_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Notice.Frame)
+ ))
+_sym_db.RegisterMessage(Frame)
+
+Warning = _reflection.GeneratedProtocolMessageType('Warning', (_message.Message,), dict(
+ DESCRIPTOR = _WARNING,
+ __module__ = 'mysqlx_notice_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Notice.Warning)
+ ))
+_sym_db.RegisterMessage(Warning)
+
+SessionVariableChanged = _reflection.GeneratedProtocolMessageType('SessionVariableChanged', (_message.Message,), dict(
+ DESCRIPTOR = _SESSIONVARIABLECHANGED,
+ __module__ = 'mysqlx_notice_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Notice.SessionVariableChanged)
+ ))
+_sym_db.RegisterMessage(SessionVariableChanged)
+
+SessionStateChanged = _reflection.GeneratedProtocolMessageType('SessionStateChanged', (_message.Message,), dict(
+ DESCRIPTOR = _SESSIONSTATECHANGED,
+ __module__ = 'mysqlx_notice_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Notice.SessionStateChanged)
+ ))
+_sym_db.RegisterMessage(SessionStateChanged)
+
+GroupReplicationStateChanged = _reflection.GeneratedProtocolMessageType('GroupReplicationStateChanged', (_message.Message,), dict(
+ DESCRIPTOR = _GROUPREPLICATIONSTATECHANGED,
+ __module__ = 'mysqlx_notice_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Notice.GroupReplicationStateChanged)
+ ))
+_sym_db.RegisterMessage(GroupReplicationStateChanged)
+
+ServerHello = _reflection.GeneratedProtocolMessageType('ServerHello', (_message.Message,), dict(
+ DESCRIPTOR = _SERVERHELLO,
+ __module__ = 'mysqlx_notice_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Notice.ServerHello)
+ ))
+_sym_db.RegisterMessage(ServerHello)
+
+
+DESCRIPTOR.has_options = True
+DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.mysql.cj.x.protobufH\003'))
+# @@protoc_insertion_point(module_scope)
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_pb2.py b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..a9c12acd7fad847f72de8a51a54eb6fd455eee0e
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_pb2.py
@@ -0,0 +1,432 @@
+# Copyright (c) 2017, 2020, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: mysqlx.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name='mysqlx.proto',
+ package='Mysqlx',
+ syntax='proto2',
+ serialized_pb=_b('\n\x0cmysqlx.proto\x12\x06Mysqlx\"\xfc\x03\n\x0e\x43lientMessages\"\xe9\x03\n\x04Type\x12\x18\n\x14\x43ON_CAPABILITIES_GET\x10\x01\x12\x18\n\x14\x43ON_CAPABILITIES_SET\x10\x02\x12\r\n\tCON_CLOSE\x10\x03\x12\x1b\n\x17SESS_AUTHENTICATE_START\x10\x04\x12\x1e\n\x1aSESS_AUTHENTICATE_CONTINUE\x10\x05\x12\x0e\n\nSESS_RESET\x10\x06\x12\x0e\n\nSESS_CLOSE\x10\x07\x12\x14\n\x10SQL_STMT_EXECUTE\x10\x0c\x12\r\n\tCRUD_FIND\x10\x11\x12\x0f\n\x0b\x43RUD_INSERT\x10\x12\x12\x0f\n\x0b\x43RUD_UPDATE\x10\x13\x12\x0f\n\x0b\x43RUD_DELETE\x10\x14\x12\x0f\n\x0b\x45XPECT_OPEN\x10\x18\x12\x10\n\x0c\x45XPECT_CLOSE\x10\x19\x12\x14\n\x10\x43RUD_CREATE_VIEW\x10\x1e\x12\x14\n\x10\x43RUD_MODIFY_VIEW\x10\x1f\x12\x12\n\x0e\x43RUD_DROP_VIEW\x10 \x12\x13\n\x0fPREPARE_PREPARE\x10(\x12\x13\n\x0fPREPARE_EXECUTE\x10)\x12\x16\n\x12PREPARE_DEALLOCATE\x10*\x12\x0f\n\x0b\x43URSOR_OPEN\x10+\x12\x10\n\x0c\x43URSOR_CLOSE\x10,\x12\x10\n\x0c\x43URSOR_FETCH\x10-\x12\x0f\n\x0b\x43OMPRESSION\x10.\"\xf3\x02\n\x0eServerMessages\"\xe0\x02\n\x04Type\x12\x06\n\x02OK\x10\x00\x12\t\n\x05\x45RROR\x10\x01\x12\x15\n\x11\x43ONN_CAPABILITIES\x10\x02\x12\x1e\n\x1aSESS_AUTHENTICATE_CONTINUE\x10\x03\x12\x18\n\x14SESS_AUTHENTICATE_OK\x10\x04\x12\n\n\x06NOTICE\x10\x0b\x12\x1e\n\x1aRESULTSET_COLUMN_META_DATA\x10\x0c\x12\x11\n\rRESULTSET_ROW\x10\r\x12\x18\n\x14RESULTSET_FETCH_DONE\x10\x0e\x12\x1d\n\x19RESULTSET_FETCH_SUSPENDED\x10\x0f\x12(\n$RESULTSET_FETCH_DONE_MORE_RESULTSETS\x10\x10\x12\x17\n\x13SQL_STMT_EXECUTE_OK\x10\x11\x12(\n$RESULTSET_FETCH_DONE_MORE_OUT_PARAMS\x10\x12\x12\x0f\n\x0b\x43OMPRESSION\x10\x13\"\x11\n\x02Ok\x12\x0b\n\x03msg\x18\x01 \x01(\t\"\x88\x01\n\x05\x45rror\x12/\n\x08severity\x18\x01 \x01(\x0e\x32\x16.Mysqlx.Error.Severity:\x05\x45RROR\x12\x0c\n\x04\x63ode\x18\x02 \x02(\r\x12\x11\n\tsql_state\x18\x04 \x02(\t\x12\x0b\n\x03msg\x18\x03 \x02(\t\" \n\x08Severity\x12\t\n\x05\x45RROR\x10\x00\x12\t\n\x05\x46\x41TAL\x10\x01\x42\x1b\n\x17\x63om.mysql.cj.x.protobufH\x03')
+)
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+
+
+_CLIENTMESSAGES_TYPE = _descriptor.EnumDescriptor(
+ name='Type',
+ full_name='Mysqlx.ClientMessages.Type',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='CON_CAPABILITIES_GET', index=0, number=1,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='CON_CAPABILITIES_SET', index=1, number=2,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='CON_CLOSE', index=2, number=3,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='SESS_AUTHENTICATE_START', index=3, number=4,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='SESS_AUTHENTICATE_CONTINUE', index=4, number=5,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='SESS_RESET', index=5, number=6,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='SESS_CLOSE', index=6, number=7,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='SQL_STMT_EXECUTE', index=7, number=12,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='CRUD_FIND', index=8, number=17,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='CRUD_INSERT', index=9, number=18,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='CRUD_UPDATE', index=10, number=19,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='CRUD_DELETE', index=11, number=20,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='EXPECT_OPEN', index=12, number=24,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='EXPECT_CLOSE', index=13, number=25,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='CRUD_CREATE_VIEW', index=14, number=30,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='CRUD_MODIFY_VIEW', index=15, number=31,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='CRUD_DROP_VIEW', index=16, number=32,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='PREPARE_PREPARE', index=17, number=40,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='PREPARE_EXECUTE', index=18, number=41,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='PREPARE_DEALLOCATE', index=19, number=42,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='CURSOR_OPEN', index=20, number=43,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='CURSOR_CLOSE', index=21, number=44,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='CURSOR_FETCH', index=22, number=45,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='COMPRESSION', index=23, number=46,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=44,
+ serialized_end=533,
+)
+_sym_db.RegisterEnumDescriptor(_CLIENTMESSAGES_TYPE)
+
+_SERVERMESSAGES_TYPE = _descriptor.EnumDescriptor(
+ name='Type',
+ full_name='Mysqlx.ServerMessages.Type',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='OK', index=0, number=0,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='ERROR', index=1, number=1,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='CONN_CAPABILITIES', index=2, number=2,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='SESS_AUTHENTICATE_CONTINUE', index=3, number=3,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='SESS_AUTHENTICATE_OK', index=4, number=4,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='NOTICE', index=5, number=11,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='RESULTSET_COLUMN_META_DATA', index=6, number=12,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='RESULTSET_ROW', index=7, number=13,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='RESULTSET_FETCH_DONE', index=8, number=14,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='RESULTSET_FETCH_SUSPENDED', index=9, number=15,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='RESULTSET_FETCH_DONE_MORE_RESULTSETS', index=10, number=16,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='SQL_STMT_EXECUTE_OK', index=11, number=17,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='RESULTSET_FETCH_DONE_MORE_OUT_PARAMS', index=12, number=18,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='COMPRESSION', index=13, number=19,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=555,
+ serialized_end=907,
+)
+_sym_db.RegisterEnumDescriptor(_SERVERMESSAGES_TYPE)
+
+_ERROR_SEVERITY = _descriptor.EnumDescriptor(
+ name='Severity',
+ full_name='Mysqlx.Error.Severity',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='ERROR', index=0, number=0,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='FATAL', index=1, number=1,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=1033,
+ serialized_end=1065,
+)
+_sym_db.RegisterEnumDescriptor(_ERROR_SEVERITY)
+
+
+_CLIENTMESSAGES = _descriptor.Descriptor(
+ name='ClientMessages',
+ full_name='Mysqlx.ClientMessages',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _CLIENTMESSAGES_TYPE,
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=25,
+ serialized_end=533,
+)
+
+
+_SERVERMESSAGES = _descriptor.Descriptor(
+ name='ServerMessages',
+ full_name='Mysqlx.ServerMessages',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _SERVERMESSAGES_TYPE,
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=536,
+ serialized_end=907,
+)
+
+
+_OK = _descriptor.Descriptor(
+ name='Ok',
+ full_name='Mysqlx.Ok',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='msg', full_name='Mysqlx.Ok.msg', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=909,
+ serialized_end=926,
+)
+
+
+_ERROR = _descriptor.Descriptor(
+ name='Error',
+ full_name='Mysqlx.Error',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='severity', full_name='Mysqlx.Error.severity', index=0,
+ number=1, type=14, cpp_type=8, label=1,
+ has_default_value=True, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='code', full_name='Mysqlx.Error.code', index=1,
+ number=2, type=13, cpp_type=3, label=2,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='sql_state', full_name='Mysqlx.Error.sql_state', index=2,
+ number=4, type=9, cpp_type=9, label=2,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='msg', full_name='Mysqlx.Error.msg', index=3,
+ number=3, type=9, cpp_type=9, label=2,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _ERROR_SEVERITY,
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=929,
+ serialized_end=1065,
+)
+
+_CLIENTMESSAGES_TYPE.containing_type = _CLIENTMESSAGES
+_SERVERMESSAGES_TYPE.containing_type = _SERVERMESSAGES
+_ERROR.fields_by_name['severity'].enum_type = _ERROR_SEVERITY
+_ERROR_SEVERITY.containing_type = _ERROR
+DESCRIPTOR.message_types_by_name['ClientMessages'] = _CLIENTMESSAGES
+DESCRIPTOR.message_types_by_name['ServerMessages'] = _SERVERMESSAGES
+DESCRIPTOR.message_types_by_name['Ok'] = _OK
+DESCRIPTOR.message_types_by_name['Error'] = _ERROR
+
+ClientMessages = _reflection.GeneratedProtocolMessageType('ClientMessages', (_message.Message,), dict(
+ DESCRIPTOR = _CLIENTMESSAGES,
+ __module__ = 'mysqlx_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.ClientMessages)
+ ))
+_sym_db.RegisterMessage(ClientMessages)
+
+ServerMessages = _reflection.GeneratedProtocolMessageType('ServerMessages', (_message.Message,), dict(
+ DESCRIPTOR = _SERVERMESSAGES,
+ __module__ = 'mysqlx_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.ServerMessages)
+ ))
+_sym_db.RegisterMessage(ServerMessages)
+
+Ok = _reflection.GeneratedProtocolMessageType('Ok', (_message.Message,), dict(
+ DESCRIPTOR = _OK,
+ __module__ = 'mysqlx_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Ok)
+ ))
+_sym_db.RegisterMessage(Ok)
+
+Error = _reflection.GeneratedProtocolMessageType('Error', (_message.Message,), dict(
+ DESCRIPTOR = _ERROR,
+ __module__ = 'mysqlx_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Error)
+ ))
+_sym_db.RegisterMessage(Error)
+
+
+DESCRIPTOR.has_options = True
+DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.mysql.cj.x.protobufH\003'))
+# @@protoc_insertion_point(module_scope)
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_prepare_pb2.py b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_prepare_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..e8131ebe1c3fced540d46e041bee50a1aee0142a
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_prepare_pb2.py
@@ -0,0 +1,320 @@
+# Copyright (c) 2017, 2020, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: mysqlx_prepare.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from mysqlx.protobuf import mysqlx_sql_pb2 as mysqlx__sql__pb2
+from mysqlx.protobuf import mysqlx_crud_pb2 as mysqlx__crud__pb2
+from mysqlx.protobuf import mysqlx_datatypes_pb2 as mysqlx__datatypes__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name='mysqlx_prepare.proto',
+ package='Mysqlx.Prepare',
+ syntax='proto2',
+ serialized_pb=_b('\n\x14mysqlx_prepare.proto\x12\x0eMysqlx.Prepare\x1a\x10mysqlx_sql.proto\x1a\x11mysqlx_crud.proto\x1a\x16mysqlx_datatypes.proto\"\x97\x03\n\x07Prepare\x12\x0f\n\x07stmt_id\x18\x01 \x02(\r\x12\x32\n\x04stmt\x18\x02 \x02(\x0b\x32$.Mysqlx.Prepare.Prepare.OneOfMessage\x1a\xc6\x02\n\x0cOneOfMessage\x12\x37\n\x04type\x18\x01 \x02(\x0e\x32).Mysqlx.Prepare.Prepare.OneOfMessage.Type\x12\x1f\n\x04\x66ind\x18\x02 \x01(\x0b\x32\x11.Mysqlx.Crud.Find\x12#\n\x06insert\x18\x03 \x01(\x0b\x32\x13.Mysqlx.Crud.Insert\x12#\n\x06update\x18\x04 \x01(\x0b\x32\x13.Mysqlx.Crud.Update\x12#\n\x06\x64\x65lete\x18\x05 \x01(\x0b\x32\x13.Mysqlx.Crud.Delete\x12-\n\x0cstmt_execute\x18\x06 \x01(\x0b\x32\x17.Mysqlx.Sql.StmtExecute\">\n\x04Type\x12\x08\n\x04\x46IND\x10\x00\x12\n\n\x06INSERT\x10\x01\x12\n\n\x06UPDATE\x10\x02\x12\n\n\x06\x44\x45LETE\x10\x04\x12\x08\n\x04STMT\x10\x05\"`\n\x07\x45xecute\x12\x0f\n\x07stmt_id\x18\x01 \x02(\r\x12#\n\x04\x61rgs\x18\x02 \x03(\x0b\x32\x15.Mysqlx.Datatypes.Any\x12\x1f\n\x10\x63ompact_metadata\x18\x03 \x01(\x08:\x05\x66\x61lse\"\x1d\n\nDeallocate\x12\x0f\n\x07stmt_id\x18\x01 \x02(\rB\x1b\n\x17\x63om.mysql.cj.x.protobufH\x03')
+ ,
+ dependencies=[mysqlx__sql__pb2.DESCRIPTOR,mysqlx__crud__pb2.DESCRIPTOR,mysqlx__datatypes__pb2.DESCRIPTOR,])
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+
+
+_PREPARE_ONEOFMESSAGE_TYPE = _descriptor.EnumDescriptor(
+ name='Type',
+ full_name='Mysqlx.Prepare.Prepare.OneOfMessage.Type',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='FIND', index=0, number=0,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='INSERT', index=1, number=1,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='UPDATE', index=2, number=2,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='DELETE', index=3, number=4,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='STMT', index=4, number=5,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=447,
+ serialized_end=509,
+)
+_sym_db.RegisterEnumDescriptor(_PREPARE_ONEOFMESSAGE_TYPE)
+
+
+_PREPARE_ONEOFMESSAGE = _descriptor.Descriptor(
+ name='OneOfMessage',
+ full_name='Mysqlx.Prepare.Prepare.OneOfMessage',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='type', full_name='Mysqlx.Prepare.Prepare.OneOfMessage.type', index=0,
+ number=1, type=14, cpp_type=8, label=2,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='find', full_name='Mysqlx.Prepare.Prepare.OneOfMessage.find', index=1,
+ number=2, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='insert', full_name='Mysqlx.Prepare.Prepare.OneOfMessage.insert', index=2,
+ number=3, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='update', full_name='Mysqlx.Prepare.Prepare.OneOfMessage.update', index=3,
+ number=4, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='delete', full_name='Mysqlx.Prepare.Prepare.OneOfMessage.delete', index=4,
+ number=5, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='stmt_execute', full_name='Mysqlx.Prepare.Prepare.OneOfMessage.stmt_execute', index=5,
+ number=6, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _PREPARE_ONEOFMESSAGE_TYPE,
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=183,
+ serialized_end=509,
+)
+
+_PREPARE = _descriptor.Descriptor(
+ name='Prepare',
+ full_name='Mysqlx.Prepare.Prepare',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='stmt_id', full_name='Mysqlx.Prepare.Prepare.stmt_id', index=0,
+ number=1, type=13, cpp_type=3, label=2,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='stmt', full_name='Mysqlx.Prepare.Prepare.stmt', index=1,
+ number=2, type=11, cpp_type=10, label=2,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[_PREPARE_ONEOFMESSAGE, ],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=102,
+ serialized_end=509,
+)
+
+
+_EXECUTE = _descriptor.Descriptor(
+ name='Execute',
+ full_name='Mysqlx.Prepare.Execute',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='stmt_id', full_name='Mysqlx.Prepare.Execute.stmt_id', index=0,
+ number=1, type=13, cpp_type=3, label=2,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='args', full_name='Mysqlx.Prepare.Execute.args', index=1,
+ number=2, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='compact_metadata', full_name='Mysqlx.Prepare.Execute.compact_metadata', index=2,
+ number=3, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=511,
+ serialized_end=607,
+)
+
+
+_DEALLOCATE = _descriptor.Descriptor(
+ name='Deallocate',
+ full_name='Mysqlx.Prepare.Deallocate',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='stmt_id', full_name='Mysqlx.Prepare.Deallocate.stmt_id', index=0,
+ number=1, type=13, cpp_type=3, label=2,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=609,
+ serialized_end=638,
+)
+
+_PREPARE_ONEOFMESSAGE.fields_by_name['type'].enum_type = _PREPARE_ONEOFMESSAGE_TYPE
+_PREPARE_ONEOFMESSAGE.fields_by_name['find'].message_type = mysqlx__crud__pb2._FIND
+_PREPARE_ONEOFMESSAGE.fields_by_name['insert'].message_type = mysqlx__crud__pb2._INSERT
+_PREPARE_ONEOFMESSAGE.fields_by_name['update'].message_type = mysqlx__crud__pb2._UPDATE
+_PREPARE_ONEOFMESSAGE.fields_by_name['delete'].message_type = mysqlx__crud__pb2._DELETE
+_PREPARE_ONEOFMESSAGE.fields_by_name['stmt_execute'].message_type = mysqlx__sql__pb2._STMTEXECUTE
+_PREPARE_ONEOFMESSAGE.containing_type = _PREPARE
+_PREPARE_ONEOFMESSAGE_TYPE.containing_type = _PREPARE_ONEOFMESSAGE
+_PREPARE.fields_by_name['stmt'].message_type = _PREPARE_ONEOFMESSAGE
+_EXECUTE.fields_by_name['args'].message_type = mysqlx__datatypes__pb2._ANY
+DESCRIPTOR.message_types_by_name['Prepare'] = _PREPARE
+DESCRIPTOR.message_types_by_name['Execute'] = _EXECUTE
+DESCRIPTOR.message_types_by_name['Deallocate'] = _DEALLOCATE
+
+Prepare = _reflection.GeneratedProtocolMessageType('Prepare', (_message.Message,), dict(
+
+ OneOfMessage = _reflection.GeneratedProtocolMessageType('OneOfMessage', (_message.Message,), dict(
+ DESCRIPTOR = _PREPARE_ONEOFMESSAGE,
+ __module__ = 'mysqlx_prepare_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Prepare.Prepare.OneOfMessage)
+ ))
+ ,
+ DESCRIPTOR = _PREPARE,
+ __module__ = 'mysqlx_prepare_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Prepare.Prepare)
+ ))
+_sym_db.RegisterMessage(Prepare)
+_sym_db.RegisterMessage(Prepare.OneOfMessage)
+
+Execute = _reflection.GeneratedProtocolMessageType('Execute', (_message.Message,), dict(
+ DESCRIPTOR = _EXECUTE,
+ __module__ = 'mysqlx_prepare_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Prepare.Execute)
+ ))
+_sym_db.RegisterMessage(Execute)
+
+Deallocate = _reflection.GeneratedProtocolMessageType('Deallocate', (_message.Message,), dict(
+ DESCRIPTOR = _DEALLOCATE,
+ __module__ = 'mysqlx_prepare_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Prepare.Deallocate)
+ ))
+_sym_db.RegisterMessage(Deallocate)
+
+
+DESCRIPTOR.has_options = True
+DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.mysql.cj.x.protobufH\003'))
+# @@protoc_insertion_point(module_scope)
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_resultset_pb2.py b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_resultset_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..0ba75dad6662e73010eb94893bc93e06fddd0024
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_resultset_pb2.py
@@ -0,0 +1,462 @@
+# Copyright (c) 2017, 2020, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: mysqlx_resultset.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name='mysqlx_resultset.proto',
+ package='Mysqlx.Resultset',
+ syntax='proto2',
+ serialized_pb=_b('\n\x16mysqlx_resultset.proto\x12\x10Mysqlx.Resultset\"\x18\n\x16\x46\x65tchDoneMoreOutParams\"\x19\n\x17\x46\x65tchDoneMoreResultsets\"\x0b\n\tFetchDone\"\x10\n\x0e\x46\x65tchSuspended\"\x9f\x03\n\x0e\x43olumnMetaData\x12\x38\n\x04type\x18\x01 \x02(\x0e\x32*.Mysqlx.Resultset.ColumnMetaData.FieldType\x12\x0c\n\x04name\x18\x02 \x01(\x0c\x12\x15\n\roriginal_name\x18\x03 \x01(\x0c\x12\r\n\x05table\x18\x04 \x01(\x0c\x12\x16\n\x0eoriginal_table\x18\x05 \x01(\x0c\x12\x0e\n\x06schema\x18\x06 \x01(\x0c\x12\x0f\n\x07\x63\x61talog\x18\x07 \x01(\x0c\x12\x11\n\tcollation\x18\x08 \x01(\x04\x12\x19\n\x11\x66ractional_digits\x18\t \x01(\r\x12\x0e\n\x06length\x18\n \x01(\r\x12\r\n\x05\x66lags\x18\x0b \x01(\r\x12\x14\n\x0c\x63ontent_type\x18\x0c \x01(\r\"\x82\x01\n\tFieldType\x12\x08\n\x04SINT\x10\x01\x12\x08\n\x04UINT\x10\x02\x12\n\n\x06\x44OUBLE\x10\x05\x12\t\n\x05\x46LOAT\x10\x06\x12\t\n\x05\x42YTES\x10\x07\x12\x08\n\x04TIME\x10\n\x12\x0c\n\x08\x44\x41TETIME\x10\x0c\x12\x07\n\x03SET\x10\x0f\x12\x08\n\x04\x45NUM\x10\x10\x12\x07\n\x03\x42IT\x10\x11\x12\x0b\n\x07\x44\x45\x43IMAL\x10\x12\"\x14\n\x03Row\x12\r\n\x05\x66ield\x18\x01 \x03(\x0c*4\n\x11\x43ontentType_BYTES\x12\x0c\n\x08GEOMETRY\x10\x01\x12\x08\n\x04JSON\x10\x02\x12\x07\n\x03XML\x10\x03*.\n\x14\x43ontentType_DATETIME\x12\x08\n\x04\x44\x41TE\x10\x01\x12\x0c\n\x08\x44\x41TETIME\x10\x02\x42\x1b\n\x17\x63om.mysql.cj.x.protobufH\x03')
+)
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+_CONTENTTYPE_BYTES = _descriptor.EnumDescriptor(
+ name='ContentType_BYTES',
+ full_name='Mysqlx.Resultset.ContentType_BYTES',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='GEOMETRY', index=0, number=1,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='JSON', index=1, number=2,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='XML', index=2, number=3,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=568,
+ serialized_end=620,
+)
+_sym_db.RegisterEnumDescriptor(_CONTENTTYPE_BYTES)
+
+ContentType_BYTES = enum_type_wrapper.EnumTypeWrapper(_CONTENTTYPE_BYTES)
+_CONTENTTYPE_DATETIME = _descriptor.EnumDescriptor(
+ name='ContentType_DATETIME',
+ full_name='Mysqlx.Resultset.ContentType_DATETIME',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='DATE', index=0, number=1,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='DATETIME', index=1, number=2,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=622,
+ serialized_end=668,
+)
+_sym_db.RegisterEnumDescriptor(_CONTENTTYPE_DATETIME)
+
+ContentType_DATETIME = enum_type_wrapper.EnumTypeWrapper(_CONTENTTYPE_DATETIME)
+GEOMETRY = 1
+JSON = 2
+XML = 3
+DATE = 1
+DATETIME = 2
+
+
+_COLUMNMETADATA_FIELDTYPE = _descriptor.EnumDescriptor(
+ name='FieldType',
+ full_name='Mysqlx.Resultset.ColumnMetaData.FieldType',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='SINT', index=0, number=1,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='UINT', index=1, number=2,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='DOUBLE', index=2, number=5,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='FLOAT', index=3, number=6,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='BYTES', index=4, number=7,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='TIME', index=5, number=10,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='DATETIME', index=6, number=12,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='SET', index=7, number=15,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='ENUM', index=8, number=16,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='BIT', index=9, number=17,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='DECIMAL', index=10, number=18,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=414,
+ serialized_end=544,
+)
+_sym_db.RegisterEnumDescriptor(_COLUMNMETADATA_FIELDTYPE)
+
+
+_FETCHDONEMOREOUTPARAMS = _descriptor.Descriptor(
+ name='FetchDoneMoreOutParams',
+ full_name='Mysqlx.Resultset.FetchDoneMoreOutParams',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=44,
+ serialized_end=68,
+)
+
+
+_FETCHDONEMORERESULTSETS = _descriptor.Descriptor(
+ name='FetchDoneMoreResultsets',
+ full_name='Mysqlx.Resultset.FetchDoneMoreResultsets',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=70,
+ serialized_end=95,
+)
+
+
+_FETCHDONE = _descriptor.Descriptor(
+ name='FetchDone',
+ full_name='Mysqlx.Resultset.FetchDone',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=97,
+ serialized_end=108,
+)
+
+
+_FETCHSUSPENDED = _descriptor.Descriptor(
+ name='FetchSuspended',
+ full_name='Mysqlx.Resultset.FetchSuspended',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=110,
+ serialized_end=126,
+)
+
+
+_COLUMNMETADATA = _descriptor.Descriptor(
+ name='ColumnMetaData',
+ full_name='Mysqlx.Resultset.ColumnMetaData',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='type', full_name='Mysqlx.Resultset.ColumnMetaData.type', index=0,
+ number=1, type=14, cpp_type=8, label=2,
+ has_default_value=False, default_value=1,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='name', full_name='Mysqlx.Resultset.ColumnMetaData.name', index=1,
+ number=2, type=12, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b(""),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='original_name', full_name='Mysqlx.Resultset.ColumnMetaData.original_name', index=2,
+ number=3, type=12, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b(""),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='table', full_name='Mysqlx.Resultset.ColumnMetaData.table', index=3,
+ number=4, type=12, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b(""),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='original_table', full_name='Mysqlx.Resultset.ColumnMetaData.original_table', index=4,
+ number=5, type=12, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b(""),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='schema', full_name='Mysqlx.Resultset.ColumnMetaData.schema', index=5,
+ number=6, type=12, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b(""),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='catalog', full_name='Mysqlx.Resultset.ColumnMetaData.catalog', index=6,
+ number=7, type=12, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b(""),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='collation', full_name='Mysqlx.Resultset.ColumnMetaData.collation', index=7,
+ number=8, type=4, cpp_type=4, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='fractional_digits', full_name='Mysqlx.Resultset.ColumnMetaData.fractional_digits', index=8,
+ number=9, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='length', full_name='Mysqlx.Resultset.ColumnMetaData.length', index=9,
+ number=10, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='flags', full_name='Mysqlx.Resultset.ColumnMetaData.flags', index=10,
+ number=11, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='content_type', full_name='Mysqlx.Resultset.ColumnMetaData.content_type', index=11,
+ number=12, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _COLUMNMETADATA_FIELDTYPE,
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=129,
+ serialized_end=544,
+)
+
+
+_ROW = _descriptor.Descriptor(
+ name='Row',
+ full_name='Mysqlx.Resultset.Row',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='field', full_name='Mysqlx.Resultset.Row.field', index=0,
+ number=1, type=12, cpp_type=9, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=546,
+ serialized_end=566,
+)
+
+_COLUMNMETADATA.fields_by_name['type'].enum_type = _COLUMNMETADATA_FIELDTYPE
+_COLUMNMETADATA_FIELDTYPE.containing_type = _COLUMNMETADATA
+DESCRIPTOR.message_types_by_name['FetchDoneMoreOutParams'] = _FETCHDONEMOREOUTPARAMS
+DESCRIPTOR.message_types_by_name['FetchDoneMoreResultsets'] = _FETCHDONEMORERESULTSETS
+DESCRIPTOR.message_types_by_name['FetchDone'] = _FETCHDONE
+DESCRIPTOR.message_types_by_name['FetchSuspended'] = _FETCHSUSPENDED
+DESCRIPTOR.message_types_by_name['ColumnMetaData'] = _COLUMNMETADATA
+DESCRIPTOR.message_types_by_name['Row'] = _ROW
+DESCRIPTOR.enum_types_by_name['ContentType_BYTES'] = _CONTENTTYPE_BYTES
+DESCRIPTOR.enum_types_by_name['ContentType_DATETIME'] = _CONTENTTYPE_DATETIME
+
+FetchDoneMoreOutParams = _reflection.GeneratedProtocolMessageType('FetchDoneMoreOutParams', (_message.Message,), dict(
+ DESCRIPTOR = _FETCHDONEMOREOUTPARAMS,
+ __module__ = 'mysqlx_resultset_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Resultset.FetchDoneMoreOutParams)
+ ))
+_sym_db.RegisterMessage(FetchDoneMoreOutParams)
+
+FetchDoneMoreResultsets = _reflection.GeneratedProtocolMessageType('FetchDoneMoreResultsets', (_message.Message,), dict(
+ DESCRIPTOR = _FETCHDONEMORERESULTSETS,
+ __module__ = 'mysqlx_resultset_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Resultset.FetchDoneMoreResultsets)
+ ))
+_sym_db.RegisterMessage(FetchDoneMoreResultsets)
+
+FetchDone = _reflection.GeneratedProtocolMessageType('FetchDone', (_message.Message,), dict(
+ DESCRIPTOR = _FETCHDONE,
+ __module__ = 'mysqlx_resultset_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Resultset.FetchDone)
+ ))
+_sym_db.RegisterMessage(FetchDone)
+
+FetchSuspended = _reflection.GeneratedProtocolMessageType('FetchSuspended', (_message.Message,), dict(
+ DESCRIPTOR = _FETCHSUSPENDED,
+ __module__ = 'mysqlx_resultset_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Resultset.FetchSuspended)
+ ))
+_sym_db.RegisterMessage(FetchSuspended)
+
+ColumnMetaData = _reflection.GeneratedProtocolMessageType('ColumnMetaData', (_message.Message,), dict(
+ DESCRIPTOR = _COLUMNMETADATA,
+ __module__ = 'mysqlx_resultset_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Resultset.ColumnMetaData)
+ ))
+_sym_db.RegisterMessage(ColumnMetaData)
+
+Row = _reflection.GeneratedProtocolMessageType('Row', (_message.Message,), dict(
+ DESCRIPTOR = _ROW,
+ __module__ = 'mysqlx_resultset_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Resultset.Row)
+ ))
+_sym_db.RegisterMessage(Row)
+
+
+DESCRIPTOR.has_options = True
+DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.mysql.cj.x.protobufH\003'))
+# @@protoc_insertion_point(module_scope)
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_session_pb2.py b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_session_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..25924c0b5280e953d4158bd29c0b71e41d7ac078
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_session_pb2.py
@@ -0,0 +1,262 @@
+# Copyright (c) 2017, 2020, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: mysqlx_session.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name='mysqlx_session.proto',
+ package='Mysqlx.Session',
+ syntax='proto2',
+ serialized_pb=_b('\n\x14mysqlx_session.proto\x12\x0eMysqlx.Session\"S\n\x11\x41uthenticateStart\x12\x11\n\tmech_name\x18\x01 \x02(\t\x12\x11\n\tauth_data\x18\x02 \x01(\x0c\x12\x18\n\x10initial_response\x18\x03 \x01(\x0c\")\n\x14\x41uthenticateContinue\x12\x11\n\tauth_data\x18\x01 \x02(\x0c\"#\n\x0e\x41uthenticateOk\x12\x11\n\tauth_data\x18\x01 \x01(\x0c\"!\n\x05Reset\x12\x18\n\tkeep_open\x18\x01 \x01(\x08:\x05\x66\x61lse\"\x07\n\x05\x43loseB\x1b\n\x17\x63om.mysql.cj.x.protobufH\x03')
+)
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+
+
+
+_AUTHENTICATESTART = _descriptor.Descriptor(
+ name='AuthenticateStart',
+ full_name='Mysqlx.Session.AuthenticateStart',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='mech_name', full_name='Mysqlx.Session.AuthenticateStart.mech_name', index=0,
+ number=1, type=9, cpp_type=9, label=2,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='auth_data', full_name='Mysqlx.Session.AuthenticateStart.auth_data', index=1,
+ number=2, type=12, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b(""),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='initial_response', full_name='Mysqlx.Session.AuthenticateStart.initial_response', index=2,
+ number=3, type=12, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b(""),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=40,
+ serialized_end=123,
+)
+
+
+_AUTHENTICATECONTINUE = _descriptor.Descriptor(
+ name='AuthenticateContinue',
+ full_name='Mysqlx.Session.AuthenticateContinue',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='auth_data', full_name='Mysqlx.Session.AuthenticateContinue.auth_data', index=0,
+ number=1, type=12, cpp_type=9, label=2,
+ has_default_value=False, default_value=_b(""),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=125,
+ serialized_end=166,
+)
+
+
+_AUTHENTICATEOK = _descriptor.Descriptor(
+ name='AuthenticateOk',
+ full_name='Mysqlx.Session.AuthenticateOk',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='auth_data', full_name='Mysqlx.Session.AuthenticateOk.auth_data', index=0,
+ number=1, type=12, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b(""),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=168,
+ serialized_end=203,
+)
+
+
+_RESET = _descriptor.Descriptor(
+ name='Reset',
+ full_name='Mysqlx.Session.Reset',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='keep_open', full_name='Mysqlx.Session.Reset.keep_open', index=0,
+ number=1, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=205,
+ serialized_end=238,
+)
+
+
+_CLOSE = _descriptor.Descriptor(
+ name='Close',
+ full_name='Mysqlx.Session.Close',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=240,
+ serialized_end=247,
+)
+
+DESCRIPTOR.message_types_by_name['AuthenticateStart'] = _AUTHENTICATESTART
+DESCRIPTOR.message_types_by_name['AuthenticateContinue'] = _AUTHENTICATECONTINUE
+DESCRIPTOR.message_types_by_name['AuthenticateOk'] = _AUTHENTICATEOK
+DESCRIPTOR.message_types_by_name['Reset'] = _RESET
+DESCRIPTOR.message_types_by_name['Close'] = _CLOSE
+
+AuthenticateStart = _reflection.GeneratedProtocolMessageType('AuthenticateStart', (_message.Message,), dict(
+ DESCRIPTOR = _AUTHENTICATESTART,
+ __module__ = 'mysqlx_session_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Session.AuthenticateStart)
+ ))
+_sym_db.RegisterMessage(AuthenticateStart)
+
+AuthenticateContinue = _reflection.GeneratedProtocolMessageType('AuthenticateContinue', (_message.Message,), dict(
+ DESCRIPTOR = _AUTHENTICATECONTINUE,
+ __module__ = 'mysqlx_session_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Session.AuthenticateContinue)
+ ))
+_sym_db.RegisterMessage(AuthenticateContinue)
+
+AuthenticateOk = _reflection.GeneratedProtocolMessageType('AuthenticateOk', (_message.Message,), dict(
+ DESCRIPTOR = _AUTHENTICATEOK,
+ __module__ = 'mysqlx_session_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Session.AuthenticateOk)
+ ))
+_sym_db.RegisterMessage(AuthenticateOk)
+
+Reset = _reflection.GeneratedProtocolMessageType('Reset', (_message.Message,), dict(
+ DESCRIPTOR = _RESET,
+ __module__ = 'mysqlx_session_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Session.Reset)
+ ))
+_sym_db.RegisterMessage(Reset)
+
+Close = _reflection.GeneratedProtocolMessageType('Close', (_message.Message,), dict(
+ DESCRIPTOR = _CLOSE,
+ __module__ = 'mysqlx_session_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Session.Close)
+ ))
+_sym_db.RegisterMessage(Close)
+
+
+DESCRIPTOR.has_options = True
+DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.mysql.cj.x.protobufH\003'))
+# @@protoc_insertion_point(module_scope)
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_sql_pb2.py b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_sql_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..79e56543b3f4b8e3000fb5fbdad5ef420efb39c7
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysqlx/protobuf/mysqlx_sql_pb2.py
@@ -0,0 +1,155 @@
+# Copyright (c) 2017, 2020, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: mysqlx_sql.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from mysqlx.protobuf import mysqlx_datatypes_pb2 as mysqlx__datatypes__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name='mysqlx_sql.proto',
+ package='Mysqlx.Sql',
+ syntax='proto2',
+ serialized_pb=_b('\n\x10mysqlx_sql.proto\x12\nMysqlx.Sql\x1a\x16mysqlx_datatypes.proto\"y\n\x0bStmtExecute\x12\x16\n\tnamespace\x18\x03 \x01(\t:\x03sql\x12\x0c\n\x04stmt\x18\x01 \x02(\x0c\x12#\n\x04\x61rgs\x18\x02 \x03(\x0b\x32\x15.Mysqlx.Datatypes.Any\x12\x1f\n\x10\x63ompact_metadata\x18\x04 \x01(\x08:\x05\x66\x61lse\"\x0f\n\rStmtExecuteOkB\x1b\n\x17\x63om.mysql.cj.x.protobufH\x03')
+ ,
+ dependencies=[mysqlx__datatypes__pb2.DESCRIPTOR,])
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+
+
+
+_STMTEXECUTE = _descriptor.Descriptor(
+ name='StmtExecute',
+ full_name='Mysqlx.Sql.StmtExecute',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='namespace', full_name='Mysqlx.Sql.StmtExecute.namespace', index=0,
+ number=3, type=9, cpp_type=9, label=1,
+ has_default_value=True, default_value=_b("sql").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='stmt', full_name='Mysqlx.Sql.StmtExecute.stmt', index=1,
+ number=1, type=12, cpp_type=9, label=2,
+ has_default_value=False, default_value=_b(""),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='args', full_name='Mysqlx.Sql.StmtExecute.args', index=2,
+ number=2, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='compact_metadata', full_name='Mysqlx.Sql.StmtExecute.compact_metadata', index=3,
+ number=4, type=8, cpp_type=7, label=1,
+ has_default_value=True, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=56,
+ serialized_end=177,
+)
+
+
+_STMTEXECUTEOK = _descriptor.Descriptor(
+ name='StmtExecuteOk',
+ full_name='Mysqlx.Sql.StmtExecuteOk',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto2',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=179,
+ serialized_end=194,
+)
+
+_STMTEXECUTE.fields_by_name['args'].message_type = mysqlx__datatypes__pb2._ANY
+DESCRIPTOR.message_types_by_name['StmtExecute'] = _STMTEXECUTE
+DESCRIPTOR.message_types_by_name['StmtExecuteOk'] = _STMTEXECUTEOK
+
+StmtExecute = _reflection.GeneratedProtocolMessageType('StmtExecute', (_message.Message,), dict(
+ DESCRIPTOR = _STMTEXECUTE,
+ __module__ = 'mysqlx_sql_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Sql.StmtExecute)
+ ))
+_sym_db.RegisterMessage(StmtExecute)
+
+StmtExecuteOk = _reflection.GeneratedProtocolMessageType('StmtExecuteOk', (_message.Message,), dict(
+ DESCRIPTOR = _STMTEXECUTEOK,
+ __module__ = 'mysqlx_sql_pb2'
+ # @@protoc_insertion_point(class_scope:Mysqlx.Sql.StmtExecuteOk)
+ ))
+_sym_db.RegisterMessage(StmtExecuteOk)
+
+
+DESCRIPTOR.has_options = True
+DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.mysql.cj.x.protobufH\003'))
+# @@protoc_insertion_point(module_scope)
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/protocol.py b/monEnvTP/lib/python3.8/site-packages/mysqlx/protocol.py
new file mode 100644
index 0000000000000000000000000000000000000000..3f88bbee4406968064b612f54bdd54573c81ed00
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysqlx/protocol.py
@@ -0,0 +1,1064 @@
+# Copyright (c) 2016, 2020, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""Implementation of the X protocol for MySQL servers.
+"""
+
+import logging
+import struct
+import zlib
+
+from io import BytesIO
+
+try:
+ import lz4.frame
+ HAVE_LZ4 = True
+except ImportError:
+ HAVE_LZ4 = False
+
+try:
+ import zstandard as zstd
+ HAVE_ZSTD = True
+except ImportError:
+ HAVE_ZSTD = False
+
+from .errors import (InterfaceError, NotSupportedError, OperationalError,
+ ProgrammingError)
+from .expr import (ExprParser, build_expr, build_scalar, build_bool_scalar,
+ build_int_scalar, build_unsigned_int_scalar)
+from .helpers import encode_to_bytes, get_item_or_attr
+from .result import Column
+from .protobuf import (CRUD_PREPARE_MAPPING, SERVER_MESSAGES,
+ PROTOBUF_REPEATED_TYPES, Message, mysqlxpb_enum)
+
+
+_COMPRESSION_THRESHOLD = 1000
+_LOGGER = logging.getLogger("mysqlx")
+
+
+class Compressor(object):
+ """Implements compression/decompression using `zstd_stream`, `lz4_message`
+ and `deflate_stream` algorithms.
+
+ Args:
+ algorithm (str): Compression algorithm.
+
+ .. versionadded:: 8.0.21
+
+ """
+ def __init__(self, algorithm):
+ self._algorithm = algorithm
+ if algorithm == "zstd_stream":
+ self._compressobj = zstd.ZstdCompressor()
+ self._decompressobj = zstd.ZstdDecompressor()
+ elif algorithm == "deflate_stream":
+ self._compressobj = zlib.compressobj()
+ self._decompressobj = zlib.decompressobj()
+ else:
+ self._compressobj = None
+ self._decompressobj = None
+
+ def compress(self, data):
+ """Compresses data and returns it.
+
+ Args:
+ data (str, bytes or buffer object): Data to be compressed.
+
+ Returns:
+ bytes: Compressed data.
+ """
+ if self._algorithm == "zstd_stream":
+ return self._compressobj.compress(data)
+ if self._algorithm == "lz4_message":
+ with lz4.frame.LZ4FrameCompressor() as compressor:
+ compressed = compressor.begin()
+ compressed += compressor.compress(data)
+ compressed += compressor.flush()
+ return compressed
+
+ # Using 'deflate_stream' algorithm
+ compressed = self._compressobj.compress(data)
+ compressed += self._compressobj.flush(zlib.Z_SYNC_FLUSH)
+ return compressed
+
+ def decompress(self, data):
+ """Decompresses a frame of data and returns it as a string of bytes.
+
+ Args:
+ data (str, bytes or buffer object): Data to be compressed.
+
+ Returns:
+ bytes: Decompresssed data.
+ """
+ if self._algorithm == "zstd_stream":
+ return self._decompressobj.decompress(data)
+ if self._algorithm == "lz4_message":
+ with lz4.frame.LZ4FrameDecompressor() as decompressor:
+ decompressed = decompressor.decompress(data)
+ return decompressed
+
+ # Using 'deflate' algorithm
+ decompressed = self._decompressobj.decompress(data)
+ decompressed += self._decompressobj.flush(zlib.Z_SYNC_FLUSH)
+ return decompressed
+
+
+class MessageReader(object):
+ """Implements a Message Reader.
+
+ Args:
+ socket_stream (mysqlx.connection.SocketStream): `SocketStream` object.
+
+ .. versionadded:: 8.0.21
+ """
+ def __init__(self, socket_stream):
+ self._stream = socket_stream
+ self._compressor = None
+ self._msg = None
+ self._msg_queue = []
+
+ def _read_message(self):
+ """Reads X Protocol messages from the stream and returns a
+ :class:`mysqlx.protobuf.Message` object.
+
+ Raises:
+ :class:`mysqlx.ProgrammingError`: If e connected server does not
+ have the MySQL X protocol plugin
+ enabled.
+
+ Returns:
+ mysqlx.protobuf.Message: MySQL X Protobuf Message.
+ """
+ if self._msg_queue:
+ return self._msg_queue.pop(0)
+
+ frame_size, frame_type = struct.unpack("<LB", self._stream.read(5))
+
+ if frame_type == 10:
+ raise ProgrammingError("The connected server does not have the "
+ "MySQL X protocol plugin enabled or "
+ "protocol mismatch")
+
+ frame_payload = self._stream.read(frame_size - 1)
+ if frame_type not in SERVER_MESSAGES:
+ raise ValueError("Unknown message type: {}".format(frame_type))
+
+ # Do not parse empty notices, Message requires a type in payload
+ if frame_type == 11 and frame_payload == b"":
+ return self._read_message()
+
+ frame_msg = Message.from_server_message(frame_type, frame_payload)
+
+ if frame_type == 19: # Mysqlx.ServerMessages.Type.COMPRESSION
+ uncompressed_size = frame_msg["uncompressed_size"]
+ stream = BytesIO(self._compressor.decompress(frame_msg["payload"]))
+ bytes_processed = 0
+ while bytes_processed < uncompressed_size:
+ payload_size, msg_type = \
+ struct.unpack("<LB", stream.read(5))
+ payload = stream.read(payload_size - 1)
+ self._msg_queue.append(
+ Message.from_server_message(msg_type, payload))
+ bytes_processed += payload_size + 4
+ return self._msg_queue.pop(0) if self._msg_queue else None
+
+ return frame_msg
+
+ def read_message(self):
+ """Read message.
+
+ Returns:
+ mysqlx.protobuf.Message: MySQL X Protobuf Message.
+ """
+ if self._msg is not None:
+ msg = self._msg
+ self._msg = None
+ return msg
+ return self._read_message()
+
+ def push_message(self, msg):
+ """Push message.
+
+ Args:
+ msg (mysqlx.protobuf.Message): MySQL X Protobuf Message.
+
+ Raises:
+ :class:`mysqlx.OperationalError`: If message push slot is full.
+ """
+ if self._msg is not None:
+ raise OperationalError("Message push slot is full")
+ self._msg = msg
+
+ def set_compression(self, algorithm):
+ """Creates a :class:`mysqlx.protocol.Compressor` object based on the
+ compression algorithm.
+
+ Args:
+ algorithm (str): Compression algorithm.
+
+ .. versionadded:: 8.0.21
+
+ """
+ self._compressor = Compressor(algorithm) if algorithm else None
+
+
+class MessageWriter(object):
+ """Implements a Message Writer.
+
+ Args:
+ socket_stream (mysqlx.connection.SocketStream): `SocketStream` object.
+
+ .. versionadded:: 8.0.21
+
+ """
+ def __init__(self, socket_stream):
+ self._stream = socket_stream
+ self._compressor = None
+
+ def write_message(self, msg_type, msg):
+ """Write message.
+
+ Args:
+ msg_type (int): The message type.
+ msg (mysqlx.protobuf.Message): MySQL X Protobuf Message.
+ """
+ msg_size = msg.byte_size(msg)
+ if self._compressor and msg_size > _COMPRESSION_THRESHOLD:
+ msg_str = encode_to_bytes(msg.serialize_to_string())
+ header = struct.pack("<LB", msg_size + 1, msg_type)
+ compressed = self._compressor.compress(b"".join([header, msg_str]))
+
+ msg_first_fields = Message("Mysqlx.Connection.Compression")
+ msg_first_fields["client_messages"] = msg_type
+ msg_first_fields["uncompressed_size"] = msg_size + 5
+
+ msg_payload = Message("Mysqlx.Connection.Compression")
+ msg_payload["payload"] = compressed
+
+ output = b"".join([
+ encode_to_bytes(msg_first_fields.serialize_partial_to_string())[:-2],
+ encode_to_bytes(msg_payload.serialize_partial_to_string())
+ ])
+
+ msg_comp_id = \
+ mysqlxpb_enum("Mysqlx.ClientMessages.Type.COMPRESSION")
+ header = struct.pack("<LB", len(output) + 1, msg_comp_id)
+ self._stream.sendall(b"".join([header, output]))
+ else:
+ msg_str = encode_to_bytes(msg.serialize_to_string())
+ header = struct.pack("<LB", msg_size + 1, msg_type)
+ self._stream.sendall(b"".join([header, msg_str]))
+
+ def set_compression(self, algorithm):
+ """Creates a :class:`mysqlx.protocol.Compressor` object based on the
+ compression algorithm.
+
+ Args:
+ algorithm (str): Compression algorithm.
+ """
+ self._compressor = Compressor(algorithm) if algorithm else None
+
+
+class Protocol(object):
+ """Implements the MySQL X Protocol.
+
+ Args:
+ read (mysqlx.protocol.MessageReader): A Message Reader object.
+ writer (mysqlx.protocol.MessageWriter): A Message Writer object.
+
+ .. versionchanged:: 8.0.21
+ """
+ def __init__(self, reader, writer):
+ self._reader = reader
+ self._writer = writer
+ self._compression_algorithm = None
+ self._warnings = []
+
+ @property
+ def compression_algorithm(self):
+ """str: The compresion algorithm.
+ """
+ return self._compression_algorithm
+
+ def _apply_filter(self, msg, stmt):
+ """Apply filter.
+
+ Args:
+ msg (mysqlx.protobuf.Message): The MySQL X Protobuf Message.
+ stmt (Statement): A `Statement` based type object.
+ """
+ if stmt.has_where:
+ msg["criteria"] = stmt.get_where_expr()
+ if stmt.has_sort:
+ msg["order"].extend(stmt.get_sort_expr())
+ if stmt.has_group_by:
+ msg["grouping"].extend(stmt.get_grouping())
+ if stmt.has_having:
+ msg["grouping_criteria"] = stmt.get_having()
+
+ def _create_any(self, arg):
+ """Create any.
+
+ Args:
+ arg (object): Arbitrary object.
+
+ Returns:
+ mysqlx.protobuf.Message: MySQL X Protobuf Message.
+ """
+ if isinstance(arg, str):
+ value = Message("Mysqlx.Datatypes.Scalar.String", value=arg)
+ scalar = Message("Mysqlx.Datatypes.Scalar", type=8, v_string=value)
+ return Message("Mysqlx.Datatypes.Any", type=1, scalar=scalar)
+ elif isinstance(arg, bool):
+ return Message("Mysqlx.Datatypes.Any", type=1,
+ scalar=build_bool_scalar(arg))
+ elif isinstance(arg, int):
+ if arg < 0:
+ return Message("Mysqlx.Datatypes.Any", type=1,
+ scalar=build_int_scalar(arg))
+ return Message("Mysqlx.Datatypes.Any", type=1,
+ scalar=build_unsigned_int_scalar(arg))
+ elif isinstance(arg, tuple) and len(arg) == 2:
+ arg_key, arg_value = arg
+ obj_fld = Message("Mysqlx.Datatypes.Object.ObjectField",
+ key=arg_key, value=self._create_any(arg_value))
+ obj = Message("Mysqlx.Datatypes.Object",
+ fld=[obj_fld.get_message()])
+ return Message("Mysqlx.Datatypes.Any", type=2, obj=obj)
+ elif isinstance(arg, dict) or (isinstance(arg, (list, tuple)) and
+ isinstance(arg[0], dict)):
+ array_values = []
+ for items in arg:
+ obj_flds = []
+ for key, value in items.items():
+ # Array can only handle Any types, Mysqlx.Datatypes.Any.obj
+ obj_fld = Message("Mysqlx.Datatypes.Object.ObjectField",
+ key=key, value=self._create_any(value))
+ obj_flds.append(obj_fld.get_message())
+ msg_obj = Message("Mysqlx.Datatypes.Object", fld=obj_flds)
+ msg_any = Message("Mysqlx.Datatypes.Any", type=2, obj=msg_obj)
+ array_values.append(msg_any.get_message())
+
+ msg = Message("Mysqlx.Datatypes.Array")
+ msg["value"] = array_values
+ return Message("Mysqlx.Datatypes.Any", type=3, array=msg)
+ elif isinstance(arg, list):
+ obj_flds = []
+ for key, value in arg:
+ obj_fld = Message("Mysqlx.Datatypes.Object.ObjectField",
+ key=key, value=self._create_any(value))
+ obj_flds.append(obj_fld.get_message())
+ msg_obj = Message("Mysqlx.Datatypes.Object", fld=obj_flds)
+ msg_any = Message("Mysqlx.Datatypes.Any", type=2, obj=msg_obj)
+ return msg_any
+
+ return None
+
+ def _get_binding_args(self, stmt, is_scalar=True):
+ """Returns the binding any/scalar.
+
+ Args:
+ stmt (Statement): A `Statement` based type object.
+ is_scalar (bool): `True` to return scalar values.
+
+ Raises:
+ :class:`mysqlx.ProgrammingError`: If unable to find placeholder for
+ parameter.
+
+ Returns:
+ list: A list of ``Any`` or ``Scalar`` objects.
+ """
+ build_value = lambda value: build_scalar(value).get_message() \
+ if is_scalar else self._create_any(value).get_message()
+ bindings = stmt.get_bindings()
+ binding_map = stmt.get_binding_map()
+
+ # If binding_map is None it's a SqlStatement object
+ if binding_map is None:
+ return [build_value(value) for value in bindings]
+
+ count = len(binding_map)
+ args = count * [None]
+ if count != len(bindings):
+ raise ProgrammingError("The number of bind parameters and "
+ "placeholders do not match")
+ for name, value in bindings.items():
+ if name not in binding_map:
+ raise ProgrammingError("Unable to find placeholder for "
+ "parameter: {0}".format(name))
+ pos = binding_map[name]
+ args[pos] = build_value(value)
+ return args
+
+ def _process_frame(self, msg, result):
+ """Process frame.
+
+ Args:
+ msg (mysqlx.protobuf.Message): A MySQL X Protobuf Message.
+ result (Result): A `Result` based type object.
+ """
+ if msg["type"] == 1:
+ warn_msg = Message.from_message("Mysqlx.Notice.Warning",
+ msg["payload"])
+ self._warnings.append(warn_msg.msg)
+ _LOGGER.warning("Protocol.process_frame Received Warning Notice "
+ "code %s: %s", warn_msg.code, warn_msg.msg)
+ result.append_warning(warn_msg.level, warn_msg.code, warn_msg.msg)
+ elif msg["type"] == 2:
+ Message.from_message("Mysqlx.Notice.SessionVariableChanged",
+ msg["payload"])
+ elif msg["type"] == 3:
+ sess_state_msg = Message.from_message(
+ "Mysqlx.Notice.SessionStateChanged", msg["payload"])
+ if sess_state_msg["param"] == mysqlxpb_enum(
+ "Mysqlx.Notice.SessionStateChanged.Parameter."
+ "GENERATED_DOCUMENT_IDS"):
+ result.set_generated_ids(
+ [get_item_or_attr(
+ get_item_or_attr(value, 'v_octets'), 'value').decode()
+ for value in sess_state_msg["value"]])
+ else: # Following results are unitary and not a list
+ sess_state_value = sess_state_msg["value"][0] \
+ if isinstance(sess_state_msg["value"],
+ tuple(PROTOBUF_REPEATED_TYPES)) \
+ else sess_state_msg["value"]
+ if sess_state_msg["param"] == mysqlxpb_enum(
+ "Mysqlx.Notice.SessionStateChanged.Parameter."
+ "ROWS_AFFECTED"):
+ result.set_rows_affected(
+ get_item_or_attr(sess_state_value, "v_unsigned_int"))
+ elif sess_state_msg["param"] == mysqlxpb_enum(
+ "Mysqlx.Notice.SessionStateChanged.Parameter."
+ "GENERATED_INSERT_ID"):
+ result.set_generated_insert_id(get_item_or_attr(
+ sess_state_value, "v_unsigned_int"))
+
+ def _read_message(self, result):
+ """Read message.
+
+ Args:
+ result (Result): A `Result` based type object.
+ """
+ while True:
+ try:
+ msg = self._reader.read_message()
+ except RuntimeError as err:
+ warnings = repr(result.get_warnings())
+ if warnings:
+ raise RuntimeError(
+ "{} reason: {}".format(err, warnings))
+ if msg.type == "Mysqlx.Error":
+ raise OperationalError(msg["msg"], msg["code"])
+ elif msg.type == "Mysqlx.Notice.Frame":
+ try:
+ self._process_frame(msg, result)
+ except:
+ continue
+ elif msg.type == "Mysqlx.Sql.StmtExecuteOk":
+ return None
+ elif msg.type == "Mysqlx.Resultset.FetchDone":
+ result.set_closed(True)
+ elif msg.type == "Mysqlx.Resultset.FetchDoneMoreResultsets":
+ result.set_has_more_results(True)
+ elif msg.type == "Mysqlx.Resultset.Row":
+ result.set_has_data(True)
+ break
+ else:
+ break
+ return msg
+
+ def set_compression(self, algorithm):
+ """Sets the compression algorithm to be used by the compression
+ object, for uplink and downlink.
+
+ Args:
+ algorithm (str): Algorithm to be used in compression/decompression.
+
+ .. versionadded:: 8.0.21
+
+ """
+ self._compression_algorithm = algorithm
+ self._reader.set_compression(algorithm)
+ self._writer.set_compression(algorithm)
+
+ def get_capabilites(self):
+ """Get capabilities.
+
+ Returns:
+ mysqlx.protobuf.Message: MySQL X Protobuf Message.
+ """
+ msg = Message("Mysqlx.Connection.CapabilitiesGet")
+ self._writer.write_message(
+ mysqlxpb_enum("Mysqlx.ClientMessages.Type.CON_CAPABILITIES_GET"),
+ msg)
+ msg = self._reader.read_message()
+ while msg.type == "Mysqlx.Notice.Frame":
+ msg = self._reader.read_message()
+
+ if msg.type == "Mysqlx.Error":
+ raise OperationalError(msg["msg"], msg["code"])
+
+ return msg
+
+ def set_capabilities(self, **kwargs):
+ """Set capabilities.
+
+ Args:
+ **kwargs: Arbitrary keyword arguments.
+
+ Returns:
+ mysqlx.protobuf.Message: MySQL X Protobuf Message.
+ """
+ if not kwargs:
+ return
+ capabilities = Message("Mysqlx.Connection.Capabilities")
+ for key, value in kwargs.items():
+ capability = Message("Mysqlx.Connection.Capability")
+ capability["name"] = key
+ if isinstance(value, dict):
+ items = value
+ obj_flds = []
+ for item in items:
+ obj_fld = Message("Mysqlx.Datatypes.Object.ObjectField",
+ key=item,
+ value=self._create_any(items[item]))
+ obj_flds.append(obj_fld.get_message())
+ msg_obj = Message("Mysqlx.Datatypes.Object", fld=obj_flds)
+ msg_any = Message("Mysqlx.Datatypes.Any", type=2, obj=msg_obj)
+ capability["value"] = msg_any.get_message()
+ else:
+ capability["value"] = self._create_any(value)
+
+ capabilities["capabilities"].extend([capability.get_message()])
+ msg = Message("Mysqlx.Connection.CapabilitiesSet")
+ msg["capabilities"] = capabilities
+ self._writer.write_message(
+ mysqlxpb_enum("Mysqlx.ClientMessages.Type.CON_CAPABILITIES_SET"),
+ msg)
+
+ try:
+ return self.read_ok()
+ except InterfaceError as err:
+ # Skip capability "session_connect_attrs" error since
+            # it is only available on server versions >= 8.0.16
+ if err.errno != 5002:
+ raise
+ return None
+
+ def send_auth_start(self, method, auth_data=None, initial_response=None):
+ """Send authenticate start.
+
+ Args:
+ method (str): Message method.
+ auth_data (Optional[str]): Authentication data.
+ initial_response (Optional[str]): Initial response.
+ """
+ msg = Message("Mysqlx.Session.AuthenticateStart")
+ msg["mech_name"] = method
+ if auth_data is not None:
+ msg["auth_data"] = auth_data
+ if initial_response is not None:
+ msg["initial_response"] = initial_response
+ self._writer.write_message(mysqlxpb_enum(
+ "Mysqlx.ClientMessages.Type.SESS_AUTHENTICATE_START"), msg)
+
+ def read_auth_continue(self):
+ """Read authenticate continue.
+
+ Raises:
+ :class:`InterfaceError`: If the message type is not
+ `Mysqlx.Session.AuthenticateContinue`
+
+ Returns:
+ str: The authentication data.
+ """
+ msg = self._reader.read_message()
+ while msg.type == "Mysqlx.Notice.Frame":
+ msg = self._reader.read_message()
+ if msg.type != "Mysqlx.Session.AuthenticateContinue":
+ raise InterfaceError("Unexpected message encountered during "
+ "authentication handshake")
+ return msg["auth_data"]
+
+ def send_auth_continue(self, auth_data):
+ """Send authenticate continue.
+
+ Args:
+ auth_data (str): Authentication data.
+ """
+ msg = Message("Mysqlx.Session.AuthenticateContinue",
+ auth_data=auth_data)
+ self._writer.write_message(mysqlxpb_enum(
+ "Mysqlx.ClientMessages.Type.SESS_AUTHENTICATE_CONTINUE"), msg)
+
+ def read_auth_ok(self):
+ """Read authenticate OK.
+
+ Raises:
+ :class:`mysqlx.InterfaceError`: If message type is `Mysqlx.Error`.
+ """
+ while True:
+ msg = self._reader.read_message()
+ if msg.type == "Mysqlx.Session.AuthenticateOk":
+ break
+ if msg.type == "Mysqlx.Error":
+ raise InterfaceError(msg.msg)
+
+ def send_prepare_prepare(self, msg_type, msg, stmt):
+ """
+ Send prepare statement.
+
+ Args:
+ msg_type (str): Message ID string.
+ msg (mysqlx.protobuf.Message): MySQL X Protobuf Message.
+ stmt (Statement): A `Statement` based type object.
+
+ Raises:
+ :class:`mysqlx.NotSupportedError`: If prepared statements are not
+ supported.
+
+ .. versionadded:: 8.0.16
+ """
+ if stmt.has_limit and msg.type != "Mysqlx.Crud.Insert":
+ # Remove 'limit' from message by building a new one
+ if msg.type == "Mysqlx.Crud.Find":
+ _, msg = self.build_find(stmt)
+ elif msg.type == "Mysqlx.Crud.Update":
+ _, msg = self.build_update(stmt)
+ elif msg.type == "Mysqlx.Crud.Delete":
+ _, msg = self.build_delete(stmt)
+ else:
+ raise ValueError("Invalid message type: {}".format(msg_type))
+ # Build 'limit_expr' message
+ position = len(stmt.get_bindings())
+ placeholder = mysqlxpb_enum("Mysqlx.Expr.Expr.Type.PLACEHOLDER")
+ msg_limit_expr = Message("Mysqlx.Crud.LimitExpr")
+ msg_limit_expr["row_count"] = Message("Mysqlx.Expr.Expr",
+ type=placeholder,
+ position=position)
+ if msg.type == "Mysqlx.Crud.Find":
+ msg_limit_expr["offset"] = Message("Mysqlx.Expr.Expr",
+ type=placeholder,
+ position=position + 1)
+ msg["limit_expr"] = msg_limit_expr
+
+ oneof_type, oneof_op = CRUD_PREPARE_MAPPING[msg_type]
+ msg_oneof = Message("Mysqlx.Prepare.Prepare.OneOfMessage")
+ msg_oneof["type"] = mysqlxpb_enum(oneof_type)
+ msg_oneof[oneof_op] = msg
+ msg_prepare = Message("Mysqlx.Prepare.Prepare")
+ msg_prepare["stmt_id"] = stmt.stmt_id
+ msg_prepare["stmt"] = msg_oneof
+
+ self._writer.write_message(
+ mysqlxpb_enum("Mysqlx.ClientMessages.Type.PREPARE_PREPARE"),
+ msg_prepare)
+
+ try:
+ self.read_ok()
+ except InterfaceError:
+ raise NotSupportedError
+
+ def send_prepare_execute(self, msg_type, msg, stmt):
+ """
+ Send execute statement.
+
+ Args:
+ msg_type (str): Message ID string.
+ msg (mysqlx.protobuf.Message): MySQL X Protobuf Message.
+ stmt (Statement): A `Statement` based type object.
+
+ .. versionadded:: 8.0.16
+ """
+ oneof_type, oneof_op = CRUD_PREPARE_MAPPING[msg_type]
+ msg_oneof = Message("Mysqlx.Prepare.Prepare.OneOfMessage")
+ msg_oneof["type"] = mysqlxpb_enum(oneof_type)
+ msg_oneof[oneof_op] = msg
+ msg_execute = Message("Mysqlx.Prepare.Execute")
+ msg_execute["stmt_id"] = stmt.stmt_id
+
+ args = self._get_binding_args(stmt, is_scalar=False)
+ if args:
+ msg_execute["args"].extend(args)
+
+ if stmt.has_limit:
+ msg_execute["args"].extend([
+ self._create_any(stmt.get_limit_row_count()).get_message(),
+ self._create_any(stmt.get_limit_offset()).get_message()
+ ])
+
+ self._writer.write_message(
+ mysqlxpb_enum("Mysqlx.ClientMessages.Type.PREPARE_EXECUTE"),
+ msg_execute)
+
+ def send_prepare_deallocate(self, stmt_id):
+ """
+ Send prepare deallocate statement.
+
+ Args:
+ stmt_id (int): Statement ID.
+
+ .. versionadded:: 8.0.16
+ """
+ msg_dealloc = Message("Mysqlx.Prepare.Deallocate")
+ msg_dealloc["stmt_id"] = stmt_id
+ self._writer.write_message(
+ mysqlxpb_enum("Mysqlx.ClientMessages.Type.PREPARE_DEALLOCATE"),
+ msg_dealloc)
+ self.read_ok()
+
+ def send_msg_without_ps(self, msg_type, msg, stmt):
+ """
+ Send a message without prepared statements support.
+
+ Args:
+ msg_type (str): Message ID string.
+ msg (mysqlx.protobuf.Message): MySQL X Protobuf Message.
+ stmt (Statement): A `Statement` based type object.
+
+ .. versionadded:: 8.0.16
+ """
+ if stmt.has_limit:
+ msg_limit = Message("Mysqlx.Crud.Limit")
+ msg_limit["row_count"] = stmt.get_limit_row_count()
+ if msg.type == "Mysqlx.Crud.Find":
+ msg_limit["offset"] = stmt.get_limit_offset()
+ msg["limit"] = msg_limit
+ is_scalar = False \
+ if msg_type == "Mysqlx.ClientMessages.Type.SQL_STMT_EXECUTE" \
+ else True
+ args = self._get_binding_args(stmt, is_scalar=is_scalar)
+ if args:
+ msg["args"].extend(args)
+ self.send_msg(msg_type, msg)
+
+ def send_msg(self, msg_type, msg):
+ """
+ Send a message.
+
+ Args:
+ msg_type (str): Message ID string.
+ msg (mysqlx.protobuf.Message): MySQL X Protobuf Message.
+
+ .. versionadded:: 8.0.16
+ """
+ self._writer.write_message(mysqlxpb_enum(msg_type), msg)
+
+ def build_find(self, stmt):
+ """Build find/read message.
+
+ Args:
+ stmt (Statement): A :class:`mysqlx.ReadStatement` or
+ :class:`mysqlx.FindStatement` object.
+
+ Returns:
+ (tuple): Tuple containing:
+
+ * `str`: Message ID string.
+ * :class:`mysqlx.protobuf.Message`: MySQL X Protobuf Message.
+
+ .. versionadded:: 8.0.16
+ """
+ data_model = mysqlxpb_enum("Mysqlx.Crud.DataModel.DOCUMENT"
+ if stmt.is_doc_based() else
+ "Mysqlx.Crud.DataModel.TABLE")
+ collection = Message("Mysqlx.Crud.Collection",
+ name=stmt.target.name,
+ schema=stmt.schema.name)
+ msg = Message("Mysqlx.Crud.Find", data_model=data_model,
+ collection=collection)
+ if stmt.has_projection:
+ msg["projection"] = stmt.get_projection_expr()
+ self._apply_filter(msg, stmt)
+
+ if stmt.is_lock_exclusive():
+ msg["locking"] = \
+ mysqlxpb_enum("Mysqlx.Crud.Find.RowLock.EXCLUSIVE_LOCK")
+ elif stmt.is_lock_shared():
+ msg["locking"] = \
+ mysqlxpb_enum("Mysqlx.Crud.Find.RowLock.SHARED_LOCK")
+
+ if stmt.lock_contention > 0:
+ msg["locking_options"] = stmt.lock_contention
+
+ return "Mysqlx.ClientMessages.Type.CRUD_FIND", msg
+
+ def build_update(self, stmt):
+ """Build update message.
+
+ Args:
+ stmt (Statement): A :class:`mysqlx.ModifyStatement` or
+ :class:`mysqlx.UpdateStatement` object.
+
+ Returns:
+ (tuple): Tuple containing:
+
+ * `str`: Message ID string.
+ * :class:`mysqlx.protobuf.Message`: MySQL X Protobuf Message.
+
+ .. versionadded:: 8.0.16
+ """
+ data_model = mysqlxpb_enum("Mysqlx.Crud.DataModel.DOCUMENT"
+ if stmt.is_doc_based() else
+ "Mysqlx.Crud.DataModel.TABLE")
+ collection = Message("Mysqlx.Crud.Collection",
+ name=stmt.target.name,
+ schema=stmt.schema.name)
+ msg = Message("Mysqlx.Crud.Update", data_model=data_model,
+ collection=collection)
+ self._apply_filter(msg, stmt)
+ for _, update_op in stmt.get_update_ops().items():
+ operation = Message("Mysqlx.Crud.UpdateOperation")
+ operation["operation"] = update_op.update_type
+ operation["source"] = update_op.source
+ if update_op.value is not None:
+ operation["value"] = build_expr(update_op.value)
+ msg["operation"].extend([operation.get_message()])
+
+ return "Mysqlx.ClientMessages.Type.CRUD_UPDATE", msg
+
+ def build_delete(self, stmt):
+ """Build delete message.
+
+ Args:
+ stmt (Statement): A :class:`mysqlx.DeleteStatement` or
+ :class:`mysqlx.RemoveStatement` object.
+
+ Returns:
+ (tuple): Tuple containing:
+
+ * `str`: Message ID string.
+ * :class:`mysqlx.protobuf.Message`: MySQL X Protobuf Message.
+
+ .. versionadded:: 8.0.16
+ """
+ data_model = mysqlxpb_enum("Mysqlx.Crud.DataModel.DOCUMENT"
+ if stmt.is_doc_based() else
+ "Mysqlx.Crud.DataModel.TABLE")
+ collection = Message("Mysqlx.Crud.Collection", name=stmt.target.name,
+ schema=stmt.schema.name)
+ msg = Message("Mysqlx.Crud.Delete", data_model=data_model,
+ collection=collection)
+ self._apply_filter(msg, stmt)
+ return "Mysqlx.ClientMessages.Type.CRUD_DELETE", msg
+
+ def build_execute_statement(self, namespace, stmt, fields=None):
+ """Build execute statement.
+
+ Args:
+ namespace (str): The namespace.
+ stmt (Statement): A `Statement` based type object.
+ fields (Optional[dict]): The message fields.
+
+ Returns:
+ (tuple): Tuple containing:
+
+ * `str`: Message ID string.
+ * :class:`mysqlx.protobuf.Message`: MySQL X Protobuf Message.
+
+ .. versionadded:: 8.0.16
+ """
+ msg = Message("Mysqlx.Sql.StmtExecute", namespace=namespace, stmt=stmt,
+ compact_metadata=False)
+
+ if fields:
+ obj_flds = []
+ for key, value in fields.items():
+ obj_fld = Message("Mysqlx.Datatypes.Object.ObjectField",
+ key=key, value=self._create_any(value))
+ obj_flds.append(obj_fld.get_message())
+ msg_obj = Message("Mysqlx.Datatypes.Object", fld=obj_flds)
+ msg_any = Message("Mysqlx.Datatypes.Any", type=2, obj=msg_obj)
+ msg["args"] = [msg_any.get_message()]
+ return "Mysqlx.ClientMessages.Type.SQL_STMT_EXECUTE", msg
+
+ def build_insert(self, stmt):
+ """Build insert statement.
+
+ Args:
+ stmt (Statement): A :class:`mysqlx.AddStatement` or
+ :class:`mysqlx.InsertStatement` object.
+
+ Returns:
+ (tuple): Tuple containing:
+
+ * `str`: Message ID string.
+ * :class:`mysqlx.protobuf.Message`: MySQL X Protobuf Message.
+
+ .. versionadded:: 8.0.16
+ """
+ data_model = mysqlxpb_enum("Mysqlx.Crud.DataModel.DOCUMENT"
+ if stmt.is_doc_based() else
+ "Mysqlx.Crud.DataModel.TABLE")
+ collection = Message("Mysqlx.Crud.Collection",
+ name=stmt.target.name,
+ schema=stmt.schema.name)
+ msg = Message("Mysqlx.Crud.Insert", data_model=data_model,
+ collection=collection)
+
+ if hasattr(stmt, "_fields"):
+ for field in stmt._fields:
+ expr = ExprParser(field, not stmt.is_doc_based()) \
+ .parse_table_insert_field()
+ msg["projection"].extend([expr.get_message()])
+
+ for value in stmt.get_values():
+ row = Message("Mysqlx.Crud.Insert.TypedRow")
+ if isinstance(value, list):
+ for val in value:
+ row["field"].extend([build_expr(val).get_message()])
+ else:
+ row["field"].extend([build_expr(value).get_message()])
+ msg["row"].extend([row.get_message()])
+
+ if hasattr(stmt, "is_upsert"):
+ msg["upsert"] = stmt.is_upsert()
+
+ return "Mysqlx.ClientMessages.Type.CRUD_INSERT", msg
+
+ def close_result(self, result):
+ """Close the result.
+
+ Args:
+ result (Result): A `Result` based type object.
+
+ Raises:
+ :class:`mysqlx.OperationalError`: If message read is None.
+ """
+ msg = self._read_message(result)
+ if msg is not None:
+ raise OperationalError("Expected to close the result")
+
+ def read_row(self, result):
+ """Read row.
+
+ Args:
+ result (Result): A `Result` based type object.
+ """
+ msg = self._read_message(result)
+ if msg is None:
+ return None
+ if msg.type == "Mysqlx.Resultset.Row":
+ return msg
+ self._reader.push_message(msg)
+ return None
+
+ def get_column_metadata(self, result):
+ """Returns column metadata.
+
+ Args:
+ result (Result): A `Result` based type object.
+
+ Raises:
+ :class:`mysqlx.InterfaceError`: If unexpected message.
+ """
+ columns = []
+ while True:
+ msg = self._read_message(result)
+ if msg is None:
+ break
+ if msg.type == "Mysqlx.Resultset.Row":
+ self._reader.push_message(msg)
+ break
+ if msg.type != "Mysqlx.Resultset.ColumnMetaData":
+ raise InterfaceError("Unexpected msg type")
+ col = Column(msg["type"], msg["catalog"], msg["schema"],
+ msg["table"], msg["original_table"],
+ msg["name"], msg["original_name"],
+ msg.get("length", 21),
+ msg.get("collation", 0),
+ msg.get("fractional_digits", 0),
+ msg.get("flags", 16),
+ msg.get("content_type"))
+ columns.append(col)
+ return columns
+
+ def read_ok(self):
+ """Read OK.
+
+ Raises:
+ :class:`mysqlx.InterfaceError`: If unexpected message.
+ """
+ msg = self._reader.read_message()
+ if msg.type == "Mysqlx.Error":
+ raise InterfaceError("Mysqlx.Error: {}".format(msg["msg"]),
+ errno=msg["code"])
+ if msg.type != "Mysqlx.Ok":
+ raise InterfaceError("Unexpected message encountered")
+
+ def send_connection_close(self):
+ """Send connection close."""
+ msg = Message("Mysqlx.Connection.Close")
+ self._writer.write_message(mysqlxpb_enum(
+ "Mysqlx.ClientMessages.Type.CON_CLOSE"), msg)
+
+ def send_close(self):
+ """Send close."""
+ msg = Message("Mysqlx.Session.Close")
+ self._writer.write_message(mysqlxpb_enum(
+ "Mysqlx.ClientMessages.Type.SESS_CLOSE"), msg)
+
+ def send_expect_open(self):
+ """Send expectation."""
+ cond_key = mysqlxpb_enum(
+ "Mysqlx.Expect.Open.Condition.Key.EXPECT_FIELD_EXIST")
+ msg_oc = Message("Mysqlx.Expect.Open.Condition")
+ msg_oc["condition_key"] = cond_key
+ msg_oc["condition_value"] = "6.1"
+
+ msg_eo = Message("Mysqlx.Expect.Open")
+ msg_eo['cond'] = [msg_oc.get_message()]
+
+ self._writer.write_message(mysqlxpb_enum(
+ "Mysqlx.ClientMessages.Type.EXPECT_OPEN"), msg_eo)
+
+ def send_reset(self, keep_open=None):
+ """Send reset session message.
+
+ Returns:
+ boolean: ``True`` if the server will keep the session open,
+ otherwise ``False``.
+ """
+ msg = Message("Mysqlx.Session.Reset")
+ if keep_open is None:
+ try:
+ # Send expectation: keep connection open
+ self.send_expect_open()
+ self.read_ok()
+ keep_open = True
+ except InterfaceError:
+                # Expectation is unknown by this version of the server
+ keep_open = False
+ if keep_open:
+ msg["keep_open"] = True
+ self._writer.write_message(mysqlxpb_enum(
+ "Mysqlx.ClientMessages.Type.SESS_RESET"), msg)
+ self.read_ok()
+ if keep_open:
+ return True
+ return False
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/result.py b/monEnvTP/lib/python3.8/site-packages/mysqlx/result.py
new file mode 100644
index 0000000000000000000000000000000000000000..117d93a26d1db46f06b4d01aaff1f97094420c9a
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysqlx/result.py
@@ -0,0 +1,1135 @@
+# Copyright (c) 2016, 2021, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""Implementation of the Result classes."""
+
+import decimal
+import struct
+import sys
+
+from datetime import datetime, timedelta
+
+from .dbdoc import DbDoc
+from .charsets import MYSQL_CHARACTER_SETS
+from .helpers import decode_from_bytes, deprecated
+
+
+# pylint: disable=C0111
+def from_protobuf(column, payload):
+ if len(payload) == 0:
+ return None
+
+ if column.get_type() == ColumnType.STRING:
+ return decode_from_bytes(payload[:-1]) # Strip trailing char
+
+ try:
+ return ColumnProtoType.converter_map[
+ column.get_proto_type()
+ ](payload)
+ except KeyError as err:
+ sys.stderr.write("{0}".format(err))
+ sys.stderr.write("{0}".format(payload.encode("hex")))
+ return None
+
+
+def bytes_from_protobuf(payload):
+ # Strip trailing char
+ return payload[:-1]
+
+
+def float_from_protobuf(payload):
+ assert len(payload) == 4
+ return struct.unpack("<f", payload)[0]
+
+
+def double_from_protobuf(payload):
+ assert len(payload) == 8
+ return struct.unpack("<d", payload)[0]
+
+
+def varint_from_protobuf_stream(payload):
+ if len(payload) == 0:
+ raise ValueError("Payload is empty")
+
+ cur = 0
+ i = 0
+ shift = 0
+
+ for item in payload:
+ char = item if isinstance(item, int) else ord(item)
+ eos = (char & 0x80) == 0
+ cur_bits = (char & 0x7f)
+ cur_bits <<= shift
+ i |= cur_bits
+ if eos:
+ return i, payload[cur + 1:]
+ cur += 1
+ shift += 7
+
+ raise EOFError("Payload too short")
+
+
+def varint_from_protobuf(payload):
+ i, payload = varint_from_protobuf_stream(payload)
+ if len(payload) != 0:
+ raise ValueError("Payload too long")
+
+ return i
+
+
+def varsint_from_protobuf(payload):
+ i, payload = varint_from_protobuf_stream(payload)
+ if len(payload) != 0:
+ raise ValueError("Payload too long")
+
+ # Zigzag encoded, revert it
+ if i & 0x1:
+ i = ~i
+ i = (i >> 1)
+ i |= 1 << 63
+ else:
+ i = (i >> 1)
+
+ return i
+
+
+def set_from_protobuf(payload):
+ set_pb = []
+ while True:
+ try:
+ field_len, payload = varint_from_protobuf_stream(payload)
+ if len(payload) < field_len:
+ if len(payload) == 0 and field_len == 1 and len(set_pb) == 0:
+ # Special case for empty set
+ return []
+ raise ValueError("Invalid Set encoding")
+
+ set_pb.append(payload[:field_len])
+ payload = payload[field_len:]
+ if len(payload) == 0:
+ # Done
+ break
+ except ValueError:
+ break
+ return set_pb
+
+
+def decimal_from_protobuf(payload):
+ digits = []
+ sign = None
+ scale = payload[0] if isinstance(payload[0], int) else ord(payload[0])
+ payload = payload[1:]
+
+ for item in payload:
+ char = item if isinstance(item, int) else ord(item)
+ high_bcd = (char & 0xf0) >> 4
+ low_bcd = char & 0x0f
+ if high_bcd < 0x0a:
+ digits.append(high_bcd)
+ if low_bcd < 0x0a:
+ digits.append(low_bcd)
+ elif low_bcd == 0x0c:
+ sign = 0
+ break
+ elif low_bcd == 0x0d:
+ sign = 1
+ break
+ else:
+ raise ValueError("Invalid BCD")
+ elif high_bcd == 0x0c:
+ sign = 0
+ assert low_bcd == 0x00
+ break
+ elif high_bcd == 0x0d:
+ sign = 1
+ assert low_bcd == 0x00
+ break
+ else:
+ raise ValueError("Invalid BCD: {0}".format(high_bcd))
+
+ return decimal.Decimal((sign, digits, -scale))
+
+
+def datetime_from_protobuf(payload):
+ # A sequence of varints
+ hour = 0
+ minutes = 0
+ seconds = 0
+ useconds = 0
+ year, payload = varint_from_protobuf_stream(payload)
+ month, payload = varint_from_protobuf_stream(payload)
+ day, payload = varint_from_protobuf_stream(payload)
+
+ try:
+ hour, payload = varint_from_protobuf_stream(payload)
+ minutes, payload = varint_from_protobuf_stream(payload)
+ seconds, payload = varint_from_protobuf_stream(payload)
+ useconds, payload = varint_from_protobuf_stream(payload)
+ except ValueError:
+ pass
+
+ return datetime(year, month, day, hour, minutes, seconds, useconds)
+
+
+def time_from_protobuf(payload):
+ # A sequence of varints
+ hour = 0
+ minutes = 0
+ seconds = 0
+ useconds = 0
+ negate = payload[0] == 1
+ payload = payload[1:]
+
+ try:
+ hour, payload = varint_from_protobuf_stream(payload)
+ minutes, payload = varint_from_protobuf_stream(payload)
+ seconds, payload = varint_from_protobuf_stream(payload)
+ useconds, payload = varint_from_protobuf_stream(payload)
+ except ValueError:
+ pass
+
+ if negate:
+ # Negate the first non-zero value
+ if hour:
+ hour *= -1
+ elif minutes:
+ minutes *= -1
+ elif seconds:
+ seconds *= -1
+ elif useconds:
+ useconds *= -1
+
+ return timedelta(hours=hour, minutes=minutes, seconds=seconds,
+ microseconds=useconds)
+
+
+class Collations(object):
+ UTF8_GENERAL_CI = 33
+
+
+class ColumnType(object):
+ BIT = 1
+ TINYINT = 2
+ SMALLINT = 3
+ MEDIUMINT = 4
+ INT = 5
+ BIGINT = 6
+ REAL = 7
+ FLOAT = 8
+ DECIMAL = 9
+ NUMERIC = 10
+ DOUBLE = 11
+ JSON = 12
+ STRING = 13
+ BYTES = 14
+ TIME = 15
+ DATE = 16
+ DATETIME = 17
+ TIMESTAMP = 18
+ SET = 19
+ ENUM = 20
+ GEOMETRY = 21
+ XML = 22
+ YEAR = 23
+ CHAR = 24
+ VARCHAR = 25
+ BINARY = 26
+ VARBINARY = 27
+ TINYBLOB = 28
+ BLOB = 29
+ MEDIUMBLOB = 30
+ LONGBLOB = 31
+ TINYTEXT = 32
+ TEXT = 33
+ MEDIUMTEXT = 34
+ LONGTEXT = 35
+
+ @classmethod
+ def to_string(cls, needle):
+ for key, value in vars(cls).items():
+ if value == needle:
+ return key
+ return None
+
+ @classmethod
+ def from_string(cls, key):
+ return getattr(cls, key.upper(), None)
+
+ @classmethod
+ def is_char(cls, col_type):
+ return col_type in (cls.CHAR, cls.VARCHAR,)
+
+ @classmethod
+ def is_binary(cls, col_type):
+ return col_type in (cls.BINARY, cls.VARBINARY,)
+
+ @classmethod
+ def is_text(cls, col_type):
+ return col_type in (cls.TEXT, cls.TINYTEXT, cls.MEDIUMTEXT,
+ cls.LONGTEXT,)
+
+ @classmethod
+ def is_decimals(cls, col_type):
+ return col_type in (cls.REAL, cls.DOUBLE, cls.FLOAT, cls.DECIMAL,
+ cls.NUMERIC,)
+
+ @classmethod
+ def is_numeric(cls, col_type):
+ return col_type in (cls.BIT, cls.TINYINT, cls.SMALLINT, cls.MEDIUMINT,
+ cls.INT, cls.BIGINT,)
+
+ @classmethod
+ def is_finite_set(cls, col_type):
+ return col_type in (cls.SET, cls.ENUM,)
+
+
+class ColumnProtoType(object):
+ SINT = 1
+ UINT = 2
+ DOUBLE = 5
+ FLOAT = 6
+ BYTES = 7
+ TIME = 10
+ DATETIME = 12
+ SET = 15
+ ENUM = 16
+ BIT = 17
+ DECIMAL = 18
+
+ converter_map = {
+ SINT: varsint_from_protobuf,
+ UINT: varint_from_protobuf,
+ BYTES: bytes_from_protobuf,
+ DATETIME: datetime_from_protobuf,
+ TIME: time_from_protobuf,
+ FLOAT: float_from_protobuf,
+ DOUBLE: double_from_protobuf,
+ BIT: varint_from_protobuf,
+ SET: set_from_protobuf,
+ ENUM: bytes_from_protobuf,
+ DECIMAL: decimal_from_protobuf,
+ }
+
+
+class Flags(object):
+ def __init__(self, value):
+ self._allowed_flags = {}
+ self._flag_names = {}
+ for key, val in self.__class__.__dict__.items():
+ if key.startswith("__"):
+ continue
+ if isinstance(val, int):
+ self._allowed_flags[key] = val
+ self._flag_names[val] = key
+ self._value = value
+
+ def __str__(self):
+ mask = 1
+ flag_names = []
+ value = self._value
+
+ for _ in range(0, 63):
+ mask <<= 1
+ flag = value & mask
+ if flag:
+ # We matched something, find the name for it
+ try:
+ flag_names.append(self._flag_names[flag])
+ except KeyError:
+ sys.stderr.write("{0}".format(self._flag_names))
+ sys.stderr.write("{0}".format(self.__class__.__dict__))
+
+ return ",".join(flag_names)
+
+ @property
+ def value(self):
+ return self._value
+
+ @value.setter
+ def value(self, val):
+ self._value = val
+
+
+class ColumnFlags(Flags):
+ NOT_NULL = 0x0010
+ PRIMARY_KEY = 0x0020
+ UNIQUE_KEY = 0x0040
+ MULTIPLE_KEY = 0x0080
+ AUTO_INCREMENT = 0x0100
+
+
+class DatetimeColumnFlags(ColumnFlags):
+ TIMESTAMP = 0x0001
+
+
+class UIntColumnFlags(ColumnFlags):
+ ZEROFILL = 0x0001
+
+
+class DoubleColumnFlags(ColumnFlags):
+ UNSIGNED = 0x0001
+
+
+class FloatColumnFlags(ColumnFlags):
+ UNSIGNED = 0x0001
+
+
+class BytesColumnFlags(ColumnFlags):
+ RIGHT_PAD = 0x0001
+
+
+class BytesContentType(ColumnFlags):
+ GEOMETRY = 0x0001
+ JSON = 0x0002
+ XML = 0x0003
+# pylint: enable=C0111
+
+
+class Column(object):
+ """Represents meta data for a table column.
+
+ Args:
+ col_type (int): The column type.
+ catalog (str): The catalog.
+ schema (str): The schema name.
+ table (str): The table name.
+ original_table (str): The original table name.
+ name (str): The column name.
+        original_name (str): The original column name.
+ length (int): The column length,
+ collation (str): The collation name.
+ fractional_digits (int): The fractional digits.
+ flags (int): The flags.
+ content_type (int): The content type.
+
+ .. versionchanged:: 8.0.12
+ """
+ def __init__(self, col_type, catalog=None, schema=None, table=None,
+ original_table=None, name=None, original_name=None,
+ length=None, collation=None, fractional_digits=None,
+ flags=None, content_type=None):
+ self._schema = decode_from_bytes(schema)
+ self._name = decode_from_bytes(name)
+ self._original_name = decode_from_bytes(original_name)
+ self._table = decode_from_bytes(table)
+ self._original_table = decode_from_bytes(original_table)
+ self._proto_type = col_type
+ self._col_type = None
+ self._catalog = catalog
+ self._length = length
+ self._collation = collation
+ self._fractional_digits = fractional_digits
+ self._flags = flags
+ self._content_type = content_type
+ self._number_signed = False
+ self._is_padded = False
+ self._is_binary = False
+ self._is_bytes = False
+ self._collation_name = None
+ self._character_set_name = None
+ self._zero_fill = None
+
+ if self._collation > 0:
+ if self._collation >= len(MYSQL_CHARACTER_SETS):
+ raise ValueError("No mapping found for collation {0}"
+ "".format(self._collation))
+ info = MYSQL_CHARACTER_SETS[self._collation]
+ self._character_set_name = info[0]
+ self._collation_name = info[1]
+ self._is_binary = ("binary" in self._collation_name or
+ "_bin" in self._collation_name)
+ self._map_type()
+ self._is_bytes = self._col_type in (
+ ColumnType.GEOMETRY, ColumnType.JSON, ColumnType.XML,
+ ColumnType.BYTES, ColumnType.STRING)
+
+ def __str__(self):
+ return str({
+ "col_type": self._col_type,
+ "schema": self._schema,
+ "table": self._table,
+ "flags": str(self._flags),
+ })
+
+ def _map_bytes(self):
+ """Map bytes."""
+ if self._content_type == BytesContentType.GEOMETRY:
+ self._col_type = ColumnType.GEOMETRY
+ elif self._content_type == BytesContentType.JSON:
+ self._col_type = ColumnType.JSON
+ elif self._content_type == BytesContentType.XML:
+ self._col_type = ColumnType.XML
+ elif self._is_binary:
+ self._col_type = ColumnType.BYTES
+ else:
+ self._col_type = ColumnType.STRING
+ self._is_padded = self._flags & 1
+
+ def _map_datetime(self):
+ """Map datetime."""
+ if self._length == 10:
+ self._col_type = ColumnType.DATE
+ elif self._flags & DatetimeColumnFlags.TIMESTAMP > 0:
+ self._col_type = ColumnType.TIMESTAMP
+ elif self._length >= 19:
+ self._col_type = ColumnType.DATETIME
+ else:
+ raise ValueError("Datetime mapping scenario unhandled")
+
+ def _map_int_type(self):
+ """Map int type."""
+ if self._length <= 4:
+ self._col_type = ColumnType.TINYINT
+ elif self._length <= 6:
+ self._col_type = ColumnType.SMALLINT
+ elif self._length <= 9:
+ self._col_type = ColumnType.MEDIUMINT
+ elif self._length <= 11:
+ self._col_type = ColumnType.INT
+ else:
+ self._col_type = ColumnType.BIGINT
+ self._number_signed = True
+
+ def _map_uint_type(self):
+ """Map uint type."""
+ if self._length <= 3:
+ self._col_type = ColumnType.TINYINT
+ elif self._length <= 5:
+ self._col_type = ColumnType.SMALLINT
+ elif self._length <= 8:
+ self._col_type = ColumnType.MEDIUMINT
+ elif self._length <= 10:
+ self._col_type = ColumnType.INT
+ else:
+ self._col_type = ColumnType.BIGINT
+ self._zero_fill = self._flags & 1
+
+ def _map_type(self):
+ """Map type."""
+ if self._proto_type == ColumnProtoType.SINT:
+ self._map_int_type()
+ elif self._proto_type == ColumnProtoType.UINT:
+ self._map_uint_type()
+ elif self._proto_type == ColumnProtoType.FLOAT:
+ self._col_type = ColumnType.FLOAT
+ self._is_number_signed = \
+ (self._flags & FloatColumnFlags.UNSIGNED) == 0
+ elif self._proto_type == ColumnProtoType.DECIMAL:
+ self._col_type = ColumnType.DECIMAL
+ self._is_number_signed = \
+ (self._flags & FloatColumnFlags.UNSIGNED) == 0
+ elif self._proto_type == ColumnProtoType.DOUBLE:
+ self._col_type = ColumnType.DOUBLE
+ self._is_number_signed = \
+ (self._flags & FloatColumnFlags.UNSIGNED) == 0
+ elif self._proto_type == ColumnProtoType.BYTES:
+ self._map_bytes()
+ elif self._proto_type == ColumnProtoType.TIME:
+ self._col_type = ColumnType.TIME
+ elif self._proto_type == ColumnProtoType.DATETIME:
+ self._map_datetime()
+ elif self._proto_type == ColumnProtoType.SET:
+ self._col_type = ColumnType.SET
+ elif self._proto_type == ColumnProtoType.ENUM:
+ self._col_type = ColumnType.ENUM
+ elif self._proto_type == ColumnProtoType.BIT:
+ self._col_type = ColumnType.BIT
+ else:
+ raise ValueError("Unknown column type {0}".format(self._proto_type))
+
+ @property
+ def schema_name(self):
+ """str: The schema name.
+
+ .. versionadded:: 8.0.12
+ """
+ return self._schema
+
+ @property
+ def table_name(self):
+ """str: The table name.
+
+ .. versionadded:: 8.0.12
+ """
+ return self._original_table or self._table
+
+ @property
+ def table_label(self):
+ """str: The table label.
+
+ .. versionadded:: 8.0.12
+ """
+ return self._table or self._original_table
+
+ @property
+ def column_name(self):
+ """str: The column name.
+
+ .. versionadded:: 8.0.12
+ """
+ return self._original_name or self._name
+
+ @property
+ def column_label(self):
+ """str: The column label.
+
+ .. versionadded:: 8.0.12
+ """
+ return self._name or self._original_name
+
+ @property
+ def type(self):
+ """int: The column type.
+
+ .. versionadded:: 8.0.12
+ """
+ return self._col_type
+
+ @property
+ def length(self):
+ """int. The column length.
+
+ .. versionadded:: 8.0.12
+ """
+ return self._length
+
    # NOTE(review): the properties below (added in 8.0.12) and the legacy
    # get_*()/is_*() accessors further down expose the same private
    # attributes, which are presumably populated by the enclosing class'
    # __init__ (not visible in this chunk) -- confirm against the full class.
    @property
    def fractional_digits(self):
        """int: The column fractional digits.

        .. versionadded:: 8.0.12
        """
        return self._fractional_digits

    @property
    def collation_name(self):
        """str: The collation name.

        .. versionadded:: 8.0.12
        """
        return self._collation_name

    @property
    def character_set_name(self):
        """str: The character set name.

        .. versionadded:: 8.0.12
        """
        return self._character_set_name

    # Legacy accessors kept for backward compatibility with the pre-8.0.12
    # API; new code should prefer the equivalent properties above.
    def get_schema_name(self):
        """Returns the schema name.

        Returns:
            str: The schema name.
        """
        return self._schema

    def get_table_name(self):
        """Returns the table name.

        Returns:
            str: The table name.
        """
        # Prefers the original (unaliased) table name over the label.
        return self._original_table or self._table

    def get_table_label(self):
        """Returns the table label.

        Returns:
            str: The table label.
        """
        # Prefers the alias; falls back to the original table name.
        return self._table or self._original_table

    def get_column_name(self):
        """Returns the column name.

        Returns:
            str: The column name.
        """
        # Prefers the original (unaliased) column name over the label.
        return self._original_name or self._name

    def get_column_label(self):
        """Returns the column label.

        Returns:
            str: The column label.
        """
        # Prefers the alias; falls back to the original column name.
        return self._name or self._original_name

    def get_proto_type(self):
        """Returns the column proto type.

        Returns:
            int: The column proto type.
        """
        return self._proto_type

    def get_type(self):
        """Returns the column type.

        Returns:
            int: The column type.
        """
        return self._col_type

    def get_length(self):
        """Returns the column length.

        Returns:
            int: The column length.
        """
        return self._length

    def get_fractional_digits(self):
        """Returns the column fractional digits.

        Returns:
            int: The column fractional digits.
        """
        return self._fractional_digits

    def get_collation_name(self):
        """Returns the collation name.

        Returns:
            str: The collation name.
        """
        return self._collation_name

    def get_character_set_name(self):
        """Returns the character set name.

        Returns:
            str: The character set name.
        """
        return self._character_set_name

    def is_number_signed(self):
        """Returns `True` if is a number signed.

        Returns:
            bool: Returns `True` if is a number signed.
        """
        return self._number_signed

    def is_padded(self):
        """Returns `True` if is padded.

        Returns:
            bool: Returns `True` if is padded.
        """
        return self._is_padded

    def is_bytes(self):
        """Returns `True` if is bytes.

        Returns:
            bool: Returns `True` if is bytes.
        """
        return self._is_bytes
+
+
class Row(object):
    """A single row produced by a SELECT-style operation.

    Supports indexing by integer position or by column name (the latter is
    resolved through the owning result set).

    Args:
        rs (mysqlx.SqlResult or mysqlx.RowResult): The result set.
        fields (`list`): The list of fields.
    """
    def __init__(self, rs, fields):
        self._fields = fields
        self._resultset = rs

    def __repr__(self):
        return repr(self._fields)

    def __getitem__(self, index):
        """Returns the value of a column by name or index.

        Raises:
            ValueError: If a column name is not found.
            IndexError: If the resolved position is out of range.

        .. versionchanged:: 8.0.12
        """
        if isinstance(index, str):
            pos = self._resultset.index_of(index)
            if pos == -1:
                raise ValueError("Column name '{0}' not found".format(index))
        else:
            pos = index
        if pos < 0 or pos >= len(self._fields):
            raise IndexError("Index out of range")
        return self._fields[pos]

    @deprecated("8.0.12")
    def get_string(self, str_index):
        """Returns the value using the column name.

        Args:
            str_index (str): The column name.

        .. deprecated:: 8.0.12
        """
        pos = self._resultset.index_of(str_index)
        # Keep the original check order: range first, then not-found.
        if pos >= len(self._fields):
            raise IndexError("Argument out of range")
        if pos == -1:
            raise ValueError("Column name '{0}' not found".format(str_index))
        return str(self._fields[pos])
+
+
class BaseResult(object):
    """Common behavior shared by all result objects.

    Tracks warnings, the affected-row count and generated ids for the last
    executed operation.

    Args:
        connection (mysqlx.connection.Connection): The Connection object.
    """
    def __init__(self, connection):
        self._connection = connection
        self._closed = False
        self._rows_affected = 0
        self._generated_id = -1
        self._generated_ids = []
        self._warnings = []
        if connection is not None:
            self._protocol = connection.protocol
            # Drain any result still pending on the wire before reuse.
            connection.fetch_active_result()
        else:
            self._protocol = None

    def get_affected_items_count(self):
        """Returns the number of affected items for the last operation.

        Returns:
            int: The number of affected items.
        """
        return self._rows_affected

    def get_warnings(self):
        """Returns the warnings.

        Returns:
            `list`: The list of warnings.
        """
        return self._warnings

    def get_warnings_count(self):
        """Returns the number of warnings.

        Returns:
            int: The number of warnings.
        """
        return len(self._warnings)

    def set_closed(self, flag):
        """Sets if resultset fetch is done."""
        self._closed = flag

    def append_warning(self, level, code, msg):
        """Append a warning.

        Args:
            level (int): The warning level.
            code (int): The warning code.
            msg (str): The warning message.
        """
        warning = {"level": level, "code": code, "msg": msg}
        self._warnings.append(warning)

    def set_generated_ids(self, generated_ids):
        """Sets the generated ids."""
        self._generated_ids = generated_ids

    def set_generated_insert_id(self, generated_id):
        """Sets the generated insert id."""
        self._generated_id = generated_id

    def set_rows_affected(self, total):
        """Sets the number of rows affected."""
        self._rows_affected = total
+
+
class Result(BaseResult):
    """Allows retrieving information about non query operations performed on
    the database.

    Args:
        connection (mysqlx.connection.Connection): The Connection object.
        ids (`list`): A list of IDs.
    """
    def __init__(self, connection=None, ids=None):
        super(Result, self).__init__(connection)
        self._ids = ids

        # Close the server-side result immediately: this result type carries
        # no row data to iterate.
        if connection is not None:
            self._connection.close_result(self)

    def get_autoincrement_value(self):
        """Returns the last insert id auto generated.

        Returns:
            int: The last insert id.
        """
        return self._generated_id

    @deprecated("8.0.12")
    def get_document_id(self):
        """Returns ID of the last document inserted into a collection.

        Returns `None` when no document ids were recorded.

        .. deprecated:: 8.0.12
        """
        if self._ids is None or len(self._ids) == 0:
            return None
        return self._ids[0]

    @deprecated("8.0.12")
    def get_generated_insert_id(self):
        """Returns the generated insert id.

        .. deprecated:: 8.0.12
        """
        return self._generated_id

    def get_generated_ids(self):
        """Returns the generated ids."""
        return self._generated_ids
+
+
class BufferingResult(BaseResult):
    """Provides base functionality for buffering result objects.

    Args:
        connection (mysqlx.connection.Connection): The Connection object.
    """
    def __init__(self, connection):
        super(BufferingResult, self).__init__(connection)
        self._columns = []
        self._has_data = False
        self._has_more_results = False
        self._items = []          # rows/documents buffered so far
        self._page_size = 0
        self._position = -1
        self._init_result()

    def __getitem__(self, index):
        # Indexes into the rows buffered so far; does not trigger a fetch.
        return self._items[index]

    @property
    def count(self):
        """int: The total of items."""
        return len(self._items)

    def _init_result(self):
        """Initialize the result."""
        self._columns = self._connection.get_column_metadata(self)
        # NOTE(review): this assigns `_has_more_data`, while __init__ and
        # set_has_data() use `_has_data` -- the naming looks inconsistent;
        # `_has_more_data` is only read within this method. Confirm intent
        # against upstream before renaming.
        self._has_more_data = True if len(self._columns) > 0 else False
        self._items = []
        self._page_size = 20  # rows fetched per page by _page_in_items()
        self._position = -1
        self._connection.set_active_result(self if self._has_more_data
                                           else None)

    def _read_item(self, dumping):
        """Read item.

        Args:
            dumping (bool): `True` for dumping.

        Returns:
            :class:`mysqlx.Row`: A `Row` object.
        """
        row = self._connection.read_row(self)
        if row is None:
            return None
        item = [None] * len(row["field"])
        # When dumping, the raw protobuf values are not decoded and the Row
        # keeps its None placeholders.
        if not dumping:
            for key in range(len(row["field"])):
                column = self._columns[key]
                item[key] = from_protobuf(column, row["field"][key])
        return Row(self, item)

    def _page_in_items(self):
        """Reads the page items.

        Returns:
            int: Total items read.
        """
        # Returns False when already closed; otherwise an int count (0 when
        # the stream is exhausted). Both falsy values stop fetch_all().
        if self._closed:
            return False

        count = 0
        for _ in range(self._page_size):
            item = self._read_item(False)
            if item is None:
                break
            self._items.append(item)
            count += 1
        return count

    def index_of(self, col_name):
        """Returns the index of the column.

        Returns:
            int: The index of the column, or -1 if not found.
        """
        index = 0
        for col in self._columns:
            if col.get_column_label() == col_name:
                return index
            index += 1
        return -1

    def fetch_one(self):
        """Fetch one item.

        Returns:
            :class:`mysqlx.Row` or :class:`mysqlx.DbDoc`: one result item.
        """
        if self._closed:
            return None

        return self._read_item(False)

    def fetch_all(self):
        """Fetch all items.

        Returns:
            `list`: The list of items of :class:`mysqlx.DbDoc` or
                    :class:`mysqlx.Row`.
        """
        while True:
            if not self._page_in_items():
                break
        return self._items

    def set_has_data(self, flag):
        """Sets if result has data.

        Args:
            flag (bool): `True` if result has data.
        """
        self._has_data = flag

    def set_has_more_results(self, flag):
        """Sets if has more results.

        Args:
            flag (bool): `True` if has more results.
        """
        self._has_more_results = flag
+
+
class RowResult(BufferingResult):
    """Allows traversing the Row objects returned by a Table.select operation.

    Args:
        connection (mysqlx.connection.Connection): The Connection object.
    """
    def __init__(self, connection):
        super(RowResult, self).__init__(connection)

    @property
    def columns(self):
        """`list`: The list of columns."""
        return self._columns

    def get_columns(self):
        """Returns the list of columns.

        Returns:
            `list`: The list of columns.

        .. versionadded:: 8.0.12
        """
        # Equivalent to the `columns` property; kept for cross-connector
        # X DevAPI parity.
        return self._columns
+
+
class SqlResult(RowResult):
    """Represents a result from a SQL statement.

    Args:
        connection (mysqlx.connection.Connection): The Connection object.
    """
    def __init__(self, connection):
        super(SqlResult, self).__init__(connection)

    def get_autoincrement_value(self):
        """Returns the identifier for the last record inserted.

        Returns:
            str: The identifier of the last record inserted.
        """
        return self._generated_id

    def next_result(self):
        """Process the next result.

        Returns:
            bool: Returns `True` if the fetch is done.
        """
        if self._closed:
            return False
        # Reset the flag and re-read column metadata for the next result set.
        self._has_more_results = False
        self._init_result()
        return True

    def has_data(self):
        """Returns True if result has data.

        Returns:
            bool: Returns `True` if result has data.

        .. versionadded:: 8.0.12
        """
        return self._has_data
+
class DocResult(BufferingResult):
    """Allows traversing the DbDoc objects returned by a Collection.find
    operation.

    Args:
        connection (mysqlx.connection.Connection): The Connection object.
    """
    def __init__(self, connection):
        super(DocResult, self).__init__(connection)

    def _read_item(self, dumping):
        """Read item.

        Args:
            dumping (bool): `True` for dumping.

        Returns:
            :class:`mysqlx.DbDoc`: A `DbDoc` object.
        """
        row = super(DocResult, self)._read_item(dumping)
        if row is None:
            return None
        # A document result has a single column; its value is the raw JSON
        # payload, decoded here into a DbDoc.
        return DbDoc(decode_from_bytes(row[0]))
diff --git a/monEnvTP/lib/python3.8/site-packages/mysqlx/statement.py b/monEnvTP/lib/python3.8/site-packages/mysqlx/statement.py
new file mode 100644
index 0000000000000000000000000000000000000000..f36c31647db5e4e22537bd0576b250cf8abf4756
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/mysqlx/statement.py
@@ -0,0 +1,1435 @@
+# Copyright (c) 2016, 2020, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""Implementation of Statements."""
+
+import copy
+import json
+import warnings
+
+from .errors import ProgrammingError, NotSupportedError
+from .expr import ExprParser
+from .constants import LockContention
+from .dbdoc import DbDoc
+from .helpers import deprecated
+from .result import Result
+from .protobuf import mysqlxpb_enum
+
+ERR_INVALID_INDEX_NAME = 'The given index name "{}" is not valid'
+
+
class Expr(object):
    """Lightweight wrapper that marks a string as an expression."""

    def __init__(self, expr):
        self.expr = expr
+
+
def flexible_params(*values):
    """Normalize flexible parameters.

    A single list or tuple argument is returned unchanged; any other
    argument sequence is returned as the tuple of positional values.
    """
    if len(values) != 1:
        return values
    single = values[0]
    return single if isinstance(single, (list, tuple)) else values
+
+
def is_quoted_identifier(identifier, sql_mode=""):
    """Check if the given identifier is quoted.

    Args:
        identifier (string): Identifier to check.
        sql_mode (Optional[string]): SQL mode.

    Returns:
        `True` if the identifier has backtick quotes (or, when the SQL mode
        contains ``ANSI_QUOTES``, double quotes), and `False` otherwise.
        An empty identifier is never quoted.
    """
    # Guard against an empty string: the original indexed identifier[0]
    # unconditionally and raised IndexError on "".
    if not identifier:
        return False
    backtick_quoted = identifier[0] == "`" and identifier[-1] == "`"
    if "ANSI_QUOTES" in sql_mode:
        # ANSI_QUOTES additionally allows double-quoted identifiers.
        return (backtick_quoted or
                (identifier[0] == '"' and identifier[-1] == '"'))
    return backtick_quoted
+
+
def quote_identifier(identifier, sql_mode=""):
    """Quote the given identifier, escaping embedded quote characters by
    doubling them.

    Backticks are used by default; when the SQL mode contains
    ``ANSI_QUOTES``, double quotes are used instead.

    Args:
        identifier (string): Identifier to quote.
        sql_mode (Optional[string]): SQL mode.

    Returns:
        A string with the identifier quoted.
    """
    if not identifier:
        # Preserve historical behavior: empty identifiers always get
        # backticks, regardless of SQL mode.
        return "``"
    if "ANSI_QUOTES" in sql_mode:
        escaped = identifier.replace('"', '""')
        return '"{0}"'.format(escaped)
    escaped = identifier.replace("`", "``")
    return "`{0}`".format(escaped)
+
+
def quote_multipart_identifier(identifiers, sql_mode=""):
    """Quote each part of a multi-part identifier and join them with dots.

    Args:
        identifiers (iterable): List of identifiers to quote.
        sql_mode (Optional[string]): SQL mode.

    Returns:
        A string with the multi-part identifier quoted with backticks.
    """
    return ".".join(quote_identifier(part, sql_mode)
                    for part in identifiers)
+
+
def parse_table_name(default_schema, table_name, sql_mode=""):
    """Split a (possibly schema-qualified, possibly quoted) table name.

    Args:
        default_schema (str): Schema to use when ``table_name`` has no
                              schema part.
        table_name (str): The table name, optionally ``schema.table`` and
                          optionally quoted.
        sql_mode (Optional[str]): The SQL mode.

    Returns:
        tuple: ``(schema, table)`` with quote characters stripped.
    """
    quote = '"' if "ANSI_QUOTES" in sql_mode else "`"
    # When the name is quoted, split on '.<quote>' so dots inside quoted
    # parts are not treated as separators.
    separator = ".{0}".format(quote) if quote in table_name else "."
    parts = table_name.split(separator, 1)
    if len(parts) == 1:
        return (default_schema, parts[0].strip(quote))
    return (parts[0].strip(quote), parts[-1].strip(quote))
+
+
class Statement(object):
    """Base class for all statement objects.

    Holds the bookkeeping shared by every statement: the target database
    object, the owning connection, the server-side prepared-statement id,
    and the execution/changed/prepared flags used by the prepared-statement
    machinery.

    Args:
        target (object): The target database object, it can be
                         :class:`mysqlx.Collection` or :class:`mysqlx.Table`.
        doc_based (bool): `True` if it is document based.
    """
    def __init__(self, target, doc_based=True):
        self._target = target
        self._doc_based = doc_based
        self._connection = target.get_connection() if target else None
        self._stmt_id = None
        self._exec_counter = 0
        self._changed = True
        self._prepared = False
        self._deallocate_prepare_execute = False

    @property
    def target(self):
        """object: The database object target."""
        return self._target

    @property
    def schema(self):
        """:class:`mysqlx.Schema`: The Schema object."""
        return self._target.schema

    @property
    def stmt_id(self):
        """int: The server-side statement ID."""
        return self._stmt_id

    @stmt_id.setter
    def stmt_id(self, value):
        self._stmt_id = value

    @property
    def exec_counter(self):
        """int: How many times this statement was executed."""
        return self._exec_counter

    @property
    def changed(self):
        """bool: `True` if this statement has changes."""
        return self._changed

    @changed.setter
    def changed(self, value):
        self._changed = value

    @property
    def prepared(self):
        """bool: `True` if this statement has been prepared."""
        return self._prepared

    @prepared.setter
    def prepared(self, value):
        self._prepared = value

    @property
    def repeated(self):
        """bool: `True` once this statement was executed more than once."""
        return self._exec_counter > 1

    @property
    def deallocate_prepare_execute(self):
        """bool: `True` to deallocate + prepare + execute the statement."""
        return self._deallocate_prepare_execute

    @deallocate_prepare_execute.setter
    def deallocate_prepare_execute(self, value):
        self._deallocate_prepare_execute = value

    def is_doc_based(self):
        """Check if it is document based.

        Returns:
            bool: `True` if it is document based.
        """
        return self._doc_based

    def increment_exec_counter(self):
        """Increments the number of times this statement has been executed."""
        self._exec_counter += 1

    def reset_exec_counter(self):
        """Resets the number of times this statement has been executed."""
        self._exec_counter = 0

    def execute(self):
        """Execute the statement.

        Raises:
            NotImplementedError: Subclasses must implement this method.
        """
        raise NotImplementedError
+
+
class FilterableStatement(Statement):
    """A statement to be used with filterable statements.

    Args:
        target (object): The target database object, it can be
                         :class:`mysqlx.Collection` or :class:`mysqlx.Table`.
        doc_based (Optional[bool]): `True` if it is document based
                                    (default: `True`).
        condition (Optional[str]): Sets the search condition to filter
                                   documents or records.
    """
    def __init__(self, target, doc_based=True, condition=None):
        super(FilterableStatement, self).__init__(target=target,
                                                  doc_based=doc_based)
        # The *_str attributes keep the raw clause text; the matching *_expr
        # attributes keep the parsed expression trees.
        self._binding_map = {}
        self._bindings = {}
        self._having = None
        self._grouping_str = ""
        self._grouping = None
        self._limit_offset = 0
        self._limit_row_count = None
        self._projection_str = ""
        self._projection_expr = None
        self._sort_str = ""
        self._sort_expr = None
        self._where_str = ""
        self._where_expr = None
        # Public flags consumed by the protocol layer when building messages.
        self.has_bindings = False
        self.has_limit = False
        self.has_group_by = False
        self.has_having = False
        self.has_projection = False
        self.has_sort = False
        self.has_where = False
        if condition:
            self._set_where(condition)

    def _bind_single(self, obj):
        """Bind single object.

        Args:
            obj (:class:`mysqlx.DbDoc` or str): DbDoc or JSON string object.

        Raises:
            :class:`mysqlx.ProgrammingError`: If invalid JSON string to bind.
            ValueError: If JSON loaded is not a dictionary.
        """
        if isinstance(obj, dict):
            self.bind(DbDoc(obj).as_str())
        elif isinstance(obj, DbDoc):
            self.bind(obj.as_str())
        elif isinstance(obj, str):
            try:
                res = json.loads(obj)
                if not isinstance(res, dict):
                    # Re-routed to the except-clause below so non-dict JSON
                    # is reported the same way as malformed JSON.
                    raise ValueError
            except ValueError:
                raise ProgrammingError("Invalid JSON string to bind")
            for key in res.keys():
                self.bind(key, res[key])
        else:
            raise ProgrammingError("Invalid JSON string or object to bind")

    def _sort(self, *clauses):
        """Sets the sorting criteria.

        Args:
            *clauses: The expression strings defining the sort criteria.

        Returns:
            mysqlx.FilterableStatement: FilterableStatement object.
        """
        self.has_sort = True
        self._sort_str = ",".join(flexible_params(*clauses))
        self._sort_expr = ExprParser(self._sort_str,
                                     not self._doc_based).parse_order_spec()
        self._changed = True
        return self

    def _set_where(self, condition):
        """Sets the search condition to filter.

        Args:
            condition (str): Sets the search condition to filter documents or
                             records.

        Returns:
            mysqlx.FilterableStatement: FilterableStatement object.

        Raises:
            ProgrammingError: If the condition cannot be parsed.
        """
        self.has_where = True
        self._where_str = condition
        try:
            expr = ExprParser(condition, not self._doc_based)
            self._where_expr = expr.expr()
        except ValueError:
            raise ProgrammingError("Invalid condition")
        # Remember each named placeholder's position for later bind() calls.
        self._binding_map = expr.placeholder_name_to_position
        self._changed = True
        return self

    def _set_group_by(self, *fields):
        """Set group by.

        Args:
            *fields: List of fields.
        """
        fields = flexible_params(*fields)
        self.has_group_by = True
        self._grouping_str = ",".join(fields)
        self._grouping = ExprParser(self._grouping_str,
                                    not self._doc_based).parse_expr_list()
        self._changed = True

    def _set_having(self, condition):
        """Set having.

        Args:
            condition (str): The condition.
        """
        self.has_having = True
        self._having = ExprParser(condition, not self._doc_based).expr()
        self._changed = True

    def _set_projection(self, *fields):
        """Set the projection.

        Args:
            *fields: List of fields.

        Returns:
            :class:`mysqlx.FilterableStatement`: Returns self.
        """
        fields = flexible_params(*fields)
        self.has_projection = True
        self._projection_str = ",".join(fields)
        self._projection_expr = ExprParser(
            self._projection_str,
            not self._doc_based).parse_table_select_projection()
        self._changed = True
        return self

    def get_binding_map(self):
        """Returns the binding map dictionary.

        Returns:
            dict: The binding map dictionary.
        """
        return self._binding_map

    def get_bindings(self):
        """Returns the bindings.

        Returns:
            dict: The placeholder-name to value mapping.
        """
        return self._bindings

    def get_grouping(self):
        """Returns the grouping expression list.

        Returns:
            `list`: The grouping expression list.
        """
        return self._grouping

    def get_having(self):
        """Returns the having expression.

        Returns:
            object: The having expression.
        """
        return self._having

    def get_limit_row_count(self):
        """Returns the limit row count.

        Returns:
            int: The limit row count.
        """
        return self._limit_row_count

    def get_limit_offset(self):
        """Returns the limit offset.

        Returns:
            int: The limit offset.
        """
        return self._limit_offset

    def get_where_expr(self):
        """Returns the where expression.

        Returns:
            object: The where expression.
        """
        return self._where_expr

    def get_projection_expr(self):
        """Returns the projection expression.

        Returns:
            object: The projection expression.
        """
        return self._projection_expr

    def get_sort_expr(self):
        """Returns the sort expression.

        Returns:
            object: The sort expression.
        """
        return self._sort_expr

    @deprecated("8.0.12")
    def where(self, condition):
        """Sets the search condition to filter.

        Args:
            condition (str): Sets the search condition to filter documents or
                             records.

        Returns:
            mysqlx.FilterableStatement: FilterableStatement object.

        .. deprecated:: 8.0.12
        """
        return self._set_where(condition)

    @deprecated("8.0.12")
    def sort(self, *clauses):
        """Sets the sorting criteria.

        Args:
            *clauses: The expression strings defining the sort criteria.

        Returns:
            mysqlx.FilterableStatement: FilterableStatement object.

        .. deprecated:: 8.0.12
        """
        return self._sort(*clauses)

    def limit(self, row_count, offset=None):
        """Sets the maximum number of items to be returned.

        Args:
            row_count (int): The maximum number of items.

        Returns:
            mysqlx.FilterableStatement: FilterableStatement object.

        Raises:
            ValueError: If ``row_count`` is not a positive integer.

        .. versionchanged:: 8.0.12
           The usage of ``offset`` was deprecated.
        """
        # NOTE(review): the message says "positive" but row_count == 0 is
        # accepted -- the check is really for non-negative integers.
        if not isinstance(row_count, int) or row_count < 0:
            raise ValueError("The 'row_count' value must be a positive integer")
        if not self.has_limit:
            # A first LIMIT on an already-executed statement forces a
            # deallocate + re-prepare rather than a plain re-execute.
            self._changed = bool(self._exec_counter == 0)
            self._deallocate_prepare_execute = bool(not self._exec_counter == 0)

        self._limit_row_count = row_count
        self.has_limit = True
        if offset:
            self.offset(offset)
            warnings.warn("'limit(row_count, offset)' is deprecated, please "
                          "use 'offset(offset)' to set the number of items to "
                          "skip", category=DeprecationWarning)
        return self

    def offset(self, offset):
        """Sets the number of items to skip.

        Args:
            offset (int): The number of items to skip.

        Returns:
            mysqlx.FilterableStatement: FilterableStatement object.

        Raises:
            ValueError: If ``offset`` is not a positive integer.

        .. versionadded:: 8.0.12
        """
        if not isinstance(offset, int) or offset < 0:
            raise ValueError("The 'offset' value must be a positive integer")
        self._limit_offset = offset
        return self

    def bind(self, *args):
        """Binds value(s) to a specific placeholder(s).

        Args:
            *args: The name of the placeholder and the value to bind.
                   A :class:`mysqlx.DbDoc` object or a JSON string
                   representation can be used.

        Returns:
            mysqlx.FilterableStatement: FilterableStatement object.

        Raises:
            ProgrammingError: If the number of arguments is invalid.
        """
        self.has_bindings = True
        count = len(args)
        if count == 1:
            # One argument: a document/JSON object carrying name-value pairs.
            self._bind_single(args[0])
        elif count == 2:
            # Two arguments: placeholder name and value.
            self._bindings[args[0]] = args[1]
        else:
            raise ProgrammingError("Invalid number of arguments to bind")
        return self

    def execute(self):
        """Execute the statement.

        Raises:
            NotImplementedError: This method must be implemented.
        """
        raise NotImplementedError
+
+
class SqlStatement(Statement):
    """A statement for raw SQL execution.

    Args:
        connection (mysqlx.connection.Connection): Connection object.
        sql (string): The sql statement to be executed.
    """
    def __init__(self, connection, sql):
        super(SqlStatement, self).__init__(target=None, doc_based=False)
        self._connection = connection
        self._sql = sql
        self._binding_map = None
        self._bindings = []
        self.has_bindings = False
        self.has_limit = False

    @property
    def sql(self):
        """string: The SQL text statement."""
        return self._sql

    def get_binding_map(self):
        """Returns the binding map dictionary.

        Returns:
            dict: The binding map dictionary.
        """
        return self._binding_map

    def get_bindings(self):
        """Returns the bindings list.

        Returns:
            `list`: The bindings list.
        """
        return self._bindings

    def bind(self, *args):
        """Binds value(s) to a specific placeholder(s).

        Args:
            *args: The value(s) to bind.

        Returns:
            mysqlx.SqlStatement: SqlStatement object.

        Raises:
            ProgrammingError: If no arguments are given.
        """
        if not args:
            raise ProgrammingError("Invalid number of arguments to bind")
        self.has_bindings = True
        params = flexible_params(*args)
        if isinstance(params, (list, tuple)):
            # A sequence replaces any previously bound values.
            self._bindings = params
        else:
            self._bindings.append(params)
        return self

    def execute(self):
        """Execute the statement.

        Returns:
            mysqlx.SqlResult: SqlResult object.
        """
        return self._connection.send_sql(self)
+
+
class WriteStatement(Statement):
    """Base class holding the state shared by write operations."""

    def __init__(self, target, doc_based):
        super(WriteStatement, self).__init__(target, doc_based)
        self._values = []

    def get_values(self):
        """Returns the values accumulated for this statement.

        Returns:
            `list`: The list of values.
        """
        return self._values

    def execute(self):
        """Execute the statement.

        Raises:
            NotImplementedError: Subclasses must implement this method.
        """
        raise NotImplementedError
+
+
class AddStatement(WriteStatement):
    """A statement that inserts documents into a collection.

    Args:
        collection (mysqlx.Collection): The Collection object.
    """
    def __init__(self, collection):
        super(AddStatement, self).__init__(collection, True)
        self._upsert = False
        self.ids = []

    def is_upsert(self):
        """Returns `True` if it's an upsert.

        Returns:
            bool: `True` if it's an upsert.
        """
        return self._upsert

    def upsert(self, value=True):
        """Sets the upsert flag to the boolean of the value provided.
        Setting of this flag allows updating of the matched rows/documents
        with the provided value.

        Args:
            value (optional[bool]): Set or unset the upsert flag.
        """
        self._upsert = value
        return self

    def add(self, *values):
        """Adds a list of documents into a collection.

        Args:
            *values: The documents to be added into the collection.

        Returns:
            mysqlx.AddStatement: AddStatement object.
        """
        for doc in flexible_params(*values):
            # Wrap plain values so every stored element is a DbDoc.
            self._values.append(doc if isinstance(doc, DbDoc) else DbDoc(doc))
        return self

    def execute(self):
        """Execute the statement.

        Returns:
            mysqlx.Result: Result object.
        """
        if not self._values:
            # Nothing to insert: return an empty Result without touching
            # the connection.
            return Result()
        return self._connection.send_insert(self)
+
+
class UpdateSpec(object):
    """Update specification class implementation.

    Args:
        update_type (int): The update type.
        source (str): The source.
        value (Optional[str]): The value.
    """
    def __init__(self, update_type, source, value=None):
        if update_type == mysqlxpb_enum(
                "Mysqlx.Crud.UpdateOperation.UpdateType.SET"):
            # Table-style SET: the source is parsed as a table update field.
            self._table_set(source, value)
        else:
            self.update_type = update_type
            self.source = source
            # Strip a leading '$' (document root) before parsing the path;
            # the document-field parse then runs for every non-SET source.
            if len(source) > 0 and source[0] == '$':
                self.source = source[1:]
            self.source = ExprParser(self.source,
                                     False).document_field().identifier
            self.value = value

    def _table_set(self, source, value):
        """Table set.

        Args:
            source (str): The source.
            value (str): The value.
        """
        self.update_type = mysqlxpb_enum(
            "Mysqlx.Crud.UpdateOperation.UpdateType.SET")
        self.source = ExprParser(source, True).parse_table_update_field()
        self.value = value
+
+
class ModifyStatement(FilterableStatement):
    """A statement for document update operations on a Collection.

    Args:
        collection (mysqlx.Collection): The Collection object.
        condition (str): Sets the search condition to identify the documents
                         to be modified.

    .. versionchanged:: 8.0.12
       The ``condition`` parameter is now mandatory.
    """
    def __init__(self, collection, condition):
        super(ModifyStatement, self).__init__(target=collection,
                                              condition=condition)
        # Maps a document path to its pending UpdateSpec; a later call on
        # the same path overwrites the earlier operation.
        self._update_ops = {}

    def sort(self, *clauses):
        """Sets the sorting criteria.

        Args:
            *clauses: The expression strings defining the sort criteria.

        Returns:
            mysqlx.ModifyStatement: ModifyStatement object.
        """
        return self._sort(*clauses)

    def get_update_ops(self):
        """Returns the update operations.

        Returns:
            dict: The document-path to update-operation mapping.
        """
        return self._update_ops

    def set(self, doc_path, value):
        """Sets or updates attributes on documents in a collection.

        Args:
            doc_path (string): The document path of the item to be set.
            value (string): The value to be set on the specified attribute.

        Returns:
            mysqlx.ModifyStatement: ModifyStatement object.
        """
        self._update_ops[doc_path] = UpdateSpec(
            mysqlxpb_enum("Mysqlx.Crud.UpdateOperation.UpdateType.ITEM_SET"),
            doc_path, value)
        self._changed = True
        return self

    @deprecated("8.0.12")
    def change(self, doc_path, value):
        """Add an update to the statement setting the field, if it exists at
        the document path, to the given value.

        Args:
            doc_path (string): The document path of the item to be set.
            value (object): The value to be set on the specified attribute.

        Returns:
            mysqlx.ModifyStatement: ModifyStatement object.

        .. deprecated:: 8.0.12
        """
        # Unlike set(), ITEM_REPLACE only updates fields that already exist.
        self._update_ops[doc_path] = UpdateSpec(
            mysqlxpb_enum(
                "Mysqlx.Crud.UpdateOperation.UpdateType.ITEM_REPLACE"),
            doc_path, value)
        self._changed = True
        return self

    def unset(self, *doc_paths):
        """Removes attributes from documents in a collection.

        Args:
            doc_paths (list): The list of document paths of the attributes to be
                              removed.

        Returns:
            mysqlx.ModifyStatement: ModifyStatement object.
        """
        for item in flexible_params(*doc_paths):
            self._update_ops[item] = UpdateSpec(
                mysqlxpb_enum(
                    "Mysqlx.Crud.UpdateOperation.UpdateType.ITEM_REMOVE"),
                item)
        self._changed = True
        return self

    def array_insert(self, field, value):
        """Insert a value into the specified array in documents of a
        collection.

        Args:
            field (string): A document path that identifies the array attribute
                            and position where the value will be inserted.
            value (object): The value to be inserted.

        Returns:
            mysqlx.ModifyStatement: ModifyStatement object.
        """
        self._update_ops[field] = UpdateSpec(
            mysqlxpb_enum(
                "Mysqlx.Crud.UpdateOperation.UpdateType.ARRAY_INSERT"),
            field, value)
        self._changed = True
        return self

    def array_append(self, doc_path, value):
        """Inserts a value into a specific position in an array attribute in
        documents of a collection.

        Args:
            doc_path (string): A document path that identifies the array
                               attribute and position where the value will be
                               inserted.
            value (object): The value to be inserted.

        Returns:
            mysqlx.ModifyStatement: ModifyStatement object.
        """
        self._update_ops[doc_path] = UpdateSpec(
            mysqlxpb_enum(
                "Mysqlx.Crud.UpdateOperation.UpdateType.ARRAY_APPEND"),
            doc_path, value)
        self._changed = True
        return self

    def patch(self, doc):
        """Takes a :class:`mysqlx.DbDoc`, string JSON format or a dict with the
        changes and applies it on all matching documents.

        Args:
            doc (object): A generic document (DbDoc), string in JSON format or
                          dict, with the changes to apply to the matching
                          documents.

        Returns:
            mysqlx.ModifyStatement: ModifyStatement object.

        Raises:
            ProgrammingError: If ``doc`` is not an accepted type.
        """
        if doc is None:
            doc = ''
        if not isinstance(doc, (ExprParser, dict, DbDoc, str)):
            raise ProgrammingError(
                "Invalid data for update operation on document collection "
                "table")
        # All patches share the fixed key "patch", so a later patch()
        # replaces an earlier one.
        self._update_ops["patch"] = UpdateSpec(
            mysqlxpb_enum("Mysqlx.Crud.UpdateOperation.UpdateType.MERGE_PATCH"),
            '', doc.expr() if isinstance(doc, ExprParser) else doc)
        self._changed = True
        return self

    def execute(self):
        """Execute the statement.

        Returns:
            mysqlx.Result: Result object.

        Raises:
            ProgrammingError: If condition was not set.
        """
        # A modify without a WHERE would touch every document; refuse it.
        if not self.has_where:
            raise ProgrammingError("No condition was found for modify")
        return self._connection.send_update(self)
+
+
+class ReadStatement(FilterableStatement):
+ """Provide base functionality for Read operations
+
+ Args:
+ target (object): The target database object, it can be
+ :class:`mysqlx.Collection` or :class:`mysqlx.Table`.
+ doc_based (Optional[bool]): `True` if it is document based
+ (default: `True`).
+ condition (Optional[str]): Sets the search condition to filter
+ documents or records.
+ """
+ def __init__(self, target, doc_based=True, condition=None):
+ super(ReadStatement, self).__init__(target, doc_based, condition)
+ self._lock_exclusive = False
+ self._lock_shared = False
+ self._lock_contention = LockContention.DEFAULT
+
+ @property
+ def lock_contention(self):
+ """:class:`mysqlx.LockContention`: The lock contention value."""
+ return self._lock_contention
+
+ def _set_lock_contention(self, lock_contention):
+ """Set the lock contention.
+
+ Args:
+ lock_contention (:class:`mysqlx.LockContention`): Lock contention.
+
+ Raises:
+ ProgrammingError: If is an invalid lock contention value.
+ """
+ try:
+ # Check if is a valid lock contention value
+ _ = LockContention.index(lock_contention)
+ except ValueError:
+ raise ProgrammingError("Invalid lock contention mode. Use 'NOWAIT' "
+ "or 'SKIP_LOCKED'")
+ self._lock_contention = lock_contention
+
+ def is_lock_exclusive(self):
+ """Returns `True` if is `EXCLUSIVE LOCK`.
+
+ Returns:
+ bool: `True` if is `EXCLUSIVE LOCK`.
+ """
+ return self._lock_exclusive
+
+ def is_lock_shared(self):
+ """Returns `True` if is `SHARED LOCK`.
+
+ Returns:
+ bool: `True` if is `SHARED LOCK`.
+ """
+ return self._lock_shared
+
+ def lock_shared(self, lock_contention=LockContention.DEFAULT):
+ """Execute a read operation with `SHARED LOCK`. Only one lock can be
+ active at a time.
+
+ Args:
+ lock_contention (:class:`mysqlx.LockContention`): Lock contention.
+ """
+ self._lock_exclusive = False
+ self._lock_shared = True
+ self._set_lock_contention(lock_contention)
+ return self
+
+ def lock_exclusive(self, lock_contention=LockContention.DEFAULT):
+ """Execute a read operation with `EXCLUSIVE LOCK`. Only one lock can be
+ active at a time.
+
+ Args:
+ lock_contention (:class:`mysqlx.LockContention`): Lock contention.
+ """
+ self._lock_exclusive = True
+ self._lock_shared = False
+ self._set_lock_contention(lock_contention)
+ return self
+
+ def group_by(self, *fields):
+ """Sets a grouping criteria for the resultset.
+
+ Args:
+ *fields: The string expressions identifying the grouping criteria.
+
+ Returns:
+ mysqlx.ReadStatement: ReadStatement object.
+ """
+ self._set_group_by(*fields)
+ return self
+
+ def having(self, condition):
+ """Sets a condition for records to be considered in agregate function
+ operations.
+
+ Args:
+ condition (string): A condition on the agregate functions used on
+ the grouping criteria.
+
+ Returns:
+ mysqlx.ReadStatement: ReadStatement object.
+ """
+ self._set_having(condition)
+ return self
+
+ def execute(self):
+ """Execute the statement.
+
+ Returns:
+ mysqlx.Result: Result object.
+ """
+ return self._connection.send_find(self)
+
+
+class FindStatement(ReadStatement):
+ """A statement document selection on a Collection.
+
+ Args:
+ collection (mysqlx.Collection): The Collection object.
+ condition (Optional[str]): An optional expression to identify the
+ documents to be retrieved. If not specified
+ all the documents will be included on the
+ result unless a limit is set.
+ """
+ def __init__(self, collection, condition=None):
+ super(FindStatement, self).__init__(collection, True, condition)
+
+ def fields(self, *fields):
+ """Sets a document field filter.
+
+ Args:
+ *fields: The string expressions identifying the fields to be
+ extracted.
+
+ Returns:
+ mysqlx.FindStatement: FindStatement object.
+ """
+ return self._set_projection(*fields)
+
+ def sort(self, *clauses):
+ """Sets the sorting criteria.
+
+ Args:
+ *clauses: The expression strings defining the sort criteria.
+
+ Returns:
+ mysqlx.FindStatement: FindStatement object.
+ """
+ return self._sort(*clauses)
+
+
+class SelectStatement(ReadStatement):
+ """A statement for record retrieval operations on a Table.
+
+ Args:
+ table (mysqlx.Table): The Table object.
+ *fields: The fields to be retrieved.
+ """
+ def __init__(self, table, *fields):
+ super(SelectStatement, self).__init__(table, False)
+ self._set_projection(*fields)
+
+ def where(self, condition):
+ """Sets the search condition to filter.
+
+ Args:
+ condition (str): Sets the search condition to filter records.
+
+ Returns:
+ mysqlx.SelectStatement: SelectStatement object.
+ """
+ return self._set_where(condition)
+
+ def order_by(self, *clauses):
+ """Sets the order by criteria.
+
+ Args:
+ *clauses: The expression strings defining the order by criteria.
+
+ Returns:
+ mysqlx.SelectStatement: SelectStatement object.
+ """
+ return self._sort(*clauses)
+
+ def get_sql(self):
+ """Returns the generated SQL.
+
+ Returns:
+ str: The generated SQL.
+ """
+ where = " WHERE {0}".format(self._where_str) if self.has_where else ""
+ group_by = " GROUP BY {0}".format(self._grouping_str) if \
+ self.has_group_by else ""
+ having = " HAVING {0}".format(self._having) if self.has_having else ""
+ order_by = " ORDER BY {0}".format(self._sort_str) if self.has_sort \
+ else ""
+ limit = " LIMIT {0} OFFSET {1}".format(self._limit_row_count,
+ self._limit_offset) \
+ if self.has_limit else ""
+ stmt = ("SELECT {select} FROM {schema}.{table}{where}{group}{having}"
+ "{order}{limit}".format(select=self._projection_str or "*",
+ schema=self.schema.name,
+ table=self.target.name, limit=limit,
+ where=where, group=group_by,
+ having=having, order=order_by))
+ return stmt
+
+
+class InsertStatement(WriteStatement):
+ """A statement for insert operations on Table.
+
+ Args:
+ table (mysqlx.Table): The Table object.
+ *fields: The fields to be inserted.
+ """
+ def __init__(self, table, *fields):
+ super(InsertStatement, self).__init__(table, False)
+ self._fields = flexible_params(*fields)
+
+ def values(self, *values):
+ """Set the values to be inserted.
+
+ Args:
+ *values: The values of the columns to be inserted.
+
+ Returns:
+ mysqlx.InsertStatement: InsertStatement object.
+ """
+ self._values.append(list(flexible_params(*values)))
+ return self
+
+ def execute(self):
+ """Execute the statement.
+
+ Returns:
+ mysqlx.Result: Result object.
+ """
+ return self._connection.send_insert(self)
+
+
+class UpdateStatement(FilterableStatement):
+ """A statement for record update operations on a Table.
+
+ Args:
+ table (mysqlx.Table): The Table object.
+
+ .. versionchanged:: 8.0.12
+ The ``fields`` parameters were removed.
+ """
+ def __init__(self, table):
+ super(UpdateStatement, self).__init__(target=table, doc_based=False)
+ self._update_ops = {}
+
+ def where(self, condition):
+ """Sets the search condition to filter.
+
+ Args:
+ condition (str): Sets the search condition to filter records.
+
+ Returns:
+ mysqlx.UpdateStatement: UpdateStatement object.
+ """
+ return self._set_where(condition)
+
+ def order_by(self, *clauses):
+ """Sets the order by criteria.
+
+ Args:
+ *clauses: The expression strings defining the order by criteria.
+
+ Returns:
+ mysqlx.UpdateStatement: UpdateStatement object.
+ """
+ return self._sort(*clauses)
+
+ def get_update_ops(self):
+ """Returns the list of update operations.
+
+ Returns:
+ `list`: The list of update operations.
+ """
+ return self._update_ops
+
+ def set(self, field, value):
+ """Updates the column value on records in a table.
+
+ Args:
+ field (string): The column name to be updated.
+ value (object): The value to be set on the specified column.
+
+ Returns:
+ mysqlx.UpdateStatement: UpdateStatement object.
+ """
+ self._update_ops[field] = UpdateSpec(mysqlxpb_enum(
+ "Mysqlx.Crud.UpdateOperation.UpdateType.SET"), field, value)
+ self._changed = True
+ return self
+
+ def execute(self):
+ """Execute the statement.
+
+ Returns:
+ mysqlx.Result: Result object
+
+ Raises:
+ ProgrammingError: If condition was not set.
+ """
+ if not self.has_where:
+ raise ProgrammingError("No condition was found for update")
+ return self._connection.send_update(self)
+
+
+class RemoveStatement(FilterableStatement):
+ """A statement for document removal from a collection.
+
+ Args:
+ collection (mysqlx.Collection): The Collection object.
+ condition (str): Sets the search condition to identify the documents
+ to be removed.
+
+ .. versionchanged:: 8.0.12
+ The ``condition`` parameter was added.
+ """
+ def __init__(self, collection, condition):
+ super(RemoveStatement, self).__init__(target=collection,
+ condition=condition)
+
+ def sort(self, *clauses):
+ """Sets the sorting criteria.
+
+ Args:
+ *clauses: The expression strings defining the sort criteria.
+
+ Returns:
+ mysqlx.FindStatement: FindStatement object.
+ """
+ return self._sort(*clauses)
+
+ def execute(self):
+ """Execute the statement.
+
+ Returns:
+ mysqlx.Result: Result object.
+
+ Raises:
+ ProgrammingError: If condition was not set.
+ """
+ if not self.has_where:
+ raise ProgrammingError("No condition was found for remove")
+ return self._connection.send_delete(self)
+
+
+class DeleteStatement(FilterableStatement):
+ """A statement that drops a table.
+
+ Args:
+ table (mysqlx.Table): The Table object.
+
+ .. versionchanged:: 8.0.12
+ The ``condition`` parameter was removed.
+ """
+ def __init__(self, table):
+ super(DeleteStatement, self).__init__(target=table, doc_based=False)
+
+ def where(self, condition):
+ """Sets the search condition to filter.
+
+ Args:
+ condition (str): Sets the search condition to filter records.
+
+ Returns:
+ mysqlx.DeleteStatement: DeleteStatement object.
+ """
+ return self._set_where(condition)
+
+ def order_by(self, *clauses):
+ """Sets the order by criteria.
+
+ Args:
+ *clauses: The expression strings defining the order by criteria.
+
+ Returns:
+ mysqlx.DeleteStatement: DeleteStatement object.
+ """
+ return self._sort(*clauses)
+
+ def execute(self):
+ """Execute the statement.
+
+ Returns:
+ mysqlx.Result: Result object.
+
+ Raises:
+ ProgrammingError: If condition was not set.
+ """
+ if not self.has_where:
+ raise ProgrammingError("No condition was found for delete")
+ return self._connection.send_delete(self)
+
+
+class CreateCollectionIndexStatement(Statement):
+ """A statement that creates an index on a collection.
+
+ Args:
+ collection (mysqlx.Collection): Collection.
+ index_name (string): Index name.
+ index_desc (dict): A dictionary containing the fields members that
+ constraints the index to be created. It must have
+ the form as shown in the following::
+
+ {"fields": [{"field": member_path,
+ "type": member_type,
+ "required": member_required,
+ "collation": collation,
+ "options": options,
+ "srid": srid},
+ # {... more members,
+ # repeated as many times
+ # as needed}
+ ],
+ "type": type}
+ """
+ def __init__(self, collection, index_name, index_desc):
+ super(CreateCollectionIndexStatement, self).__init__(target=collection)
+ self._index_desc = copy.deepcopy(index_desc)
+ self._index_name = index_name
+ self._fields_desc = self._index_desc.pop("fields", [])
+
+ def execute(self):
+ """Execute the statement.
+
+ Returns:
+ mysqlx.Result: Result object.
+ """
+ # Validate index name is a valid identifier
+ if self._index_name is None:
+ raise ProgrammingError(
+ ERR_INVALID_INDEX_NAME.format(self._index_name))
+ try:
+ parsed_ident = ExprParser(self._index_name).expr().get_message()
+
+ # The message is type dict when the Protobuf cext is used
+ if isinstance(parsed_ident, dict):
+ if parsed_ident["type"] != mysqlxpb_enum(
+ "Mysqlx.Expr.Expr.Type.IDENT"):
+ raise ProgrammingError(
+ ERR_INVALID_INDEX_NAME.format(self._index_name))
+ else:
+ if parsed_ident.type != mysqlxpb_enum(
+ "Mysqlx.Expr.Expr.Type.IDENT"):
+ raise ProgrammingError(
+ ERR_INVALID_INDEX_NAME.format(self._index_name))
+
+ except (ValueError, AttributeError):
+ raise ProgrammingError(
+ ERR_INVALID_INDEX_NAME.format(self._index_name))
+
+ # Validate members that constraint the index
+ if not self._fields_desc:
+ raise ProgrammingError("Required member 'fields' not found in "
+ "the given index description: {}"
+ "".format(self._index_desc))
+
+ if not isinstance(self._fields_desc, list):
+ raise ProgrammingError("Required member 'fields' must contain a "
+ "list.")
+
+ args = {}
+ args["name"] = self._index_name
+ args["collection"] = self._target.name
+ args["schema"] = self._target.schema.name
+ if "type" in self._index_desc:
+ args["type"] = self._index_desc.pop("type")
+ else:
+ args["type"] = "INDEX"
+ args["unique"] = self._index_desc.pop("unique", False)
+ # Currently unique indexes are not supported:
+ if args["unique"]:
+ raise NotSupportedError("Unique indexes are not supported.")
+ args["constraint"] = []
+
+ if self._index_desc:
+ raise ProgrammingError("Unidentified fields: {}"
+ "".format(self._index_desc))
+
+ try:
+ for field_desc in self._fields_desc:
+ constraint = {}
+ constraint["member"] = field_desc.pop("field")
+ constraint["type"] = field_desc.pop("type")
+ constraint["required"] = field_desc.pop("required", False)
+ constraint["array"] = field_desc.pop("array", False)
+ if not isinstance(constraint["required"], bool):
+ raise TypeError("Field member 'required' must be Boolean")
+ if not isinstance(constraint["array"], bool):
+ raise TypeError("Field member 'array' must be Boolean")
+ if args["type"].upper() == "SPATIAL" and \
+ not constraint["required"]:
+ raise ProgrammingError(
+ "Field member 'required' must be set to 'True' when "
+ "index type is set to 'SPATIAL'")
+ if args["type"].upper() == "INDEX" and \
+ constraint["type"] == "GEOJSON":
+ raise ProgrammingError(
+ "Index 'type' must be set to 'SPATIAL' when field "
+ "type is set to 'GEOJSON'")
+ if "collation" in field_desc:
+ if not constraint["type"].upper().startswith("TEXT"):
+ raise ProgrammingError(
+ "The 'collation' member can only be used when "
+ "field type is set to '{}'"
+ "".format(constraint["type"].upper()))
+ constraint["collation"] = field_desc.pop("collation")
+ # "options" and "srid" fields in IndexField can be
+ # present only if "type" is set to "GEOJSON"
+ if "options" in field_desc:
+ if constraint["type"].upper() != "GEOJSON":
+ raise ProgrammingError(
+ "The 'options' member can only be used when "
+ "index type is set to 'GEOJSON'")
+ constraint["options"] = field_desc.pop("options")
+ if "srid" in field_desc:
+ if constraint["type"].upper() != "GEOJSON":
+ raise ProgrammingError(
+ "The 'srid' member can only be used when index "
+ "type is set to 'GEOJSON'")
+ constraint["srid"] = field_desc.pop("srid")
+ args["constraint"].append(constraint)
+ except KeyError as err:
+ raise ProgrammingError("Required inner member {} not found in "
+ "constraint: {}".format(err, field_desc))
+
+ for field_desc in self._fields_desc:
+ if field_desc:
+ raise ProgrammingError("Unidentified inner fields:{}"
+ "".format(field_desc))
+
+ return self._connection.execute_nonquery(
+ "mysqlx", "create_collection_index", True, args)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip-20.0.2.dist-info/INSTALLER b/monEnvTP/lib/python3.8/site-packages/pip-20.0.2.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip-20.0.2.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/monEnvTP/lib/python3.8/site-packages/pip-20.0.2.dist-info/LICENSE.txt b/monEnvTP/lib/python3.8/site-packages/pip-20.0.2.dist-info/LICENSE.txt
new file mode 100644
index 0000000000000000000000000000000000000000..737fec5c5352af3d9a6a47a0670da4bdb52c5725
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip-20.0.2.dist-info/LICENSE.txt
@@ -0,0 +1,20 @@
+Copyright (c) 2008-2019 The pip developers (see AUTHORS.txt file)
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/monEnvTP/lib/python3.8/site-packages/pip-20.0.2.dist-info/METADATA b/monEnvTP/lib/python3.8/site-packages/pip-20.0.2.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..5183c4e68338e1f31d47c924c974b5d85f917770
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip-20.0.2.dist-info/METADATA
@@ -0,0 +1,84 @@
+Metadata-Version: 2.1
+Name: pip
+Version: 20.0.2
+Summary: The PyPA recommended tool for installing Python packages.
+Home-page: https://pip.pypa.io/
+Author: The pip developers
+Author-email: pypa-dev@groups.google.com
+License: MIT
+Project-URL: Documentation, https://pip.pypa.io
+Project-URL: Source, https://github.com/pypa/pip
+Keywords: distutils easy_install egg setuptools wheel virtualenv
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Topic :: Software Development :: Build Tools
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*
+
+pip - The Python Package Installer
+==================================
+
+.. image:: https://img.shields.io/pypi/v/pip.svg
+ :target: https://pypi.org/project/pip/
+
+.. image:: https://readthedocs.org/projects/pip/badge/?version=latest
+ :target: https://pip.pypa.io/en/latest
+
+pip is the `package installer`_ for Python. You can use pip to install packages from the `Python Package Index`_ and other indexes.
+
+Please take a look at our documentation for how to install and use pip:
+
+* `Installation`_
+* `Usage`_
+
+Updates are released regularly, with a new version every 3 months. More details can be found in our documentation:
+
+* `Release notes`_
+* `Release process`_
+
+If you find bugs, need help, or want to talk to the developers please use our mailing lists or chat rooms:
+
+* `Issue tracking`_
+* `Discourse channel`_
+* `User IRC`_
+
+If you want to get involved head over to GitHub to get the source code, look at our development documentation and feel free to jump on the developer mailing lists and chat rooms:
+
+* `GitHub page`_
+* `Dev documentation`_
+* `Dev mailing list`_
+* `Dev IRC`_
+
+Code of Conduct
+---------------
+
+Everyone interacting in the pip project's codebases, issue trackers, chat
+rooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_.
+
+.. _package installer: https://packaging.python.org/guides/tool-recommendations/
+.. _Python Package Index: https://pypi.org
+.. _Installation: https://pip.pypa.io/en/stable/installing.html
+.. _Usage: https://pip.pypa.io/en/stable/
+.. _Release notes: https://pip.pypa.io/en/stable/news.html
+.. _Release process: https://pip.pypa.io/en/latest/development/release-process/
+.. _GitHub page: https://github.com/pypa/pip
+.. _Dev documentation: https://pip.pypa.io/en/latest/development
+.. _Issue tracking: https://github.com/pypa/pip/issues
+.. _Discourse channel: https://discuss.python.org/c/packaging
+.. _Dev mailing list: https://groups.google.com/forum/#!forum/pypa-dev
+.. _User IRC: https://webchat.freenode.net/?channels=%23pypa
+.. _Dev IRC: https://webchat.freenode.net/?channels=%23pypa-dev
+.. _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/
+
+
diff --git a/monEnvTP/lib/python3.8/site-packages/pip-20.0.2.dist-info/RECORD b/monEnvTP/lib/python3.8/site-packages/pip-20.0.2.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..60b797c183cf8ea31f23f7ab55844d42cdb9bfea
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip-20.0.2.dist-info/RECORD
@@ -0,0 +1,246 @@
+../../../bin/pip,sha256=6xbYpbQEH_YYiFbhaPWnckN4IxVyIXy0HRFJ82qfL1s,259
+../../../bin/pip3,sha256=6xbYpbQEH_YYiFbhaPWnckN4IxVyIXy0HRFJ82qfL1s,259
+../../../bin/pip3.8,sha256=6xbYpbQEH_YYiFbhaPWnckN4IxVyIXy0HRFJ82qfL1s,259
+pip-20.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+pip-20.0.2.dist-info/LICENSE.txt,sha256=W6Ifuwlk-TatfRU2LR7W1JMcyMj5_y1NkRkOEJvnRDE,1090
+pip-20.0.2.dist-info/METADATA,sha256=MSgjT2JTt8usp4Hopp5AGEmc-7sKR2Jd7HTMJqCoRhw,3352
+pip-20.0.2.dist-info/RECORD,,
+pip-20.0.2.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
+pip-20.0.2.dist-info/entry_points.txt,sha256=HtfDOwpUlr9s73jqLQ6wF9V0_0qvUXJwCBz7Vwx0Ue0,125
+pip-20.0.2.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+pip/__init__.py,sha256=U1AM82iShMaw90K6Yq0Q2-AZ1EsOcqQLQRB-rxwFtII,455
+pip/__main__.py,sha256=NM95x7KuQr-lwPoTjAC0d_QzLJsJjpmAoxZg0mP8s98,632
+pip/__pycache__/__init__.cpython-38.pyc,,
+pip/__pycache__/__main__.cpython-38.pyc,,
+pip/_internal/__init__.py,sha256=j5fiII6yCeZjpW7_7wAVRMM4DwE-gyARGVU4yAADDeE,517
+pip/_internal/__pycache__/__init__.cpython-38.pyc,,
+pip/_internal/__pycache__/build_env.cpython-38.pyc,,
+pip/_internal/__pycache__/cache.cpython-38.pyc,,
+pip/_internal/__pycache__/configuration.cpython-38.pyc,,
+pip/_internal/__pycache__/exceptions.cpython-38.pyc,,
+pip/_internal/__pycache__/legacy_resolve.cpython-38.pyc,,
+pip/_internal/__pycache__/locations.cpython-38.pyc,,
+pip/_internal/__pycache__/main.cpython-38.pyc,,
+pip/_internal/__pycache__/pep425tags.cpython-38.pyc,,
+pip/_internal/__pycache__/pyproject.cpython-38.pyc,,
+pip/_internal/__pycache__/self_outdated_check.cpython-38.pyc,,
+pip/_internal/__pycache__/wheel_builder.cpython-38.pyc,,
+pip/_internal/build_env.py,sha256=--aNgzIdYrCOclHMwoAdpclCpfdFE_jooRuCy5gczwg,7532
+pip/_internal/cache.py,sha256=16GrnDRLBQNlfKWIuIF6Sa-EFS78kez_w1WEjT3ykTI,11605
+pip/_internal/cli/__init__.py,sha256=FkHBgpxxb-_gd6r1FjnNhfMOzAUYyXoXKJ6abijfcFU,132
+pip/_internal/cli/__pycache__/__init__.cpython-38.pyc,,
+pip/_internal/cli/__pycache__/autocompletion.cpython-38.pyc,,
+pip/_internal/cli/__pycache__/base_command.cpython-38.pyc,,
+pip/_internal/cli/__pycache__/cmdoptions.cpython-38.pyc,,
+pip/_internal/cli/__pycache__/command_context.cpython-38.pyc,,
+pip/_internal/cli/__pycache__/main.cpython-38.pyc,,
+pip/_internal/cli/__pycache__/main_parser.cpython-38.pyc,,
+pip/_internal/cli/__pycache__/parser.cpython-38.pyc,,
+pip/_internal/cli/__pycache__/req_command.cpython-38.pyc,,
+pip/_internal/cli/__pycache__/status_codes.cpython-38.pyc,,
+pip/_internal/cli/autocompletion.py,sha256=ekGNtcDI0p7rFVc-7s4T9Tbss4Jgb7vsB649XJIblRg,6547
+pip/_internal/cli/base_command.py,sha256=v6yl5XNRqye8BT9ep8wvpMu6lylP_Hu6D95r_HqbpbQ,7948
+pip/_internal/cli/cmdoptions.py,sha256=f1TVHuu_fR3lLlMo6b367H_GsWFv26tLI9cAS-kZfE0,28114
+pip/_internal/cli/command_context.py,sha256=ygMVoTy2jpNilKT-6416gFSQpaBtrKRBbVbi2fy__EU,975
+pip/_internal/cli/main.py,sha256=8iq3bHe5lxJTB2EvKOqZ38NS0MmoS79_S1kgj4QuH8A,2610
+pip/_internal/cli/main_parser.py,sha256=W9OWeryh7ZkqELohaFh0Ko9sB98ZkSeDmnYbOZ1imBc,2819
+pip/_internal/cli/parser.py,sha256=O9djTuYQuSfObiY-NU6p4MJCfWsRUnDpE2YGA_fwols,9487
+pip/_internal/cli/req_command.py,sha256=pAUAglpTn0mUA6lRs7KN71yOm1KDabD0ySVTQTqWTSA,12463
+pip/_internal/cli/status_codes.py,sha256=F6uDG6Gj7RNKQJUDnd87QKqI16Us-t-B0wPF_4QMpWc,156
+pip/_internal/commands/__init__.py,sha256=uTSj58QlrSKeXqCUSdL-eAf_APzx5BHy1ABxb0j5ZNE,3714
+pip/_internal/commands/__pycache__/__init__.cpython-38.pyc,,
+pip/_internal/commands/__pycache__/check.cpython-38.pyc,,
+pip/_internal/commands/__pycache__/completion.cpython-38.pyc,,
+pip/_internal/commands/__pycache__/configuration.cpython-38.pyc,,
+pip/_internal/commands/__pycache__/debug.cpython-38.pyc,,
+pip/_internal/commands/__pycache__/download.cpython-38.pyc,,
+pip/_internal/commands/__pycache__/freeze.cpython-38.pyc,,
+pip/_internal/commands/__pycache__/hash.cpython-38.pyc,,
+pip/_internal/commands/__pycache__/help.cpython-38.pyc,,
+pip/_internal/commands/__pycache__/install.cpython-38.pyc,,
+pip/_internal/commands/__pycache__/list.cpython-38.pyc,,
+pip/_internal/commands/__pycache__/search.cpython-38.pyc,,
+pip/_internal/commands/__pycache__/show.cpython-38.pyc,,
+pip/_internal/commands/__pycache__/uninstall.cpython-38.pyc,,
+pip/_internal/commands/__pycache__/wheel.cpython-38.pyc,,
+pip/_internal/commands/check.py,sha256=mgLNYT3bd6Kmynwh4zzcBmVlFZ-urMo40jTgk6U405E,1505
+pip/_internal/commands/completion.py,sha256=UFQvq0Q4_B96z1bvnQyMOq82aPSu05RejbLmqeTZjC0,2975
+pip/_internal/commands/configuration.py,sha256=6riioZjMhsNSEct7dE-X8SobGodk3WERKJvuyjBje4Q,7226
+pip/_internal/commands/debug.py,sha256=a8llax2hRkxgK-tvwdJgaCaZCYPIx0fDvrlMDoYr8bQ,4209
+pip/_internal/commands/download.py,sha256=zX_0-IeFb4C8dxSmGHxk-6H5kehtyTSsdWpjNpAhSww,5007
+pip/_internal/commands/freeze.py,sha256=QS-4ib8jbKJ2wrDaDbTuyaB3Y_iJ5CQC2gAVHuAv9QU,3481
+pip/_internal/commands/hash.py,sha256=47teimfAPhpkaVbSDaafck51BT3XXYuL83lAqc5lOcE,1735
+pip/_internal/commands/help.py,sha256=Nhecq--ydFn80Gm1Zvbf9943EcRJfO0TnXUhsF0RO7s,1181
+pip/_internal/commands/install.py,sha256=T4P3J1rw7CQrZX4OUamtcoWMkTrJBfUe6gWpTfZW1bQ,27286
+pip/_internal/commands/list.py,sha256=2l0JiqHxjxDHNTCb2HZOjwwdo4duS1R0MsqZb6HSMKk,10660
+pip/_internal/commands/search.py,sha256=7Il8nKZ9mM7qF5jlnBoPvSIFY9f-0-5IbYoX3miTuZY,5148
+pip/_internal/commands/show.py,sha256=Vzsj2oX0JBl94MPyF3LV8YoMcigl8B2UsMM8zp0pH2s,6792
+pip/_internal/commands/uninstall.py,sha256=8mldFbrQecSoWDZRqxBgJkrlvx6Y9Iy7cs-2BIgtXt4,2983
+pip/_internal/commands/wheel.py,sha256=TMU5ZhjLo7BIZQApGPsYfoCsbGTnvP-N9jkgPJXhj1Y,7170
+pip/_internal/configuration.py,sha256=MgKrLFBJBkF3t2VJM4tvlnEspfSuS4scp_LhHWh53nY,14222
+pip/_internal/distributions/__init__.py,sha256=ECBUW5Gtu9TjJwyFLvim-i6kUMYVuikNh9I5asL6tbA,959
+pip/_internal/distributions/__pycache__/__init__.cpython-38.pyc,,
+pip/_internal/distributions/__pycache__/base.cpython-38.pyc,,
+pip/_internal/distributions/__pycache__/installed.cpython-38.pyc,,
+pip/_internal/distributions/__pycache__/sdist.cpython-38.pyc,,
+pip/_internal/distributions/__pycache__/wheel.cpython-38.pyc,,
+pip/_internal/distributions/base.py,sha256=ruprpM_L2T2HNi3KLUHlbHimZ1sWVw-3Q0Lb8O7TDAI,1425
+pip/_internal/distributions/installed.py,sha256=YqlkBKr6TVP1MAYS6SG8ojud21wVOYLMZ8jMLJe9MSU,760
+pip/_internal/distributions/sdist.py,sha256=D4XTMlCwgPlK69l62GLYkNSVTVe99fR5iAcVt2EbGok,4086
+pip/_internal/distributions/wheel.py,sha256=95uD-TfaYoq3KiKBdzk9YMN4RRqJ28LNoSTS2K46gek,1294
+pip/_internal/exceptions.py,sha256=6YRuwXAK6F1iyUWKIkCIpWWN2khkAn1sZOgrFA9S8Ro,10247
+pip/_internal/index/__init__.py,sha256=vpt-JeTZefh8a-FC22ZeBSXFVbuBcXSGiILhQZJaNpQ,30
+pip/_internal/index/__pycache__/__init__.cpython-38.pyc,,
+pip/_internal/index/__pycache__/collector.cpython-38.pyc,,
+pip/_internal/index/__pycache__/package_finder.cpython-38.pyc,,
+pip/_internal/index/collector.py,sha256=YS7Ix4oylU7ZbPTPFugh-244GSRqMvdHsGUG6nmz2gE,17892
+pip/_internal/index/package_finder.py,sha256=2Rg75AOpLj8BN1jyL8EI-Iw-Hv6ibJkrYVARCht3bX8,37542
+pip/_internal/legacy_resolve.py,sha256=L7R72I7CjVgJlPTggmA1j4b-H8NmxNu_dKVhrpGXGps,16277
+pip/_internal/locations.py,sha256=VifFEqhc7FWFV8QGoEM3CpECRY8Doq7kTytytxsEgx0,6734
+pip/_internal/main.py,sha256=IVBnUQ-FG7DK6617uEXRB5_QJqspAsBFmTmTesYkbdQ,437
+pip/_internal/models/__init__.py,sha256=3DHUd_qxpPozfzouoqa9g9ts1Czr5qaHfFxbnxriepM,63
+pip/_internal/models/__pycache__/__init__.cpython-38.pyc,,
+pip/_internal/models/__pycache__/candidate.cpython-38.pyc,,
+pip/_internal/models/__pycache__/format_control.cpython-38.pyc,,
+pip/_internal/models/__pycache__/index.cpython-38.pyc,,
+pip/_internal/models/__pycache__/link.cpython-38.pyc,,
+pip/_internal/models/__pycache__/scheme.cpython-38.pyc,,
+pip/_internal/models/__pycache__/search_scope.cpython-38.pyc,,
+pip/_internal/models/__pycache__/selection_prefs.cpython-38.pyc,,
+pip/_internal/models/__pycache__/target_python.cpython-38.pyc,,
+pip/_internal/models/__pycache__/wheel.cpython-38.pyc,,
+pip/_internal/models/candidate.py,sha256=Y58Bcm6oXUj0iS-yhmerlGo5CQJI2p0Ww9h6hR9zQDw,1150
+pip/_internal/models/format_control.py,sha256=ICzVjjGwfZYdX-eLLKHjMHLutEJlAGpfj09OG_eMqac,2673
+pip/_internal/models/index.py,sha256=K59A8-hVhBM20Xkahr4dTwP7OjkJyEqXH11UwHFVgqM,1060
+pip/_internal/models/link.py,sha256=y0H2ZOk0P6d1lfGUL2Pl09xFgZcRt5HwN2LElMifOpI,6827
+pip/_internal/models/scheme.py,sha256=vvhBrrno7eVDXcdKHiZWwxhPHf4VG5uSCEkC0QDR2RU,679
+pip/_internal/models/search_scope.py,sha256=2LXbU4wV8LwqdtXQXNXFYKv-IxiDI_QwSz9ZgbwtAfk,3898
+pip/_internal/models/selection_prefs.py,sha256=rPeif2KKjhTPXeMoQYffjqh10oWpXhdkxRDaPT1HO8k,1908
+pip/_internal/models/target_python.py,sha256=c-cFi6zCuo5HYbXNS3rVVpKRaHVh5yQlYEjEW23SidQ,3799
+pip/_internal/models/wheel.py,sha256=6KLuLKH5b0C5goWQXGSISRaq2UZtkHUEAU1y1Zsrwms,2766
+pip/_internal/network/__init__.py,sha256=jf6Tt5nV_7zkARBrKojIXItgejvoegVJVKUbhAa5Ioc,50
+pip/_internal/network/__pycache__/__init__.cpython-38.pyc,,
+pip/_internal/network/__pycache__/auth.cpython-38.pyc,,
+pip/_internal/network/__pycache__/cache.cpython-38.pyc,,
+pip/_internal/network/__pycache__/download.cpython-38.pyc,,
+pip/_internal/network/__pycache__/session.cpython-38.pyc,,
+pip/_internal/network/__pycache__/utils.cpython-38.pyc,,
+pip/_internal/network/__pycache__/xmlrpc.cpython-38.pyc,,
+pip/_internal/network/auth.py,sha256=K3G1ukKb3PiH8w_UnpXTz8qQsTULO-qdbfOE9zTo1fE,11119
+pip/_internal/network/cache.py,sha256=51CExcRkXWrgMZ7WsrZ6cmijKfViD5tVgKbBvJHO1IE,2394
+pip/_internal/network/download.py,sha256=3D9vdJmVwmCUMxzC-TaVI_GvVOpQna3BLEYNPCSx3Fc,6260
+pip/_internal/network/session.py,sha256=u1IXQfv21R1xv86ulyiB58-be4sYm90eFB0Wp8fVMYw,14702
+pip/_internal/network/utils.py,sha256=iiixo1OeaQ3niUWiBjg59PN6f1w7vvTww1vFriTD_IU,1959
+pip/_internal/network/xmlrpc.py,sha256=AL115M3vFJ8xiHVJneb8Hi0ZFeRvdPhblC89w25OG5s,1597
+pip/_internal/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pip/_internal/operations/__pycache__/__init__.cpython-38.pyc,,
+pip/_internal/operations/__pycache__/check.cpython-38.pyc,,
+pip/_internal/operations/__pycache__/freeze.cpython-38.pyc,,
+pip/_internal/operations/__pycache__/prepare.cpython-38.pyc,,
+pip/_internal/operations/build/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pip/_internal/operations/build/__pycache__/__init__.cpython-38.pyc,,
+pip/_internal/operations/build/__pycache__/metadata.cpython-38.pyc,,
+pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-38.pyc,,
+pip/_internal/operations/build/__pycache__/wheel.cpython-38.pyc,,
+pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-38.pyc,,
+pip/_internal/operations/build/metadata.py,sha256=yHMi5gHYXcXyHcvUPWHdO-UyOo3McFWljn_nHfM1O9c,1307
+pip/_internal/operations/build/metadata_legacy.py,sha256=4n6N7BTysqVmEpITzT2UVClyt0Peij_Im8Qm965IWB4,3957
+pip/_internal/operations/build/wheel.py,sha256=ntltdNP6D2Tpr4V0agssu6rE0F9LaBpJkYT6zSdhEbw,1469
+pip/_internal/operations/build/wheel_legacy.py,sha256=DYSxQKutwSZnmNvWkwsl2HzE2XQBxV0i0wTphjtUe90,3349
+pip/_internal/operations/check.py,sha256=a6uHG0daoWpmSPCdL7iYJaGQYZ-CRvPvTnCv2PnIIs0,5353
+pip/_internal/operations/freeze.py,sha256=td4BeRnW10EXFTZrx6VgygO3CrjqD5B9f0BGzjQm-Ew,10180
+pip/_internal/operations/install/__init__.py,sha256=mX7hyD2GNBO2mFGokDQ30r_GXv7Y_PLdtxcUv144e-s,51
+pip/_internal/operations/install/__pycache__/__init__.cpython-38.pyc,,
+pip/_internal/operations/install/__pycache__/editable_legacy.cpython-38.pyc,,
+pip/_internal/operations/install/__pycache__/legacy.cpython-38.pyc,,
+pip/_internal/operations/install/__pycache__/wheel.cpython-38.pyc,,
+pip/_internal/operations/install/editable_legacy.py,sha256=rJ_xs2qtDUjpY2-n6eYlVyZiNoKbOtZXZrYrcnIELt4,1488
+pip/_internal/operations/install/legacy.py,sha256=eBV8gHbO9sBlBc-4nuR3Sd2nikHgEcnC9khfeLiypio,4566
+pip/_internal/operations/install/wheel.py,sha256=xdCjH6uIUyg39Pf8tUaMFUN4a7eozJAFMb_wKcgQlsY,23012
+pip/_internal/operations/prepare.py,sha256=ro2teBlbBpkRJhBKraP9CoJgVLpueSk62ziWhRToXww,20942
+pip/_internal/pep425tags.py,sha256=SlIQokevkoKnXhoK3PZvXiDoj8hFKoJ7thDifDtga3k,5490
+pip/_internal/pyproject.py,sha256=VJKsrXORGiGoDPVKCQhuu4tWlQSTOhoiRlVLRNu4rx4,7400
+pip/_internal/req/__init__.py,sha256=UVaYPlHZVGRBQQPjvGC_6jJDQtewXm0ws-8Lxhg_TiY,2671
+pip/_internal/req/__pycache__/__init__.cpython-38.pyc,,
+pip/_internal/req/__pycache__/constructors.cpython-38.pyc,,
+pip/_internal/req/__pycache__/req_file.cpython-38.pyc,,
+pip/_internal/req/__pycache__/req_install.cpython-38.pyc,,
+pip/_internal/req/__pycache__/req_set.cpython-38.pyc,,
+pip/_internal/req/__pycache__/req_tracker.cpython-38.pyc,,
+pip/_internal/req/__pycache__/req_uninstall.cpython-38.pyc,,
+pip/_internal/req/constructors.py,sha256=w5-kWWVCqlSqcIBitw86yq7XGMPpKrHDfQZSE2mJ_xc,14388
+pip/_internal/req/req_file.py,sha256=ECqRUicCw5Y08R1YynZAAp8dSKQhDXoc1Q-mY3a9b6I,18485
+pip/_internal/req/req_install.py,sha256=wjsIr4lDpbVSLqANKJI9mXwRVHaRxcnj8q30UiHoLRA,30442
+pip/_internal/req/req_set.py,sha256=GsrKmupRKhNMhjkofVfCEHEHfgEvYBxClaQH5xLBQHg,8066
+pip/_internal/req/req_tracker.py,sha256=27fvVG8Y2MJS1KpU2rBMnQyUEMHG4lkHT_bzbzQK-c0,4723
+pip/_internal/req/req_uninstall.py,sha256=DWnOsuyYGju6-sylyoCm7GtUNevn9qMAVhjAGLcdXUE,23609
+pip/_internal/self_outdated_check.py,sha256=3KO1pTJUuYaiV9X0t87I9PimkGL82HbhLWbocqKZpBU,8009
+pip/_internal/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pip/_internal/utils/__pycache__/__init__.cpython-38.pyc,,
+pip/_internal/utils/__pycache__/appdirs.cpython-38.pyc,,
+pip/_internal/utils/__pycache__/compat.cpython-38.pyc,,
+pip/_internal/utils/__pycache__/deprecation.cpython-38.pyc,,
+pip/_internal/utils/__pycache__/distutils_args.cpython-38.pyc,,
+pip/_internal/utils/__pycache__/encoding.cpython-38.pyc,,
+pip/_internal/utils/__pycache__/entrypoints.cpython-38.pyc,,
+pip/_internal/utils/__pycache__/filesystem.cpython-38.pyc,,
+pip/_internal/utils/__pycache__/filetypes.cpython-38.pyc,,
+pip/_internal/utils/__pycache__/glibc.cpython-38.pyc,,
+pip/_internal/utils/__pycache__/hashes.cpython-38.pyc,,
+pip/_internal/utils/__pycache__/inject_securetransport.cpython-38.pyc,,
+pip/_internal/utils/__pycache__/logging.cpython-38.pyc,,
+pip/_internal/utils/__pycache__/marker_files.cpython-38.pyc,,
+pip/_internal/utils/__pycache__/misc.cpython-38.pyc,,
+pip/_internal/utils/__pycache__/models.cpython-38.pyc,,
+pip/_internal/utils/__pycache__/packaging.cpython-38.pyc,,
+pip/_internal/utils/__pycache__/pkg_resources.cpython-38.pyc,,
+pip/_internal/utils/__pycache__/setuptools_build.cpython-38.pyc,,
+pip/_internal/utils/__pycache__/subprocess.cpython-38.pyc,,
+pip/_internal/utils/__pycache__/temp_dir.cpython-38.pyc,,
+pip/_internal/utils/__pycache__/typing.cpython-38.pyc,,
+pip/_internal/utils/__pycache__/ui.cpython-38.pyc,,
+pip/_internal/utils/__pycache__/unpacking.cpython-38.pyc,,
+pip/_internal/utils/__pycache__/urls.cpython-38.pyc,,
+pip/_internal/utils/__pycache__/virtualenv.cpython-38.pyc,,
+pip/_internal/utils/__pycache__/wheel.cpython-38.pyc,,
+pip/_internal/utils/appdirs.py,sha256=PVo_7-IQWHa9qNuNbWSFiF2QGqeLbSAR4eLcYYhQ9ek,1307
+pip/_internal/utils/compat.py,sha256=D7FKGLBdQwWH-dHIGaoWMawDZWBYApvtJVL1kFPJ930,8869
+pip/_internal/utils/deprecation.py,sha256=pBnNogoA4UGTxa_JDnPXBRRYpKMbExAhXpBwAwklOBs,3318
+pip/_internal/utils/distutils_args.py,sha256=a56mblNxk9BGifbpEETG61mmBrqhjtjRkJ4HYn-oOEE,1350
+pip/_internal/utils/encoding.py,sha256=hxZz0t3Whw3d4MHQEiofxalTlfKwxFdLc8fpeGfhKo8,1320
+pip/_internal/utils/entrypoints.py,sha256=vHcNpnksCv6mllihU6hfifdsKPEjwcaJ1aLIXEaynaU,1152
+pip/_internal/utils/filesystem.py,sha256=PXa3vMcz4mbEKtkD0joFI8pBwddLQxhfPFOkVH5xjfE,5255
+pip/_internal/utils/filetypes.py,sha256=R2FwzoeX7b-rZALOXx5cuO8VPPMhUQ4ne7wm3n3IcWA,571
+pip/_internal/utils/glibc.py,sha256=LOeNGgawCKS-4ke9fii78fwXD73dtNav3uxz1Bf-Ab8,3297
+pip/_internal/utils/hashes.py,sha256=my-wSnAWEDvl_8rQaOQcVIWjwh1-f_QiEvGy9TPf53U,3942
+pip/_internal/utils/inject_securetransport.py,sha256=M17ZlFVY66ApgeASVjKKLKNz0LAfk-SyU0HZ4ZB6MmI,810
+pip/_internal/utils/logging.py,sha256=aJL7NldPhS5KGFof6Qt3o3MG5cjm5TOoo7bGRu9_wsg,13033
+pip/_internal/utils/marker_files.py,sha256=CO5djQlrPIozJpJybViH_insoAaBGY1aqEt6-cC-iW0,741
+pip/_internal/utils/misc.py,sha256=uIb58Hiu_g2HRORo2aMcgnW_7R5d-5wUAuoW0fA2ZME,26085
+pip/_internal/utils/models.py,sha256=IA0hw_T4awQzui0kqfIEASm5yLtgZAB08ag59Nip5G8,1148
+pip/_internal/utils/packaging.py,sha256=VtiwcAAL7LBi7tGL2je7LeW4bE11KMHGCsJ1NZY5XtM,3035
+pip/_internal/utils/pkg_resources.py,sha256=ZX-k7V5q_aNWyDse92nN7orN1aCpRLsaxzpkBZ1XKzU,1254
+pip/_internal/utils/setuptools_build.py,sha256=DouaVolV9olDDFIIN9IszaL-FHdNaZt10ufOZFH9ZAU,5070
+pip/_internal/utils/subprocess.py,sha256=Ph3x5eHQBxFotyGhpZN8asSMBud-BBkmgaNfARG-di8,9922
+pip/_internal/utils/temp_dir.py,sha256=87Ib8aNic_hoSDEmUYJHTQIn5-prL2AYL5u_yZ3s4sI,7768
+pip/_internal/utils/typing.py,sha256=xkYwOeHlf4zsHXBDC4310HtEqwhQcYXFPq2h35Tcrl0,1401
+pip/_internal/utils/ui.py,sha256=0FNxXlGtbpPtTviv2oXS9t8bQG_NBdfUgP4GbubhS9U,13911
+pip/_internal/utils/unpacking.py,sha256=M944JTSiapBOSKLWu7lbawpVHSE7flfzZTEr3TAG7v8,9438
+pip/_internal/utils/urls.py,sha256=aNV9wq5ClUmrz6sG-al7hEWJ4ToitOy7l82CmFGFNW8,1481
+pip/_internal/utils/virtualenv.py,sha256=Q3S1WPlI7JWpGOT2jUVJ8l2chm_k7VPJ9cHA_cUluEU,3396
+pip/_internal/utils/wheel.py,sha256=grTRwZtMQwApwbbSPmRVLtac6FKy6SVKeCXNkWyyePA,7302
+pip/_internal/vcs/__init__.py,sha256=viJxJRqRE_mVScum85bgQIXAd6o0ozFt18VpC-qIJrM,617
+pip/_internal/vcs/__pycache__/__init__.cpython-38.pyc,,
+pip/_internal/vcs/__pycache__/bazaar.cpython-38.pyc,,
+pip/_internal/vcs/__pycache__/git.cpython-38.pyc,,
+pip/_internal/vcs/__pycache__/mercurial.cpython-38.pyc,,
+pip/_internal/vcs/__pycache__/subversion.cpython-38.pyc,,
+pip/_internal/vcs/__pycache__/versioncontrol.cpython-38.pyc,,
+pip/_internal/vcs/bazaar.py,sha256=84q1-kj1_nJ9AMzMu8RmMp-riRZu81M7K9kowcYgi3U,3957
+pip/_internal/vcs/git.py,sha256=CdLz3DTsZsLMLPZpEuUwiS40npvDaVB1CNRzoXgcuJQ,14352
+pip/_internal/vcs/mercurial.py,sha256=2mg7BdYI_Fe00fF6omaNccFQLPHBsDBG5CAEzvqn5sA,5110
+pip/_internal/vcs/subversion.py,sha256=Fpwy71AmuqXnoKi6h1SrXRtPjEMn8fieuM1O4j01IBg,12292
+pip/_internal/vcs/versioncontrol.py,sha256=nqoaM1_rzx24WnHtihXA8RcPpnUae0sV2sR_LS_5HFA,22600
+pip/_internal/wheel_builder.py,sha256=gr9jE14W5ZuYblpldo-tpRuyG0e0AVmHLttImuAvXlE,9441
+pip/_vendor/__init__.py,sha256=RcHf8jwLPL0ZEaa6uMhTSfyCrA_TpWgDWAW5br9xD7Y,4975
+pip/_vendor/__pycache__/__init__.cpython-38.pyc,,
diff --git a/monEnvTP/lib/python3.8/site-packages/pip-20.0.2.dist-info/WHEEL b/monEnvTP/lib/python3.8/site-packages/pip-20.0.2.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..ef99c6cf3283b50a273ac4c6d009a0aa85597070
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip-20.0.2.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.34.2)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/monEnvTP/lib/python3.8/site-packages/pip-20.0.2.dist-info/entry_points.txt b/monEnvTP/lib/python3.8/site-packages/pip-20.0.2.dist-info/entry_points.txt
new file mode 100644
index 0000000000000000000000000000000000000000..d48bd8a85e683c7a9607f3f418f50d11445bdf40
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip-20.0.2.dist-info/entry_points.txt
@@ -0,0 +1,5 @@
+[console_scripts]
+pip = pip._internal.cli.main:main
+pip3 = pip._internal.cli.main:main
+pip3.8 = pip._internal.cli.main:main
+
diff --git a/monEnvTP/lib/python3.8/site-packages/pip-20.0.2.dist-info/top_level.txt b/monEnvTP/lib/python3.8/site-packages/pip-20.0.2.dist-info/top_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip-20.0.2.dist-info/top_level.txt
@@ -0,0 +1 @@
+pip
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/__init__.py b/monEnvTP/lib/python3.8/site-packages/pip/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..827a4e20a7b0a7824ae863f97f0b0c1c38408030
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/__init__.py
@@ -0,0 +1,18 @@
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import List, Optional
+
+
+__version__ = "20.0.2"
+
+
+def main(args=None):
+ # type: (Optional[List[str]]) -> int
+ """This is an internal API only meant for use by pip's own console scripts.
+
+ For additional details, see https://github.com/pypa/pip/issues/7498.
+ """
+ from pip._internal.utils.entrypoints import _wrapper
+
+ return _wrapper(args)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/__main__.py b/monEnvTP/lib/python3.8/site-packages/pip/__main__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e83b9e056b321828cbc8990f719ebb4a729c9bea
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/__main__.py
@@ -0,0 +1,19 @@
+from __future__ import absolute_import
+
+import os
+import sys
+
+# If we are running from a wheel, add the wheel to sys.path
+# This allows the usage python pip-*.whl/pip install pip-*.whl
+if __package__ == '':
+ # __file__ is pip-*.whl/pip/__main__.py
+ # first dirname call strips of '/__main__.py', second strips off '/pip'
+ # Resulting path is the name of the wheel itself
+ # Add that to sys.path so we can import pip
+ path = os.path.dirname(os.path.dirname(__file__))
+ sys.path.insert(0, path)
+
+from pip._internal.cli.main import main as _main # isort:skip # noqa
+
+if __name__ == '__main__':
+ sys.exit(_main())
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9c2139a27a780b4176f3a4512d25cd8e34ae17ef
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/__pycache__/__main__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/__pycache__/__main__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..37fc4f38f83b8fbad9454b2767d794de3fe45033
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/__pycache__/__main__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/__init__.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..3aa8a4693ff0893a87364964f06bad8075e4834b
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/__init__.py
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+import pip._internal.utils.inject_securetransport # noqa
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional, List
+
+
+def main(args=None):
+ # type: (Optional[List[str]]) -> int
+ """This is preserved for old console scripts that may still be referencing
+ it.
+
+ For additional details, see https://github.com/pypa/pip/issues/7498.
+ """
+ from pip._internal.utils.entrypoints import _wrapper
+
+ return _wrapper(args)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..788c712ac53aecda04ddf7d794c84bea07914287
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/build_env.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/build_env.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..917efd5c106cbbbb4f52ed86c0e8705e6885f6d6
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/build_env.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/cache.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/cache.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..afefa26e110993fa7aee227b2b35e691d909b69b
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/cache.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/configuration.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/configuration.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9556810d1aeebbf66569f0cb4bd1c266ef2ad42b
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/configuration.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/exceptions.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/exceptions.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4316c223e8a0e3376712e55718048214a7d375f9
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/exceptions.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/legacy_resolve.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/legacy_resolve.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..00b8b509185e443f323b7c7512e4e52cd36e88d6
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/legacy_resolve.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/locations.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/locations.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4a5cbe3003bf142342291b0286a5063488306d24
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/locations.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/main.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/main.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..32fd8b5958ddf12934921bd869cb4d342ee23d57
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/main.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/pep425tags.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/pep425tags.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5355b1819d00f5ab1cfa23185d90cf9a29fdc755
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/pep425tags.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/pyproject.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/pyproject.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6f980005081017acb0857f8426a76569f370e202
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/pyproject.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..226aa419ce5a4cbea57fb0d4ff132071722a4bd0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a907339aa560d21cea1a27ef0a3dee808087c63f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/build_env.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/build_env.py
new file mode 100644
index 0000000000000000000000000000000000000000..f55f0e6b8d9e73ff9b751ce2f0c2513123d17100
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/build_env.py
@@ -0,0 +1,221 @@
+"""Build Environment used for isolation during sdist building
+"""
+
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+# mypy: disallow-untyped-defs=False
+
+import logging
+import os
+import sys
+import textwrap
+from collections import OrderedDict
+from distutils.sysconfig import get_python_lib
+from sysconfig import get_paths
+
+from pip._vendor.pkg_resources import Requirement, VersionConflict, WorkingSet
+
+from pip import __file__ as pip_location
+from pip._internal.utils.subprocess import call_subprocess
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.ui import open_spinner
+
+if MYPY_CHECK_RUNNING:
+ from typing import Tuple, Set, Iterable, Optional, List
+ from pip._internal.index.package_finder import PackageFinder
+
+logger = logging.getLogger(__name__)
+
+
+class _Prefix:
+
+ def __init__(self, path):
+ # type: (str) -> None
+ self.path = path
+ self.setup = False
+ self.bin_dir = get_paths(
+ 'nt' if os.name == 'nt' else 'posix_prefix',
+ vars={'base': path, 'platbase': path}
+ )['scripts']
+ # Note: prefer distutils' sysconfig to get the
+ # library paths so PyPy is correctly supported.
+ purelib = get_python_lib(plat_specific=False, prefix=path)
+ platlib = get_python_lib(plat_specific=True, prefix=path)
+ if purelib == platlib:
+ self.lib_dirs = [purelib]
+ else:
+ self.lib_dirs = [purelib, platlib]
+
+
+class BuildEnvironment(object):
+ """Creates and manages an isolated environment to install build deps
+ """
+
+ def __init__(self):
+ # type: () -> None
+ self._temp_dir = TempDirectory(kind="build-env")
+
+ self._prefixes = OrderedDict((
+ (name, _Prefix(os.path.join(self._temp_dir.path, name)))
+ for name in ('normal', 'overlay')
+ ))
+
+ self._bin_dirs = [] # type: List[str]
+ self._lib_dirs = [] # type: List[str]
+ for prefix in reversed(list(self._prefixes.values())):
+ self._bin_dirs.append(prefix.bin_dir)
+ self._lib_dirs.extend(prefix.lib_dirs)
+
+ # Customize site to:
+ # - ensure .pth files are honored
+ # - prevent access to system site packages
+ system_sites = {
+ os.path.normcase(site) for site in (
+ get_python_lib(plat_specific=False),
+ get_python_lib(plat_specific=True),
+ )
+ }
+ self._site_dir = os.path.join(self._temp_dir.path, 'site')
+ if not os.path.exists(self._site_dir):
+ os.mkdir(self._site_dir)
+ with open(os.path.join(self._site_dir, 'sitecustomize.py'), 'w') as fp:
+ fp.write(textwrap.dedent(
+ '''
+ import os, site, sys
+
+ # First, drop system-sites related paths.
+ original_sys_path = sys.path[:]
+ known_paths = set()
+ for path in {system_sites!r}:
+ site.addsitedir(path, known_paths=known_paths)
+ system_paths = set(
+ os.path.normcase(path)
+ for path in sys.path[len(original_sys_path):]
+ )
+ original_sys_path = [
+ path for path in original_sys_path
+ if os.path.normcase(path) not in system_paths
+ ]
+ sys.path = original_sys_path
+
+ # Second, add lib directories.
+ # ensuring .pth file are processed.
+ for path in {lib_dirs!r}:
+ assert not path in sys.path
+ site.addsitedir(path)
+ '''
+ ).format(system_sites=system_sites, lib_dirs=self._lib_dirs))
+
+ def __enter__(self):
+ self._save_env = {
+ name: os.environ.get(name, None)
+ for name in ('PATH', 'PYTHONNOUSERSITE', 'PYTHONPATH')
+ }
+
+ path = self._bin_dirs[:]
+ old_path = self._save_env['PATH']
+ if old_path:
+ path.extend(old_path.split(os.pathsep))
+
+ pythonpath = [self._site_dir]
+
+ os.environ.update({
+ 'PATH': os.pathsep.join(path),
+ 'PYTHONNOUSERSITE': '1',
+ 'PYTHONPATH': os.pathsep.join(pythonpath),
+ })
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ for varname, old_value in self._save_env.items():
+ if old_value is None:
+ os.environ.pop(varname, None)
+ else:
+ os.environ[varname] = old_value
+
+ def cleanup(self):
+ # type: () -> None
+ self._temp_dir.cleanup()
+
+ def check_requirements(self, reqs):
+ # type: (Iterable[str]) -> Tuple[Set[Tuple[str, str]], Set[str]]
+ """Return 2 sets:
+ - conflicting requirements: set of (installed, wanted) reqs tuples
+ - missing requirements: set of reqs
+ """
+ missing = set()
+ conflicting = set()
+ if reqs:
+ ws = WorkingSet(self._lib_dirs)
+ for req in reqs:
+ try:
+ if ws.find(Requirement.parse(req)) is None:
+ missing.add(req)
+ except VersionConflict as e:
+ conflicting.add((str(e.args[0].as_requirement()),
+ str(e.args[1])))
+ return conflicting, missing
+
+ def install_requirements(
+ self,
+ finder, # type: PackageFinder
+ requirements, # type: Iterable[str]
+ prefix_as_string, # type: str
+ message # type: Optional[str]
+ ):
+ # type: (...) -> None
+ prefix = self._prefixes[prefix_as_string]
+ assert not prefix.setup
+ prefix.setup = True
+ if not requirements:
+ return
+ args = [
+ sys.executable, os.path.dirname(pip_location), 'install',
+ '--ignore-installed', '--no-user', '--prefix', prefix.path,
+ '--no-warn-script-location',
+ ] # type: List[str]
+ if logger.getEffectiveLevel() <= logging.DEBUG:
+ args.append('-v')
+ for format_control in ('no_binary', 'only_binary'):
+ formats = getattr(finder.format_control, format_control)
+ args.extend(('--' + format_control.replace('_', '-'),
+ ','.join(sorted(formats or {':none:'}))))
+
+ index_urls = finder.index_urls
+ if index_urls:
+ args.extend(['-i', index_urls[0]])
+ for extra_index in index_urls[1:]:
+ args.extend(['--extra-index-url', extra_index])
+ else:
+ args.append('--no-index')
+ for link in finder.find_links:
+ args.extend(['--find-links', link])
+
+ for host in finder.trusted_hosts:
+ args.extend(['--trusted-host', host])
+ if finder.allow_all_prereleases:
+ args.append('--pre')
+ args.append('--')
+ args.extend(requirements)
+ with open_spinner(message) as spinner:
+ call_subprocess(args, spinner=spinner)
+
+
+class NoOpBuildEnvironment(BuildEnvironment):
+ """A no-op drop-in replacement for BuildEnvironment
+ """
+
+ def __init__(self):
+ pass
+
+ def __enter__(self):
+ pass
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ pass
+
+ def cleanup(self):
+ pass
+
+ def install_requirements(self, finder, requirements, prefix, message):
+ raise NotImplementedError()
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/cache.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cache.py
new file mode 100644
index 0000000000000000000000000000000000000000..abecd78f8d988dd5856855aff3a89ab270ca73a9
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cache.py
@@ -0,0 +1,329 @@
+"""Cache Management
+"""
+
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+import hashlib
+import json
+import logging
+import os
+
+from pip._vendor.packaging.tags import interpreter_name, interpreter_version
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.exceptions import InvalidWheelFilename
+from pip._internal.models.link import Link
+from pip._internal.models.wheel import Wheel
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.urls import path_to_url
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional, Set, List, Any, Dict
+
+ from pip._vendor.packaging.tags import Tag
+
+ from pip._internal.models.format_control import FormatControl
+
+logger = logging.getLogger(__name__)
+
+
+def _hash_dict(d):
+ # type: (Dict[str, str]) -> str
+ """Return a stable sha224 of a dictionary."""
+ s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True)
+ return hashlib.sha224(s.encode("ascii")).hexdigest()
+
+
+class Cache(object):
+ """An abstract class - provides cache directories for data from links
+
+
+ :param cache_dir: The root of the cache.
+ :param format_control: An object of FormatControl class to limit
+ binaries being read from the cache.
+ :param allowed_formats: which formats of files the cache should store.
+ ('binary' and 'source' are the only allowed values)
+ """
+
+ def __init__(self, cache_dir, format_control, allowed_formats):
+ # type: (str, FormatControl, Set[str]) -> None
+ super(Cache, self).__init__()
+ assert not cache_dir or os.path.isabs(cache_dir)
+ self.cache_dir = cache_dir or None
+ self.format_control = format_control
+ self.allowed_formats = allowed_formats
+
+ _valid_formats = {"source", "binary"}
+ assert self.allowed_formats.union(_valid_formats) == _valid_formats
+
+ def _get_cache_path_parts_legacy(self, link):
+ # type: (Link) -> List[str]
+ """Get parts of part that must be os.path.joined with cache_dir
+
+ Legacy cache key (pip < 20) for compatibility with older caches.
+ """
+
+ # We want to generate an url to use as our cache key, we don't want to
+ # just re-use the URL because it might have other items in the fragment
+ # and we don't care about those.
+ key_parts = [link.url_without_fragment]
+ if link.hash_name is not None and link.hash is not None:
+ key_parts.append("=".join([link.hash_name, link.hash]))
+ key_url = "#".join(key_parts)
+
+ # Encode our key url with sha224, we'll use this because it has similar
+ # security properties to sha256, but with a shorter total output (and
+ # thus less secure). However the differences don't make a lot of
+ # difference for our use case here.
+ hashed = hashlib.sha224(key_url.encode()).hexdigest()
+
+ # We want to nest the directories some to prevent having a ton of top
+ # level directories where we might run out of sub directories on some
+ # FS.
+ parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
+
+ return parts
+
+ def _get_cache_path_parts(self, link):
+ # type: (Link) -> List[str]
+ """Get parts of part that must be os.path.joined with cache_dir
+ """
+
+ # We want to generate an url to use as our cache key, we don't want to
+ # just re-use the URL because it might have other items in the fragment
+ # and we don't care about those.
+ key_parts = {"url": link.url_without_fragment}
+ if link.hash_name is not None and link.hash is not None:
+ key_parts[link.hash_name] = link.hash
+ if link.subdirectory_fragment:
+ key_parts["subdirectory"] = link.subdirectory_fragment
+
+ # Include interpreter name, major and minor version in cache key
+ # to cope with ill-behaved sdists that build a different wheel
+ # depending on the python version their setup.py is being run on,
+ # and don't encode the difference in compatibility tags.
+ # https://github.com/pypa/pip/issues/7296
+ key_parts["interpreter_name"] = interpreter_name()
+ key_parts["interpreter_version"] = interpreter_version()
+
+ # Encode our key url with sha224, we'll use this because it has similar
+ # security properties to sha256, but with a shorter total output (and
+ # thus less secure). However the differences don't make a lot of
+ # difference for our use case here.
+ hashed = _hash_dict(key_parts)
+
+ # We want to nest the directories some to prevent having a ton of top
+ # level directories where we might run out of sub directories on some
+ # FS.
+ parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
+
+ return parts
+
+ def _get_candidates(self, link, canonical_package_name):
+ # type: (Link, Optional[str]) -> List[Any]
+ can_not_cache = (
+ not self.cache_dir or
+ not canonical_package_name or
+ not link
+ )
+ if can_not_cache:
+ return []
+
+ formats = self.format_control.get_allowed_formats(
+ canonical_package_name
+ )
+ if not self.allowed_formats.intersection(formats):
+ return []
+
+ candidates = []
+ path = self.get_path_for_link(link)
+ if os.path.isdir(path):
+ for candidate in os.listdir(path):
+ candidates.append((candidate, path))
+ # TODO remove legacy path lookup in pip>=21
+ legacy_path = self.get_path_for_link_legacy(link)
+ if os.path.isdir(legacy_path):
+ for candidate in os.listdir(legacy_path):
+ candidates.append((candidate, legacy_path))
+ return candidates
+
+ def get_path_for_link_legacy(self, link):
+ # type: (Link) -> str
+ raise NotImplementedError()
+
+ def get_path_for_link(self, link):
+ # type: (Link) -> str
+ """Return a directory to store cached items in for link.
+ """
+ raise NotImplementedError()
+
+ def get(
+ self,
+ link, # type: Link
+ package_name, # type: Optional[str]
+ supported_tags, # type: List[Tag]
+ ):
+ # type: (...) -> Link
+ """Returns a link to a cached item if it exists, otherwise returns the
+ passed link.
+ """
+ raise NotImplementedError()
+
+ def cleanup(self):
+ # type: () -> None
+ pass
+
+
+class SimpleWheelCache(Cache):
+ """A cache of wheels for future installs.
+ """
+
+ def __init__(self, cache_dir, format_control):
+ # type: (str, FormatControl) -> None
+ super(SimpleWheelCache, self).__init__(
+ cache_dir, format_control, {"binary"}
+ )
+
+ def get_path_for_link_legacy(self, link):
+ # type: (Link) -> str
+ parts = self._get_cache_path_parts_legacy(link)
+ return os.path.join(self.cache_dir, "wheels", *parts)
+
+ def get_path_for_link(self, link):
+ # type: (Link) -> str
+ """Return a directory to store cached wheels for link
+
+ Because there are M wheels for any one sdist, we provide a directory
+ to cache them in, and then consult that directory when looking up
+ cache hits.
+
+ We only insert things into the cache if they have plausible version
+ numbers, so that we don't contaminate the cache with things that were
+ not unique. E.g. ./package might have dozens of installs done for it
+ and build a version of 0.0...and if we built and cached a wheel, we'd
+ end up using the same wheel even if the source has been edited.
+
+ :param link: The link of the sdist for which this will cache wheels.
+ """
+ parts = self._get_cache_path_parts(link)
+
+ # Store wheels within the root cache_dir
+ return os.path.join(self.cache_dir, "wheels", *parts)
+
+ def get(
+ self,
+ link, # type: Link
+ package_name, # type: Optional[str]
+ supported_tags, # type: List[Tag]
+ ):
+ # type: (...) -> Link
+ candidates = []
+
+ if not package_name:
+ return link
+
+ canonical_package_name = canonicalize_name(package_name)
+ for wheel_name, wheel_dir in self._get_candidates(
+ link, canonical_package_name
+ ):
+ try:
+ wheel = Wheel(wheel_name)
+ except InvalidWheelFilename:
+ continue
+ if canonicalize_name(wheel.name) != canonical_package_name:
+ logger.debug(
+ "Ignoring cached wheel {} for {} as it "
+ "does not match the expected distribution name {}.".format(
+ wheel_name, link, package_name
+ )
+ )
+ continue
+ if not wheel.supported(supported_tags):
+ # Built for a different python/arch/etc
+ continue
+ candidates.append(
+ (
+ wheel.support_index_min(supported_tags),
+ wheel_name,
+ wheel_dir,
+ )
+ )
+
+ if not candidates:
+ return link
+
+ _, wheel_name, wheel_dir = min(candidates)
+ return Link(path_to_url(os.path.join(wheel_dir, wheel_name)))
+
+
+class EphemWheelCache(SimpleWheelCache):
+ """A SimpleWheelCache that creates it's own temporary cache directory
+ """
+
+ def __init__(self, format_control):
+ # type: (FormatControl) -> None
+ self._temp_dir = TempDirectory(kind="ephem-wheel-cache")
+
+ super(EphemWheelCache, self).__init__(
+ self._temp_dir.path, format_control
+ )
+
+ def cleanup(self):
+ # type: () -> None
+ self._temp_dir.cleanup()
+
+
+class WheelCache(Cache):
+ """Wraps EphemWheelCache and SimpleWheelCache into a single Cache
+
+ This Cache allows for gracefully degradation, using the ephem wheel cache
+ when a certain link is not found in the simple wheel cache first.
+ """
+
+ def __init__(self, cache_dir, format_control):
+ # type: (str, FormatControl) -> None
+ super(WheelCache, self).__init__(
+ cache_dir, format_control, {'binary'}
+ )
+ self._wheel_cache = SimpleWheelCache(cache_dir, format_control)
+ self._ephem_cache = EphemWheelCache(format_control)
+
+ def get_path_for_link_legacy(self, link):
+ # type: (Link) -> str
+ return self._wheel_cache.get_path_for_link_legacy(link)
+
+ def get_path_for_link(self, link):
+ # type: (Link) -> str
+ return self._wheel_cache.get_path_for_link(link)
+
+ def get_ephem_path_for_link(self, link):
+ # type: (Link) -> str
+ return self._ephem_cache.get_path_for_link(link)
+
+ def get(
+ self,
+ link, # type: Link
+ package_name, # type: Optional[str]
+ supported_tags, # type: List[Tag]
+ ):
+ # type: (...) -> Link
+ retval = self._wheel_cache.get(
+ link=link,
+ package_name=package_name,
+ supported_tags=supported_tags,
+ )
+ if retval is not link:
+ return retval
+
+ return self._ephem_cache.get(
+ link=link,
+ package_name=package_name,
+ supported_tags=supported_tags,
+ )
+
+ def cleanup(self):
+ # type: () -> None
+ self._wheel_cache.cleanup()
+ self._ephem_cache.cleanup()
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/__init__.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e589bb917e23823e25f9fff7e0849c4d6d4a62bc
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/__init__.py
@@ -0,0 +1,4 @@
+"""Subpackage containing all of pip's command line interface related code
+"""
+
+# This file intentionally does not import submodules
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..20af11735ca183196cde98e90d094f68f4bd128f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f189eff8060b2a9ce9510bb0f2132b54984fec3c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f80ada8e358307795ce2228b8d2bcf4c7662d8e9
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b5117e7b05a74c5d6f729fd127ef4d128afdb5fc
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8e05af4907a252cd56fb950e8ce5488ddafe6f69
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/main.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/main.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f8f85b04ee23cb78f6d8ac2f3dad5cebbafaa739
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/main.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c08105fe77b1a894213fc3ab4016b08c0250ef8d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/parser.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/parser.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e9bab1ec9d95aa207d6bf28cd9eb55def8e3b1df
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/parser.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..068c3505047e0a84d62a1e3ab30f6eb4ef91d69c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c0292aafb86955f6497148c097d08bcfc78c1189
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/autocompletion.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/autocompletion.py
new file mode 100644
index 0000000000000000000000000000000000000000..329de602513d7bb868799a49d36d3f081a79e441
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/autocompletion.py
@@ -0,0 +1,164 @@
+"""Logic that powers autocompletion installed by ``pip completion``.
+"""
+
+import optparse
+import os
+import sys
+from itertools import chain
+
+from pip._internal.cli.main_parser import create_main_parser
+from pip._internal.commands import commands_dict, create_command
+from pip._internal.utils.misc import get_installed_distributions
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Any, Iterable, List, Optional
+
+
+def autocomplete():
+ # type: () -> None
+ """Entry Point for completion of main and subcommand options.
+ """
+ # Don't complete if user hasn't sourced bash_completion file.
+ if 'PIP_AUTO_COMPLETE' not in os.environ:
+ return
+ cwords = os.environ['COMP_WORDS'].split()[1:]
+ cword = int(os.environ['COMP_CWORD'])
+ try:
+ current = cwords[cword - 1]
+ except IndexError:
+ current = ''
+
+ parser = create_main_parser()
+ subcommands = list(commands_dict)
+ options = []
+
+ # subcommand
+ subcommand_name = None # type: Optional[str]
+ for word in cwords:
+ if word in subcommands:
+ subcommand_name = word
+ break
+ # subcommand options
+ if subcommand_name is not None:
+ # special case: 'help' subcommand has no options
+ if subcommand_name == 'help':
+ sys.exit(1)
+ # special case: list locally installed dists for show and uninstall
+ should_list_installed = (
+ subcommand_name in ['show', 'uninstall'] and
+ not current.startswith('-')
+ )
+ if should_list_installed:
+ installed = []
+ lc = current.lower()
+ for dist in get_installed_distributions(local_only=True):
+ if dist.key.startswith(lc) and dist.key not in cwords[1:]:
+ installed.append(dist.key)
+ # if there are no dists installed, fall back to option completion
+ if installed:
+ for dist in installed:
+ print(dist)
+ sys.exit(1)
+
+ subcommand = create_command(subcommand_name)
+
+ for opt in subcommand.parser.option_list_all:
+ if opt.help != optparse.SUPPRESS_HELP:
+ for opt_str in opt._long_opts + opt._short_opts:
+ options.append((opt_str, opt.nargs))
+
+ # filter out previously specified options from available options
+ prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]]
+ options = [(x, v) for (x, v) in options if x not in prev_opts]
+ # filter options by current input
+ options = [(k, v) for k, v in options if k.startswith(current)]
+ # get completion type given cwords and available subcommand options
+ completion_type = get_path_completion_type(
+ cwords, cword, subcommand.parser.option_list_all,
+ )
+ # get completion files and directories if ``completion_type`` is
+ # ``<file>``, ``<dir>`` or ``<path>``
+ if completion_type:
+ paths = auto_complete_paths(current, completion_type)
+ options = [(path, 0) for path in paths]
+ for option in options:
+ opt_label = option[0]
+ # append '=' to options which require args
+ if option[1] and option[0][:2] == "--":
+ opt_label += '='
+ print(opt_label)
+ else:
+ # show main parser options only when necessary
+
+ opts = [i.option_list for i in parser.option_groups]
+ opts.append(parser.option_list)
+ flattened_opts = chain.from_iterable(opts)
+ if current.startswith('-'):
+ for opt in flattened_opts:
+ if opt.help != optparse.SUPPRESS_HELP:
+ subcommands += opt._long_opts + opt._short_opts
+ else:
+ # get completion type given cwords and all available options
+ completion_type = get_path_completion_type(cwords, cword,
+ flattened_opts)
+ if completion_type:
+ subcommands = list(auto_complete_paths(current,
+ completion_type))
+
+ print(' '.join([x for x in subcommands if x.startswith(current)]))
+ sys.exit(1)
+
+
+def get_path_completion_type(cwords, cword, opts):
+ # type: (List[str], int, Iterable[Any]) -> Optional[str]
+ """Get the type of path completion (``file``, ``dir``, ``path`` or None)
+
+ :param cwords: same as the environmental variable ``COMP_WORDS``
+ :param cword: same as the environmental variable ``COMP_CWORD``
+ :param opts: The available options to check
+ :return: path completion type (``file``, ``dir``, ``path`` or None)
+ """
+ if cword < 2 or not cwords[cword - 2].startswith('-'):
+ return None
+ for opt in opts:
+ if opt.help == optparse.SUPPRESS_HELP:
+ continue
+ for o in str(opt).split('/'):
+ if cwords[cword - 2].split('=')[0] == o:
+ if not opt.metavar or any(
+ x in ('path', 'file', 'dir')
+ for x in opt.metavar.split('/')):
+ return opt.metavar
+ return None
+
+
+def auto_complete_paths(current, completion_type):
+ # type: (str, str) -> Iterable[str]
+ """If ``completion_type`` is ``file`` or ``path``, list all regular files
+ and directories starting with ``current``; otherwise only list directories
+ starting with ``current``.
+
+ :param current: The word to be completed
+ :param completion_type: path completion type(`file`, `path` or `dir`)i
+ :return: A generator of regular files and/or directories
+ """
+ directory, filename = os.path.split(current)
+ current_path = os.path.abspath(directory)
+ # Don't complete paths if they can't be accessed
+ if not os.access(current_path, os.R_OK):
+ return
+ filename = os.path.normcase(filename)
+ # list all files that start with ``filename``
+ file_list = (x for x in os.listdir(current_path)
+ if os.path.normcase(x).startswith(filename))
+ for f in file_list:
+ opt = os.path.join(current_path, f)
+ comp_file = os.path.normcase(os.path.join(directory, f))
+ # complete regular files when there is not ``<dir>`` after option
+ # complete directories when there is ``<file>``, ``<path>`` or
+ # ``<dir>``after option
+ if completion_type != 'dir' and os.path.isfile(opt):
+ yield comp_file
+ elif os.path.isdir(opt):
+ yield os.path.join(comp_file, '')
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/base_command.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/base_command.py
new file mode 100644
index 0000000000000000000000000000000000000000..628faa3eee0e441b8fed0eea9c6e4b74222ebb3f
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/base_command.py
@@ -0,0 +1,226 @@
+"""Base Command class, and related routines"""
+
+from __future__ import absolute_import, print_function
+
+import logging
+import logging.config
+import optparse
+import os
+import platform
+import sys
+import traceback
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.command_context import CommandContextMixIn
+from pip._internal.cli.parser import (
+ ConfigOptionParser,
+ UpdatingDefaultsHelpFormatter,
+)
+from pip._internal.cli.status_codes import (
+ ERROR,
+ PREVIOUS_BUILD_DIR_ERROR,
+ SUCCESS,
+ UNKNOWN_ERROR,
+ VIRTUALENV_NOT_FOUND,
+)
+from pip._internal.exceptions import (
+ BadCommand,
+ CommandError,
+ InstallationError,
+ PreviousBuildDirError,
+ UninstallationError,
+)
+from pip._internal.utils.deprecation import deprecated
+from pip._internal.utils.filesystem import check_path_owner
+from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging
+from pip._internal.utils.misc import get_prog, normalize_path
+from pip._internal.utils.temp_dir import global_tempdir_manager
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+if MYPY_CHECK_RUNNING:
+ from typing import List, Tuple, Any
+ from optparse import Values
+
+__all__ = ['Command']
+
+logger = logging.getLogger(__name__)
+
+
+class Command(CommandContextMixIn):
+ usage = None # type: str
+ ignore_require_venv = False # type: bool
+
+ def __init__(self, name, summary, isolated=False):
+ # type: (str, str, bool) -> None
+ super(Command, self).__init__()
+ parser_kw = {
+ 'usage': self.usage,
+ 'prog': '%s %s' % (get_prog(), name),
+ 'formatter': UpdatingDefaultsHelpFormatter(),
+ 'add_help_option': False,
+ 'name': name,
+ 'description': self.__doc__,
+ 'isolated': isolated,
+ }
+
+ self.name = name
+ self.summary = summary
+ self.parser = ConfigOptionParser(**parser_kw)
+
+ # Commands should add options to this option group
+ optgroup_name = '%s Options' % self.name.capitalize()
+ self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)
+
+ # Add the general options
+ gen_opts = cmdoptions.make_option_group(
+ cmdoptions.general_group,
+ self.parser,
+ )
+ self.parser.add_option_group(gen_opts)
+
+ def handle_pip_version_check(self, options):
+ # type: (Values) -> None
+ """
+ This is a no-op so that commands by default do not do the pip version
+ check.
+ """
+ # Make sure we do the pip version check if the index_group options
+ # are present.
+ assert not hasattr(options, 'no_index')
+
+ def run(self, options, args):
+ # type: (Values, List[Any]) -> Any
+ raise NotImplementedError
+
+ def parse_args(self, args):
+ # type: (List[str]) -> Tuple[Any, Any]
+ # factored out for testability
+ return self.parser.parse_args(args)
+
+ def main(self, args):
+ # type: (List[str]) -> int
+ try:
+ with self.main_context():
+ return self._main(args)
+ finally:
+ logging.shutdown()
+
+ def _main(self, args):
+ # type: (List[str]) -> int
+ # Intentionally set as early as possible so globally-managed temporary
+ # directories are available to the rest of the code.
+ self.enter_context(global_tempdir_manager())
+
+ options, args = self.parse_args(args)
+
+ # Set verbosity so that it can be used elsewhere.
+ self.verbosity = options.verbose - options.quiet
+
+ level_number = setup_logging(
+ verbosity=self.verbosity,
+ no_color=options.no_color,
+ user_log_file=options.log,
+ )
+
+ if (
+ sys.version_info[:2] == (2, 7) and
+ not options.no_python_version_warning
+ ):
+ message = (
+ "A future version of pip will drop support for Python 2.7. "
+ "More details about Python 2 support in pip, can be found at "
+ "https://pip.pypa.io/en/latest/development/release-process/#python-2-support" # noqa
+ )
+ if platform.python_implementation() == "CPython":
+ message = (
+ "Python 2.7 reached the end of its life on January "
+ "1st, 2020. Please upgrade your Python as Python 2.7 "
+ "is no longer maintained. "
+ ) + message
+ deprecated(message, replacement=None, gone_in=None)
+
+ if options.skip_requirements_regex:
+ deprecated(
+ "--skip-requirements-regex is unsupported and will be removed",
+ replacement=(
+ "manage requirements/constraints files explicitly, "
+ "possibly generating them from metadata"
+ ),
+ gone_in="20.1",
+ issue=7297,
+ )
+
+ # TODO: Try to get these passing down from the command?
+ # without resorting to os.environ to hold these.
+ # This also affects isolated builds and it should.
+
+ if options.no_input:
+ os.environ['PIP_NO_INPUT'] = '1'
+
+ if options.exists_action:
+ os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action)
+
+ if options.require_venv and not self.ignore_require_venv:
+ # If a venv is required check if it can really be found
+ if not running_under_virtualenv():
+ logger.critical(
+ 'Could not find an activated virtualenv (required).'
+ )
+ sys.exit(VIRTUALENV_NOT_FOUND)
+
+ if options.cache_dir:
+ options.cache_dir = normalize_path(options.cache_dir)
+ if not check_path_owner(options.cache_dir):
+ logger.warning(
+ "The directory '%s' or its parent directory is not owned "
+ "or is not writable by the current user. The cache "
+ "has been disabled. Check the permissions and owner of "
+ "that directory. If executing pip with sudo, you may want "
+ "sudo's -H flag.",
+ options.cache_dir,
+ )
+ options.cache_dir = None
+
+ try:
+ status = self.run(options, args)
+ # FIXME: all commands should return an exit status
+ # and when it is done, isinstance is not needed anymore
+ if isinstance(status, int):
+ return status
+ except PreviousBuildDirError as exc:
+ logger.critical(str(exc))
+ logger.debug('Exception information:', exc_info=True)
+
+ return PREVIOUS_BUILD_DIR_ERROR
+ except (InstallationError, UninstallationError, BadCommand) as exc:
+ logger.critical(str(exc))
+ logger.debug('Exception information:', exc_info=True)
+
+ return ERROR
+ except CommandError as exc:
+ logger.critical('%s', exc)
+ logger.debug('Exception information:', exc_info=True)
+
+ return ERROR
+ except BrokenStdoutLoggingError:
+ # Bypass our logger and write any remaining messages to stderr
+ # because stdout no longer works.
+ print('ERROR: Pipe to stdout was broken', file=sys.stderr)
+ if level_number <= logging.DEBUG:
+ traceback.print_exc(file=sys.stderr)
+
+ return ERROR
+ except KeyboardInterrupt:
+ logger.critical('Operation cancelled by user')
+ logger.debug('Exception information:', exc_info=True)
+
+ return ERROR
+ except BaseException:
+ logger.critical('Exception:', exc_info=True)
+
+ return UNKNOWN_ERROR
+ finally:
+ self.handle_pip_version_check(options)
+
+ return SUCCESS
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/cmdoptions.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/cmdoptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..447f3191887dba6e1893c93a0c5ee77de88f1074
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/cmdoptions.py
@@ -0,0 +1,957 @@
+"""
+shared options and groups
+
+The principle here is to define options once, but *not* instantiate them
+globally. One reason being that options with action='append' can carry state
+between parses. pip parses general options twice internally, and shouldn't
+pass on state. To be consistent, all options will follow this design.
+"""
+
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+from __future__ import absolute_import
+
+import logging
+import os
+import textwrap
+import warnings
+from distutils.util import strtobool
+from functools import partial
+from optparse import SUPPRESS_HELP, Option, OptionGroup
+from textwrap import dedent
+
+from pip._internal.exceptions import CommandError
+from pip._internal.locations import USER_CACHE_DIR, get_src_prefix
+from pip._internal.models.format_control import FormatControl
+from pip._internal.models.index import PyPI
+from pip._internal.models.target_python import TargetPython
+from pip._internal.utils.hashes import STRONG_HASHES
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.ui import BAR_TYPES
+
+if MYPY_CHECK_RUNNING:
+ from typing import Any, Callable, Dict, Optional, Tuple
+ from optparse import OptionParser, Values
+ from pip._internal.cli.parser import ConfigOptionParser
+
+logger = logging.getLogger(__name__)
+
+
+def raise_option_error(parser, option, msg):
+ # type: (OptionParser, Option, str) -> None
+ """
+ Raise an option parsing error using parser.error().
+
+ Args:
+ parser: an OptionParser instance.
+ option: an Option instance.
+ msg: the error text.
+ """
+ msg = '{} error: {}'.format(option, msg)
+ msg = textwrap.fill(' '.join(msg.split()))
+ parser.error(msg)
+
+
+def make_option_group(group, parser):
+ # type: (Dict[str, Any], ConfigOptionParser) -> OptionGroup
+ """
+ Return an OptionGroup object
+ group -- assumed to be dict with 'name' and 'options' keys
+ parser -- an optparse Parser
+ """
+ option_group = OptionGroup(parser, group['name'])
+ for option in group['options']:
+ option_group.add_option(option())
+ return option_group
+
+
+def check_install_build_global(options, check_options=None):
+ # type: (Values, Optional[Values]) -> None
+ """Disable wheels if per-setup.py call options are set.
+
+ :param options: The OptionParser options to update.
+ :param check_options: The options to check, if not supplied defaults to
+ options.
+ """
+ if check_options is None:
+ check_options = options
+
+ def getname(n):
+ # type: (str) -> Optional[Any]
+ return getattr(check_options, n, None)
+ names = ["build_options", "global_options", "install_options"]
+ if any(map(getname, names)):
+ control = options.format_control
+ control.disallow_binaries()
+ warnings.warn(
+ 'Disabling all use of wheels due to the use of --build-option '
+ '/ --global-option / --install-option.', stacklevel=2,
+ )
+
+
+def check_dist_restriction(options, check_target=False):
+ # type: (Values, bool) -> None
+ """Function for determining if custom platform options are allowed.
+
+ :param options: The OptionParser options.
+ :param check_target: Whether or not to check if --target is being used.
+ """
+ dist_restriction_set = any([
+ options.python_version,
+ options.platform,
+ options.abi,
+ options.implementation,
+ ])
+
+ binary_only = FormatControl(set(), {':all:'})
+ sdist_dependencies_allowed = (
+ options.format_control != binary_only and
+ not options.ignore_dependencies
+ )
+
+ # Installations or downloads using dist restrictions must not combine
+ # source distributions and dist-specific wheels, as they are not
+ # guaranteed to be locally compatible.
+ if dist_restriction_set and sdist_dependencies_allowed:
+ raise CommandError(
+ "When restricting platform and interpreter constraints using "
+ "--python-version, --platform, --abi, or --implementation, "
+ "either --no-deps must be set, or --only-binary=:all: must be "
+ "set and --no-binary must not be set (or must be set to "
+ ":none:)."
+ )
+
+ if check_target:
+ if dist_restriction_set and not options.target_dir:
+ raise CommandError(
+ "Can not use any platform or abi specific options unless "
+ "installing via '--target'"
+ )
+
+
+def _path_option_check(option, opt, value):
+ # type: (Option, str, str) -> str
+ return os.path.expanduser(value)
+
+
+class PipOption(Option):
+ TYPES = Option.TYPES + ("path",)
+ TYPE_CHECKER = Option.TYPE_CHECKER.copy()
+ TYPE_CHECKER["path"] = _path_option_check
+
+
+###########
+# options #
+###########
+
+help_ = partial(
+ Option,
+ '-h', '--help',
+ dest='help',
+ action='help',
+ help='Show help.',
+) # type: Callable[..., Option]
+
+isolated_mode = partial(
+ Option,
+ "--isolated",
+ dest="isolated_mode",
+ action="store_true",
+ default=False,
+ help=(
+ "Run pip in an isolated mode, ignoring environment variables and user "
+ "configuration."
+ ),
+) # type: Callable[..., Option]
+
+require_virtualenv = partial(
+ Option,
+ # Run only if inside a virtualenv, bail if not.
+ '--require-virtualenv', '--require-venv',
+ dest='require_venv',
+ action='store_true',
+ default=False,
+ help=SUPPRESS_HELP
+) # type: Callable[..., Option]
+
+verbose = partial(
+ Option,
+ '-v', '--verbose',
+ dest='verbose',
+ action='count',
+ default=0,
+ help='Give more output. Option is additive, and can be used up to 3 times.'
+) # type: Callable[..., Option]
+
+no_color = partial(
+ Option,
+ '--no-color',
+ dest='no_color',
+ action='store_true',
+ default=False,
+ help="Suppress colored output",
+) # type: Callable[..., Option]
+
+version = partial(
+ Option,
+ '-V', '--version',
+ dest='version',
+ action='store_true',
+ help='Show version and exit.',
+) # type: Callable[..., Option]
+
+quiet = partial(
+ Option,
+ '-q', '--quiet',
+ dest='quiet',
+ action='count',
+ default=0,
+ help=(
+ 'Give less output. Option is additive, and can be used up to 3'
+ ' times (corresponding to WARNING, ERROR, and CRITICAL logging'
+ ' levels).'
+ ),
+) # type: Callable[..., Option]
+
+progress_bar = partial(
+ Option,
+ '--progress-bar',
+ dest='progress_bar',
+ type='choice',
+ choices=list(BAR_TYPES.keys()),
+ default='on',
+ help=(
+ 'Specify type of progress to be displayed [' +
+ '|'.join(BAR_TYPES.keys()) + '] (default: %default)'
+ ),
+) # type: Callable[..., Option]
+
+log = partial(
+ PipOption,
+ "--log", "--log-file", "--local-log",
+ dest="log",
+ metavar="path",
+ type="path",
+ help="Path to a verbose appending log."
+) # type: Callable[..., Option]
+
+no_input = partial(
+ Option,
+ # Don't ask for input
+ '--no-input',
+ dest='no_input',
+ action='store_true',
+ default=False,
+ help=SUPPRESS_HELP
+) # type: Callable[..., Option]
+
+proxy = partial(
+ Option,
+ '--proxy',
+ dest='proxy',
+ type='str',
+ default='',
+ help="Specify a proxy in the form [user:passwd@]proxy.server:port."
+) # type: Callable[..., Option]
+
+retries = partial(
+ Option,
+ '--retries',
+ dest='retries',
+ type='int',
+ default=5,
+ help="Maximum number of retries each connection should attempt "
+ "(default %default times).",
+) # type: Callable[..., Option]
+
+timeout = partial(
+ Option,
+ '--timeout', '--default-timeout',
+ metavar='sec',
+ dest='timeout',
+ type='float',
+ default=15,
+ help='Set the socket timeout (default %default seconds).',
+) # type: Callable[..., Option]
+
+skip_requirements_regex = partial(
+ Option,
+ # A regex to be used to skip requirements
+ '--skip-requirements-regex',
+ dest='skip_requirements_regex',
+ type='str',
+ default='',
+ help=SUPPRESS_HELP,
+) # type: Callable[..., Option]
+
+
+def exists_action():
+ # type: () -> Option
+ return Option(
+ # Option when path already exist
+ '--exists-action',
+ dest='exists_action',
+ type='choice',
+ choices=['s', 'i', 'w', 'b', 'a'],
+ default=[],
+ action='append',
+ metavar='action',
+ help="Default action when a path already exists: "
+ "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.",
+ )
+
+
+cert = partial(
+ PipOption,
+ '--cert',
+ dest='cert',
+ type='path',
+ metavar='path',
+ help="Path to alternate CA bundle.",
+) # type: Callable[..., Option]
+
+client_cert = partial(
+ PipOption,
+ '--client-cert',
+ dest='client_cert',
+ type='path',
+ default=None,
+ metavar='path',
+ help="Path to SSL client certificate, a single file containing the "
+ "private key and the certificate in PEM format.",
+) # type: Callable[..., Option]
+
+index_url = partial(
+ Option,
+ '-i', '--index-url', '--pypi-url',
+ dest='index_url',
+ metavar='URL',
+ default=PyPI.simple_url,
+ help="Base URL of the Python Package Index (default %default). "
+ "This should point to a repository compliant with PEP 503 "
+ "(the simple repository API) or a local directory laid out "
+ "in the same format.",
+) # type: Callable[..., Option]
+
+
+def extra_index_url():
+ # type: () -> Option
+ return Option(
+ '--extra-index-url',
+ dest='extra_index_urls',
+ metavar='URL',
+ action='append',
+ default=[],
+ help="Extra URLs of package indexes to use in addition to "
+ "--index-url. Should follow the same rules as "
+ "--index-url.",
+ )
+
+
+no_index = partial(
+ Option,
+ '--no-index',
+ dest='no_index',
+ action='store_true',
+ default=False,
+ help='Ignore package index (only looking at --find-links URLs instead).',
+) # type: Callable[..., Option]
+
+
+def find_links():
+ # type: () -> Option
+ return Option(
+ '-f', '--find-links',
+ dest='find_links',
+ action='append',
+ default=[],
+ metavar='url',
+ help="If a url or path to an html file, then parse for links to "
+ "archives. If a local path or file:// url that's a directory, "
+ "then look for archives in the directory listing.",
+ )
+
+
+def trusted_host():
+ # type: () -> Option
+ return Option(
+ "--trusted-host",
+ dest="trusted_hosts",
+ action="append",
+ metavar="HOSTNAME",
+ default=[],
+ help="Mark this host or host:port pair as trusted, even though it "
+ "does not have valid or any HTTPS.",
+ )
+
+
+def constraints():
+ # type: () -> Option
+ return Option(
+ '-c', '--constraint',
+ dest='constraints',
+ action='append',
+ default=[],
+ metavar='file',
+ help='Constrain versions using the given constraints file. '
+ 'This option can be used multiple times.'
+ )
+
+
+def requirements():
+ # type: () -> Option
+ return Option(
+ '-r', '--requirement',
+ dest='requirements',
+ action='append',
+ default=[],
+ metavar='file',
+ help='Install from the given requirements file. '
+ 'This option can be used multiple times.'
+ )
+
+
+def editable():
+ # type: () -> Option
+ return Option(
+ '-e', '--editable',
+ dest='editables',
+ action='append',
+ default=[],
+ metavar='path/url',
+ help=('Install a project in editable mode (i.e. setuptools '
+ '"develop mode") from a local project path or a VCS url.'),
+ )
+
+
+def _handle_src(option, opt_str, value, parser):
+ # type: (Option, str, str, OptionParser) -> None
+ value = os.path.abspath(value)
+ setattr(parser.values, option.dest, value)
+
+
+src = partial(
+ PipOption,
+ '--src', '--source', '--source-dir', '--source-directory',
+ dest='src_dir',
+ type='path',
+ metavar='dir',
+ default=get_src_prefix(),
+ action='callback',
+ callback=_handle_src,
+ help='Directory to check out editable projects into. '
+ 'The default in a virtualenv is "<venv path>/src". '
+ 'The default for global installs is "<current dir>/src".'
+) # type: Callable[..., Option]
+
+
+def _get_format_control(values, option):
+ # type: (Values, Option) -> Any
+ """Get a format_control object."""
+ return getattr(values, option.dest)
+
+
+def _handle_no_binary(option, opt_str, value, parser):
+ # type: (Option, str, str, OptionParser) -> None
+ existing = _get_format_control(parser.values, option)
+ FormatControl.handle_mutual_excludes(
+ value, existing.no_binary, existing.only_binary,
+ )
+
+
+def _handle_only_binary(option, opt_str, value, parser):
+ # type: (Option, str, str, OptionParser) -> None
+ existing = _get_format_control(parser.values, option)
+ FormatControl.handle_mutual_excludes(
+ value, existing.only_binary, existing.no_binary,
+ )
+
+
+def no_binary():
+ # type: () -> Option
+ format_control = FormatControl(set(), set())
+ return Option(
+ "--no-binary", dest="format_control", action="callback",
+ callback=_handle_no_binary, type="str",
+ default=format_control,
+ help="Do not use binary packages. Can be supplied multiple times, and "
+ "each time adds to the existing value. Accepts either :all: to "
+ "disable all binary packages, :none: to empty the set, or one or "
+ "more package names with commas between them (no colons). Note "
+ "that some packages are tricky to compile and may fail to "
+ "install when this option is used on them.",
+ )
+
+
+def only_binary():
+ # type: () -> Option
+ format_control = FormatControl(set(), set())
+ return Option(
+ "--only-binary", dest="format_control", action="callback",
+ callback=_handle_only_binary, type="str",
+ default=format_control,
+ help="Do not use source packages. Can be supplied multiple times, and "
+ "each time adds to the existing value. Accepts either :all: to "
+ "disable all source packages, :none: to empty the set, or one or "
+ "more package names with commas between them. Packages without "
+ "binary distributions will fail to install when this option is "
+ "used on them.",
+ )
+
+
+platform = partial(
+ Option,
+ '--platform',
+ dest='platform',
+ metavar='platform',
+ default=None,
+ help=("Only use wheels compatible with <platform>. "
+ "Defaults to the platform of the running system."),
+) # type: Callable[..., Option]
+
+
+# This was made a separate function for unit-testing purposes.
+def _convert_python_version(value):
+ # type: (str) -> Tuple[Tuple[int, ...], Optional[str]]
+ """
+ Convert a version string like "3", "37", or "3.7.3" into a tuple of ints.
+
+ :return: A 2-tuple (version_info, error_msg), where `error_msg` is
+ non-None if and only if there was a parsing error.
+ """
+ if not value:
+ # The empty string is the same as not providing a value.
+ return (None, None)
+
+ parts = value.split('.')
+ if len(parts) > 3:
+ return ((), 'at most three version parts are allowed')
+
+ if len(parts) == 1:
+ # Then we are in the case of "3" or "37".
+ value = parts[0]
+ if len(value) > 1:
+ parts = [value[0], value[1:]]
+
+ try:
+ version_info = tuple(int(part) for part in parts)
+ except ValueError:
+ return ((), 'each version part must be an integer')
+
+ return (version_info, None)
+
+
+def _handle_python_version(option, opt_str, value, parser):
+ # type: (Option, str, str, OptionParser) -> None
+ """
+ Handle a provided --python-version value.
+ """
+ version_info, error_msg = _convert_python_version(value)
+ if error_msg is not None:
+ msg = (
+ 'invalid --python-version value: {!r}: {}'.format(
+ value, error_msg,
+ )
+ )
+ raise_option_error(parser, option=option, msg=msg)
+
+ parser.values.python_version = version_info
+
+
+python_version = partial(
+ Option,
+ '--python-version',
+ dest='python_version',
+ metavar='python_version',
+ action='callback',
+ callback=_handle_python_version, type='str',
+ default=None,
+ help=dedent("""\
+ The Python interpreter version to use for wheel and "Requires-Python"
+ compatibility checks. Defaults to a version derived from the running
+ interpreter. The version can be specified using up to three dot-separated
+ integers (e.g. "3" for 3.0.0, "3.7" for 3.7.0, or "3.7.3"). A major-minor
+ version can also be given as a string without dots (e.g. "37" for 3.7.0).
+ """),
+) # type: Callable[..., Option]
+
+
+implementation = partial(
+ Option,
+ '--implementation',
+ dest='implementation',
+ metavar='implementation',
+ default=None,
+ help=("Only use wheels compatible with Python "
+ "implementation <implementation>, e.g. 'pp', 'jy', 'cp', "
+ " or 'ip'. If not specified, then the current "
+ "interpreter implementation is used. Use 'py' to force "
+ "implementation-agnostic wheels."),
+) # type: Callable[..., Option]
+
+
+abi = partial(
+ Option,
+ '--abi',
+ dest='abi',
+ metavar='abi',
+ default=None,
+ help=("Only use wheels compatible with Python "
+ "abi <abi>, e.g. 'pypy_41'. If not specified, then the "
+ "current interpreter abi tag is used. Generally "
+ "you will need to specify --implementation, "
+ "--platform, and --python-version when using "
+ "this option."),
+) # type: Callable[..., Option]
+
+
+def add_target_python_options(cmd_opts):
+ # type: (OptionGroup) -> None
+ cmd_opts.add_option(platform())
+ cmd_opts.add_option(python_version())
+ cmd_opts.add_option(implementation())
+ cmd_opts.add_option(abi())
+
+
+def make_target_python(options):
+ # type: (Values) -> TargetPython
+ target_python = TargetPython(
+ platform=options.platform,
+ py_version_info=options.python_version,
+ abi=options.abi,
+ implementation=options.implementation,
+ )
+
+ return target_python
+
+
+def prefer_binary():
+ # type: () -> Option
+ return Option(
+ "--prefer-binary",
+ dest="prefer_binary",
+ action="store_true",
+ default=False,
+ help="Prefer older binary packages over newer source packages."
+ )
+
+
+cache_dir = partial(
+ PipOption,
+ "--cache-dir",
+ dest="cache_dir",
+ default=USER_CACHE_DIR,
+ metavar="dir",
+ type='path',
+ help="Store the cache data in <dir>."
+) # type: Callable[..., Option]
+
+
+def _handle_no_cache_dir(option, opt, value, parser):
+ # type: (Option, str, str, OptionParser) -> None
+ """
+ Process a value provided for the --no-cache-dir option.
+
+ This is an optparse.Option callback for the --no-cache-dir option.
+ """
+ # The value argument will be None if --no-cache-dir is passed via the
+ # command-line, since the option doesn't accept arguments. However,
+ # the value can be non-None if the option is triggered e.g. by an
+ # environment variable, like PIP_NO_CACHE_DIR=true.
+ if value is not None:
+ # Then parse the string value to get argument error-checking.
+ try:
+ strtobool(value)
+ except ValueError as exc:
+ raise_option_error(parser, option=option, msg=str(exc))
+
+ # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool()
+ # converted to 0 (like "false" or "no") caused cache_dir to be disabled
+ # rather than enabled (logic would say the latter). Thus, we disable
+ # the cache directory not just on values that parse to True, but (for
+ # backwards compatibility reasons) also on values that parse to False.
+ # In other words, always set it to False if the option is provided in
+ # some (valid) form.
+ parser.values.cache_dir = False
+
+
+no_cache = partial(
+ Option,
+ "--no-cache-dir",
+ dest="cache_dir",
+ action="callback",
+ callback=_handle_no_cache_dir,
+ help="Disable the cache.",
+) # type: Callable[..., Option]
+
+no_deps = partial(
+ Option,
+ '--no-deps', '--no-dependencies',
+ dest='ignore_dependencies',
+ action='store_true',
+ default=False,
+ help="Don't install package dependencies.",
+) # type: Callable[..., Option]
+
+
+def _handle_build_dir(option, opt, value, parser):
+ # type: (Option, str, str, OptionParser) -> None
+ if value:
+ value = os.path.abspath(value)
+ setattr(parser.values, option.dest, value)
+
+
+build_dir = partial(
+ PipOption,
+ '-b', '--build', '--build-dir', '--build-directory',
+ dest='build_dir',
+ type='path',
+ metavar='dir',
+ action='callback',
+ callback=_handle_build_dir,
+ help='Directory to unpack packages into and build in. Note that '
+ 'an initial build still takes place in a temporary directory. '
+ 'The location of temporary directories can be controlled by setting '
+ 'the TMPDIR environment variable (TEMP on Windows) appropriately. '
+ 'When passed, build directories are not cleaned in case of failures.'
+) # type: Callable[..., Option]
+
+ignore_requires_python = partial(
+ Option,
+ '--ignore-requires-python',
+ dest='ignore_requires_python',
+ action='store_true',
+ help='Ignore the Requires-Python information.'
+) # type: Callable[..., Option]
+
+no_build_isolation = partial(
+ Option,
+ '--no-build-isolation',
+ dest='build_isolation',
+ action='store_false',
+ default=True,
+ help='Disable isolation when building a modern source distribution. '
+ 'Build dependencies specified by PEP 518 must be already installed '
+ 'if this option is used.'
+) # type: Callable[..., Option]
+
+
+def _handle_no_use_pep517(option, opt, value, parser):
+ # type: (Option, str, str, OptionParser) -> None
+ """
+ Process a value provided for the --no-use-pep517 option.
+
+ This is an optparse.Option callback for the no_use_pep517 option.
+ """
+ # Since --no-use-pep517 doesn't accept arguments, the value argument
+ # will be None if --no-use-pep517 is passed via the command-line.
+ # However, the value can be non-None if the option is triggered e.g.
+ # by an environment variable, for example "PIP_NO_USE_PEP517=true".
+ if value is not None:
+ msg = """A value was passed for --no-use-pep517,
+ probably using either the PIP_NO_USE_PEP517 environment variable
+ or the "no-use-pep517" config file option. Use an appropriate value
+ of the PIP_USE_PEP517 environment variable or the "use-pep517"
+ config file option instead.
+ """
+ raise_option_error(parser, option=option, msg=msg)
+
+ # Otherwise, --no-use-pep517 was passed via the command-line.
+ parser.values.use_pep517 = False
+
+
+use_pep517 = partial(
+ Option,
+ '--use-pep517',
+ dest='use_pep517',
+ action='store_true',
+ default=None,
+ help='Use PEP 517 for building source distributions '
+ '(use --no-use-pep517 to force legacy behaviour).'
+) # type: Any
+
+no_use_pep517 = partial(
+ Option,
+ '--no-use-pep517',
+ dest='use_pep517',
+ action='callback',
+ callback=_handle_no_use_pep517,
+ default=None,
+ help=SUPPRESS_HELP
+) # type: Any
+
+install_options = partial(
+ Option,
+ '--install-option',
+ dest='install_options',
+ action='append',
+ metavar='options',
+ help="Extra arguments to be supplied to the setup.py install "
+ "command (use like --install-option=\"--install-scripts=/usr/local/"
+ "bin\"). Use multiple --install-option options to pass multiple "
+ "options to setup.py install. If you are using an option with a "
+ "directory path, be sure to use absolute path.",
+) # type: Callable[..., Option]
+
+global_options = partial(
+ Option,
+ '--global-option',
+ dest='global_options',
+ action='append',
+ metavar='options',
+ help="Extra global options to be supplied to the setup.py "
+ "call before the install command.",
+) # type: Callable[..., Option]
+
+no_clean = partial(
+ Option,
+ '--no-clean',
+ action='store_true',
+ default=False,
+ help="Don't clean up build directories."
+) # type: Callable[..., Option]
+
+pre = partial(
+ Option,
+ '--pre',
+ action='store_true',
+ default=False,
+ help="Include pre-release and development versions. By default, "
+ "pip only finds stable versions.",
+) # type: Callable[..., Option]
+
+disable_pip_version_check = partial(
+ Option,
+ "--disable-pip-version-check",
+ dest="disable_pip_version_check",
+ action="store_true",
+ default=True,
+ help="Don't periodically check PyPI to determine whether a new version "
+ "of pip is available for download. Implied with --no-index.",
+) # type: Callable[..., Option]
+
+
+# Deprecated, Remove later
+always_unzip = partial(
+ Option,
+ '-Z', '--always-unzip',
+ dest='always_unzip',
+ action='store_true',
+ help=SUPPRESS_HELP,
+) # type: Callable[..., Option]
+
+
+def _handle_merge_hash(option, opt_str, value, parser):
+ # type: (Option, str, str, OptionParser) -> None
+ """Given a value spelled "algo:digest", append the digest to a list
+ pointed to in a dict by the algo name."""
+ if not parser.values.hashes:
+ parser.values.hashes = {}
+ try:
+ algo, digest = value.split(':', 1)
+ except ValueError:
+ parser.error('Arguments to %s must be a hash name '
+ 'followed by a value, like --hash=sha256:abcde...' %
+ opt_str)
+ if algo not in STRONG_HASHES:
+ parser.error('Allowed hash algorithms for %s are %s.' %
+ (opt_str, ', '.join(STRONG_HASHES)))
+ parser.values.hashes.setdefault(algo, []).append(digest)
+
+
+hash = partial(
+ Option,
+ '--hash',
+ # Hash values eventually end up in InstallRequirement.hashes due to
+ # __dict__ copying in process_line().
+ dest='hashes',
+ action='callback',
+ callback=_handle_merge_hash,
+ type='string',
+ help="Verify that the package's archive matches this "
+ 'hash before installing. Example: --hash=sha256:abcdef...',
+) # type: Callable[..., Option]
+
+
+require_hashes = partial(
+ Option,
+ '--require-hashes',
+ dest='require_hashes',
+ action='store_true',
+ default=False,
+ help='Require a hash to check each requirement against, for '
+ 'repeatable installs. This option is implied when any package in a '
+ 'requirements file has a --hash option.',
+) # type: Callable[..., Option]
+
+
+list_path = partial(
+ PipOption,
+ '--path',
+ dest='path',
+ type='path',
+ action='append',
+ help='Restrict to the specified installation path for listing '
+ 'packages (can be used multiple times).'
+) # type: Callable[..., Option]
+
+
+def check_list_path_option(options):
+ # type: (Values) -> None
+ if options.path and (options.user or options.local):
+ raise CommandError(
+ "Cannot combine '--path' with '--user' or '--local'"
+ )
+
+
+no_python_version_warning = partial(
+ Option,
+ '--no-python-version-warning',
+ dest='no_python_version_warning',
+ action='store_true',
+ default=False,
+ help='Silence deprecation warnings for upcoming unsupported Pythons.',
+) # type: Callable[..., Option]
+
+
+##########
+# groups #
+##########
+
+general_group = {
+ 'name': 'General Options',
+ 'options': [
+ help_,
+ isolated_mode,
+ require_virtualenv,
+ verbose,
+ version,
+ quiet,
+ log,
+ no_input,
+ proxy,
+ retries,
+ timeout,
+ skip_requirements_regex,
+ exists_action,
+ trusted_host,
+ cert,
+ client_cert,
+ cache_dir,
+ no_cache,
+ disable_pip_version_check,
+ no_color,
+ no_python_version_warning,
+ ]
+} # type: Dict[str, Any]
+
+index_group = {
+ 'name': 'Package Index Options',
+ 'options': [
+ index_url,
+ extra_index_url,
+ no_index,
+ find_links,
+ ]
+} # type: Dict[str, Any]
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/command_context.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/command_context.py
new file mode 100644
index 0000000000000000000000000000000000000000..d1a64a776062a95258d3331cdec9b987e433ddf9
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/command_context.py
@@ -0,0 +1,36 @@
+from contextlib import contextmanager
+
+from pip._vendor.contextlib2 import ExitStack
+
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Iterator, ContextManager, TypeVar
+
+ _T = TypeVar('_T', covariant=True)
+
+
+class CommandContextMixIn(object):
+ def __init__(self):
+ # type: () -> None
+ super(CommandContextMixIn, self).__init__()
+ self._in_main_context = False
+ self._main_context = ExitStack()
+
+ @contextmanager
+ def main_context(self):
+ # type: () -> Iterator[None]
+ assert not self._in_main_context
+
+ self._in_main_context = True
+ try:
+ with self._main_context:
+ yield
+ finally:
+ self._in_main_context = False
+
+ def enter_context(self, context_provider):
+ # type: (ContextManager[_T]) -> _T
+ assert self._in_main_context
+
+ return self._main_context.enter_context(context_provider)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/main.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/main.py
new file mode 100644
index 0000000000000000000000000000000000000000..5e97a5103f6af5baded5758f0ee41eb1aa641cc7
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/main.py
@@ -0,0 +1,75 @@
+"""Primary application entrypoint.
+"""
+from __future__ import absolute_import
+
+import locale
+import logging
+import os
+import sys
+
+from pip._internal.cli.autocompletion import autocomplete
+from pip._internal.cli.main_parser import parse_command
+from pip._internal.commands import create_command
+from pip._internal.exceptions import PipError
+from pip._internal.utils import deprecation
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import List, Optional
+
+logger = logging.getLogger(__name__)
+
+
+# Do not import and use main() directly! Using it directly is actively
+# discouraged by pip's maintainers. The name, location and behavior of
+# this function is subject to change, so calling it directly is not
+# portable across different pip versions.
+
+# In addition, running pip in-process is unsupported and unsafe. This is
+# elaborated in detail at
+# https://pip.pypa.io/en/stable/user_guide/#using-pip-from-your-program.
+# That document also provides suggestions that should work for nearly
+# all users that are considering importing and using main() directly.
+
+# However, we know that certain users will still want to invoke pip
+# in-process. If you understand and accept the implications of using pip
+# in an unsupported manner, the best approach is to use runpy to avoid
+# depending on the exact location of this entry point.
+
+# The following example shows how to use runpy to invoke pip in that
+# case:
+#
+# sys.argv = ["pip", your, args, here]
+# runpy.run_module("pip", run_name="__main__")
+#
+# Note that this will exit the process after running, unlike a direct
+# call to main. As it is not safe to do any processing after calling
+# main, this should not be an issue in practice.
+
+def main(args=None):
+ # type: (Optional[List[str]]) -> int
+ if args is None:
+ args = sys.argv[1:]
+
+ # Configure our deprecation warnings to be sent through loggers
+ deprecation.install_warning_logger()
+
+ autocomplete()
+
+ try:
+ cmd_name, cmd_args = parse_command(args)
+ except PipError as exc:
+ sys.stderr.write("ERROR: %s" % exc)
+ sys.stderr.write(os.linesep)
+ sys.exit(1)
+
+ # Needed for locale.getpreferredencoding(False) to work
+ # in pip._internal.utils.encoding.auto_decode
+ try:
+ locale.setlocale(locale.LC_ALL, '')
+ except locale.Error as e:
+ # setlocale can apparently crash if locale are uninitialized
+ logger.debug("Ignoring error %s when setting locale", e)
+ command = create_command(cmd_name, isolated=("--isolated" in cmd_args))
+
+ return command.main(cmd_args)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/main_parser.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/main_parser.py
new file mode 100644
index 0000000000000000000000000000000000000000..a89821d44890ee9a89b186c66c9ce12d5ccc02dc
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/main_parser.py
@@ -0,0 +1,99 @@
+"""A single place for constructing and exposing the main parser
+"""
+
+import os
+import sys
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.parser import (
+ ConfigOptionParser,
+ UpdatingDefaultsHelpFormatter,
+)
+from pip._internal.commands import commands_dict, get_similar_commands
+from pip._internal.exceptions import CommandError
+from pip._internal.utils.misc import get_pip_version, get_prog
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Tuple, List
+
+
+__all__ = ["create_main_parser", "parse_command"]
+
+
+def create_main_parser():
+ # type: () -> ConfigOptionParser
+ """Creates and returns the main parser for pip's CLI
+ """
+
+ parser_kw = {
+ 'usage': '\n%prog <command> [options]',
+ 'add_help_option': False,
+ 'formatter': UpdatingDefaultsHelpFormatter(),
+ 'name': 'global',
+ 'prog': get_prog(),
+ }
+
+ parser = ConfigOptionParser(**parser_kw)
+ parser.disable_interspersed_args()
+
+ parser.version = get_pip_version()
+
+ # add the general options
+ gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
+ parser.add_option_group(gen_opts)
+
+ # so the help formatter knows
+ parser.main = True # type: ignore
+
+ # create command listing for description
+ description = [''] + [
+ '%-27s %s' % (name, command_info.summary)
+ for name, command_info in commands_dict.items()
+ ]
+ parser.description = '\n'.join(description)
+
+ return parser
+
+
+def parse_command(args):
+ # type: (List[str]) -> Tuple[str, List[str]]
+ parser = create_main_parser()
+
+ # Note: parser calls disable_interspersed_args(), so the result of this
+ # call is to split the initial args into the general options before the
+ # subcommand and everything else.
+ # For example:
+ # args: ['--timeout=5', 'install', '--user', 'INITools']
+ # general_options: ['--timeout==5']
+ # args_else: ['install', '--user', 'INITools']
+ general_options, args_else = parser.parse_args(args)
+
+ # --version
+ if general_options.version:
+ sys.stdout.write(parser.version) # type: ignore
+ sys.stdout.write(os.linesep)
+ sys.exit()
+
+ # pip || pip help -> print_help()
+ if not args_else or (args_else[0] == 'help' and len(args_else) == 1):
+ parser.print_help()
+ sys.exit()
+
+ # the subcommand name
+ cmd_name = args_else[0]
+
+ if cmd_name not in commands_dict:
+ guess = get_similar_commands(cmd_name)
+
+ msg = ['unknown command "%s"' % cmd_name]
+ if guess:
+ msg.append('maybe you meant "%s"' % guess)
+
+ raise CommandError(' - '.join(msg))
+
+ # all the args without the subcommand
+ cmd_args = args[:]
+ cmd_args.remove(cmd_name)
+
+ return cmd_name, cmd_args
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/parser.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/parser.py
new file mode 100644
index 0000000000000000000000000000000000000000..c99456bae88d0c73ab79a671b440993c8195568d
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/parser.py
@@ -0,0 +1,265 @@
+"""Base option parser setup"""
+
+# The following comment should be removed at some point in the future.
+# mypy: disallow-untyped-defs=False
+
+from __future__ import absolute_import
+
+import logging
+import optparse
+import sys
+import textwrap
+from distutils.util import strtobool
+
+from pip._vendor.six import string_types
+
+from pip._internal.cli.status_codes import UNKNOWN_ERROR
+from pip._internal.configuration import Configuration, ConfigurationError
+from pip._internal.utils.compat import get_terminal_size
+
+logger = logging.getLogger(__name__)
+
+
+class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
+ """A prettier/less verbose help formatter for optparse."""
+
+ def __init__(self, *args, **kwargs):
+ # help position must be aligned with __init__.parseopts.description
+ kwargs['max_help_position'] = 30
+ kwargs['indent_increment'] = 1
+ kwargs['width'] = get_terminal_size()[0] - 2
+ optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs)
+
+ def format_option_strings(self, option):
+ return self._format_option_strings(option, ' <%s>', ', ')
+
+ def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '):
+ """
+ Return a comma-separated list of option strings and metavars.
+
+ :param option: tuple of (short opt, long opt), e.g: ('-f', '--format')
+ :param mvarfmt: metavar format string - evaluated as mvarfmt % metavar
+ :param optsep: separator
+ """
+ opts = []
+
+ if option._short_opts:
+ opts.append(option._short_opts[0])
+ if option._long_opts:
+ opts.append(option._long_opts[0])
+ if len(opts) > 1:
+ opts.insert(1, optsep)
+
+ if option.takes_value():
+ metavar = option.metavar or option.dest.lower()
+ opts.append(mvarfmt % metavar.lower())
+
+ return ''.join(opts)
+
+ def format_heading(self, heading):
+ if heading == 'Options':
+ return ''
+ return heading + ':\n'
+
+ def format_usage(self, usage):
+ """
+ Ensure there is only one newline between usage and the first heading
+ if there is no description.
+ """
+ msg = '\nUsage: %s\n' % self.indent_lines(textwrap.dedent(usage), " ")
+ return msg
+
+ def format_description(self, description):
+ # leave full control over description to us
+ if description:
+ if hasattr(self.parser, 'main'):
+ label = 'Commands'
+ else:
+ label = 'Description'
+ # some doc strings have initial newlines, some don't
+ description = description.lstrip('\n')
+ # some doc strings have final newlines and spaces, some don't
+ description = description.rstrip()
+ # dedent, then reindent
+ description = self.indent_lines(textwrap.dedent(description), " ")
+ description = '%s:\n%s\n' % (label, description)
+ return description
+ else:
+ return ''
+
+ def format_epilog(self, epilog):
+ # leave full control over epilog to us
+ if epilog:
+ return epilog
+ else:
+ return ''
+
+ def indent_lines(self, text, indent):
+ new_lines = [indent + line for line in text.split('\n')]
+ return "\n".join(new_lines)
+
+
+class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
+ """Custom help formatter for use in ConfigOptionParser.
+
+ This is updates the defaults before expanding them, allowing
+ them to show up correctly in the help listing.
+ """
+
+ def expand_default(self, option):
+ if self.parser is not None:
+ self.parser._update_defaults(self.parser.defaults)
+ return optparse.IndentedHelpFormatter.expand_default(self, option)
+
+
+class CustomOptionParser(optparse.OptionParser):
+
+ def insert_option_group(self, idx, *args, **kwargs):
+ """Insert an OptionGroup at a given position."""
+ group = self.add_option_group(*args, **kwargs)
+
+ self.option_groups.pop()
+ self.option_groups.insert(idx, group)
+
+ return group
+
+ @property
+ def option_list_all(self):
+ """Get a list of all options, including those in option groups."""
+ res = self.option_list[:]
+ for i in self.option_groups:
+ res.extend(i.option_list)
+
+ return res
+
+
+class ConfigOptionParser(CustomOptionParser):
+ """Custom option parser which updates its defaults by checking the
+ configuration files and environmental variables"""
+
+ def __init__(self, *args, **kwargs):
+ self.name = kwargs.pop('name')
+
+ isolated = kwargs.pop("isolated", False)
+ self.config = Configuration(isolated)
+
+ assert self.name
+ optparse.OptionParser.__init__(self, *args, **kwargs)
+
+ def check_default(self, option, key, val):
+ try:
+ return option.check_value(key, val)
+ except optparse.OptionValueError as exc:
+ print("An error occurred during configuration: %s" % exc)
+ sys.exit(3)
+
+ def _get_ordered_configuration_items(self):
+ # Configuration gives keys in an unordered manner. Order them.
+ override_order = ["global", self.name, ":env:"]
+
+ # Pool the options into different groups
+ section_items = {name: [] for name in override_order}
+ for section_key, val in self.config.items():
+ # ignore empty values
+ if not val:
+ logger.debug(
+ "Ignoring configuration key '%s' as it's value is empty.",
+ section_key
+ )
+ continue
+
+ section, key = section_key.split(".", 1)
+ if section in override_order:
+ section_items[section].append((key, val))
+
+ # Yield each group in their override order
+ for section in override_order:
+ for key, val in section_items[section]:
+ yield key, val
+
+ def _update_defaults(self, defaults):
+ """Updates the given defaults with values from the config files and
+ the environ. Does a little special handling for certain types of
+ options (lists)."""
+
+ # Accumulate complex default state.
+ self.values = optparse.Values(self.defaults)
+ late_eval = set()
+ # Then set the options with those values
+ for key, val in self._get_ordered_configuration_items():
+ # '--' because configuration supports only long names
+ option = self.get_option('--' + key)
+
+ # Ignore options not present in this parser. E.g. non-globals put
+ # in [global] by users that want them to apply to all applicable
+ # commands.
+ if option is None:
+ continue
+
+ if option.action in ('store_true', 'store_false', 'count'):
+ try:
+ val = strtobool(val)
+ except ValueError:
+ error_msg = invalid_config_error_message(
+ option.action, key, val
+ )
+ self.error(error_msg)
+
+ elif option.action == 'append':
+ val = val.split()
+ val = [self.check_default(option, key, v) for v in val]
+ elif option.action == 'callback':
+ late_eval.add(option.dest)
+ opt_str = option.get_opt_string()
+ val = option.convert_value(opt_str, val)
+ # From take_action
+ args = option.callback_args or ()
+ kwargs = option.callback_kwargs or {}
+ option.callback(option, opt_str, val, self, *args, **kwargs)
+ else:
+ val = self.check_default(option, key, val)
+
+ defaults[option.dest] = val
+
+ for key in late_eval:
+ defaults[key] = getattr(self.values, key)
+ self.values = None
+ return defaults
+
+ def get_default_values(self):
+ """Overriding to make updating the defaults after instantiation of
+ the option parser possible, _update_defaults() does the dirty work."""
+ if not self.process_default_values:
+ # Old, pre-Optik 1.5 behaviour.
+ return optparse.Values(self.defaults)
+
+ # Load the configuration, or error out in case of an error
+ try:
+ self.config.load()
+ except ConfigurationError as err:
+ self.exit(UNKNOWN_ERROR, str(err))
+
+ defaults = self._update_defaults(self.defaults.copy()) # ours
+ for option in self._get_all_options():
+ default = defaults.get(option.dest)
+ if isinstance(default, string_types):
+ opt_str = option.get_opt_string()
+ defaults[option.dest] = option.check_value(opt_str, default)
+ return optparse.Values(defaults)
+
+ def error(self, msg):
+ self.print_usage(sys.stderr)
+ self.exit(UNKNOWN_ERROR, "%s\n" % msg)
+
+
+def invalid_config_error_message(action, key, val):
+ """Returns a better error message when invalid configuration option
+ is provided."""
+ if action in ('store_true', 'store_false'):
+ return ("{0} is not a valid value for {1} option, "
+ "please specify a boolean value like yes/no, "
+ "true/false or 1/0 instead.").format(val, key)
+
+ return ("{0} is not a valid value for {1} option, "
+ "please specify a numerical value like 1/0 "
+ "instead.").format(val, key)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/req_command.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/req_command.py
new file mode 100644
index 0000000000000000000000000000000000000000..9383b3b8dca756dea6a37b3f71cb3e556b60dfe9
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/req_command.py
@@ -0,0 +1,333 @@
+"""Contains the Command base classes that depend on PipSession.
+
+The classes in this module are in a separate module so the commands not
+needing download / PackageFinder capability don't unnecessarily import the
+PackageFinder machinery and all its vendored dependencies, etc.
+"""
+
+import logging
+import os
+from functools import partial
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.command_context import CommandContextMixIn
+from pip._internal.exceptions import CommandError
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.legacy_resolve import Resolver
+from pip._internal.models.selection_prefs import SelectionPreferences
+from pip._internal.network.download import Downloader
+from pip._internal.network.session import PipSession
+from pip._internal.operations.prepare import RequirementPreparer
+from pip._internal.req.constructors import (
+ install_req_from_editable,
+ install_req_from_line,
+ install_req_from_req_string,
+)
+from pip._internal.req.req_file import parse_requirements
+from pip._internal.self_outdated_check import (
+ make_link_collector,
+ pip_self_version_check,
+)
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from optparse import Values
+ from typing import List, Optional, Tuple
+ from pip._internal.cache import WheelCache
+ from pip._internal.models.target_python import TargetPython
+ from pip._internal.req.req_set import RequirementSet
+ from pip._internal.req.req_tracker import RequirementTracker
+ from pip._internal.utils.temp_dir import TempDirectory
+
+logger = logging.getLogger(__name__)
+
+
+class SessionCommandMixin(CommandContextMixIn):
+
+ """
+ A class mixin for command classes needing _build_session().
+ """
+ def __init__(self):
+ # type: () -> None
+ super(SessionCommandMixin, self).__init__()
+ self._session = None # Optional[PipSession]
+
+ @classmethod
+ def _get_index_urls(cls, options):
+ # type: (Values) -> Optional[List[str]]
+ """Return a list of index urls from user-provided options."""
+ index_urls = []
+ if not getattr(options, "no_index", False):
+ url = getattr(options, "index_url", None)
+ if url:
+ index_urls.append(url)
+ urls = getattr(options, "extra_index_urls", None)
+ if urls:
+ index_urls.extend(urls)
+ # Return None rather than an empty list
+ return index_urls or None
+
+ def get_default_session(self, options):
+ # type: (Values) -> PipSession
+ """Get a default-managed session."""
+ if self._session is None:
+ self._session = self.enter_context(self._build_session(options))
+ # there's no type annotation on requests.Session, so it's
+ # automatically ContextManager[Any] and self._session becomes Any,
+ # then https://github.com/python/mypy/issues/7696 kicks in
+ assert self._session is not None
+ return self._session
+
+ def _build_session(self, options, retries=None, timeout=None):
+ # type: (Values, Optional[int], Optional[int]) -> PipSession
+ assert not options.cache_dir or os.path.isabs(options.cache_dir)
+ session = PipSession(
+ cache=(
+ os.path.join(options.cache_dir, "http")
+ if options.cache_dir else None
+ ),
+ retries=retries if retries is not None else options.retries,
+ trusted_hosts=options.trusted_hosts,
+ index_urls=self._get_index_urls(options),
+ )
+
+ # Handle custom ca-bundles from the user
+ if options.cert:
+ session.verify = options.cert
+
+ # Handle SSL client certificate
+ if options.client_cert:
+ session.cert = options.client_cert
+
+ # Handle timeouts
+ if options.timeout or timeout:
+ session.timeout = (
+ timeout if timeout is not None else options.timeout
+ )
+
+ # Handle configured proxies
+ if options.proxy:
+ session.proxies = {
+ "http": options.proxy,
+ "https": options.proxy,
+ }
+
+ # Determine if we can prompt the user for authentication or not
+ session.auth.prompting = not options.no_input
+
+ return session
+
+
+class IndexGroupCommand(Command, SessionCommandMixin):
+
+ """
+ Abstract base class for commands with the index_group options.
+
+ This also corresponds to the commands that permit the pip version check.
+ """
+
+ def handle_pip_version_check(self, options):
+ # type: (Values) -> None
+ """
+ Do the pip version check if not disabled.
+
+ This overrides the default behavior of not doing the check.
+ """
+ # Make sure the index_group options are present.
+ assert hasattr(options, 'no_index')
+
+ if options.disable_pip_version_check or options.no_index:
+ return
+
+ # Otherwise, check if we're using the latest version of pip available.
+ session = self._build_session(
+ options,
+ retries=0,
+ timeout=min(5, options.timeout)
+ )
+ with session:
+ pip_self_version_check(session, options)
+
+
+class RequirementCommand(IndexGroupCommand):
+
+ @staticmethod
+ def make_requirement_preparer(
+ temp_build_dir, # type: TempDirectory
+ options, # type: Values
+ req_tracker, # type: RequirementTracker
+ session, # type: PipSession
+ finder, # type: PackageFinder
+ use_user_site, # type: bool
+ download_dir=None, # type: str
+ wheel_download_dir=None, # type: str
+ ):
+ # type: (...) -> RequirementPreparer
+ """
+ Create a RequirementPreparer instance for the given parameters.
+ """
+ downloader = Downloader(session, progress_bar=options.progress_bar)
+
+ temp_build_dir_path = temp_build_dir.path
+ assert temp_build_dir_path is not None
+
+ return RequirementPreparer(
+ build_dir=temp_build_dir_path,
+ src_dir=options.src_dir,
+ download_dir=download_dir,
+ wheel_download_dir=wheel_download_dir,
+ build_isolation=options.build_isolation,
+ req_tracker=req_tracker,
+ downloader=downloader,
+ finder=finder,
+ require_hashes=options.require_hashes,
+ use_user_site=use_user_site,
+ )
+
+ @staticmethod
+ def make_resolver(
+ preparer, # type: RequirementPreparer
+ finder, # type: PackageFinder
+ options, # type: Values
+ wheel_cache=None, # type: Optional[WheelCache]
+ use_user_site=False, # type: bool
+ ignore_installed=True, # type: bool
+ ignore_requires_python=False, # type: bool
+ force_reinstall=False, # type: bool
+ upgrade_strategy="to-satisfy-only", # type: str
+ use_pep517=None, # type: Optional[bool]
+ py_version_info=None # type: Optional[Tuple[int, ...]]
+ ):
+ # type: (...) -> Resolver
+ """
+ Create a Resolver instance for the given parameters.
+ """
+ make_install_req = partial(
+ install_req_from_req_string,
+ isolated=options.isolated_mode,
+ wheel_cache=wheel_cache,
+ use_pep517=use_pep517,
+ )
+ return Resolver(
+ preparer=preparer,
+ finder=finder,
+ make_install_req=make_install_req,
+ use_user_site=use_user_site,
+ ignore_dependencies=options.ignore_dependencies,
+ ignore_installed=ignore_installed,
+ ignore_requires_python=ignore_requires_python,
+ force_reinstall=force_reinstall,
+ upgrade_strategy=upgrade_strategy,
+ py_version_info=py_version_info,
+ )
+
+ def populate_requirement_set(
+ self,
+ requirement_set, # type: RequirementSet
+ args, # type: List[str]
+ options, # type: Values
+ finder, # type: PackageFinder
+ session, # type: PipSession
+ wheel_cache, # type: Optional[WheelCache]
+ ):
+ # type: (...) -> None
+ """
+ Marshal cmd line args into a requirement set.
+ """
+ for filename in options.constraints:
+ for req_to_add in parse_requirements(
+ filename,
+ constraint=True, finder=finder, options=options,
+ session=session, wheel_cache=wheel_cache):
+ req_to_add.is_direct = True
+ requirement_set.add_requirement(req_to_add)
+
+ for req in args:
+ req_to_add = install_req_from_line(
+ req, None, isolated=options.isolated_mode,
+ use_pep517=options.use_pep517,
+ wheel_cache=wheel_cache
+ )
+ req_to_add.is_direct = True
+ requirement_set.add_requirement(req_to_add)
+
+ for req in options.editables:
+ req_to_add = install_req_from_editable(
+ req,
+ isolated=options.isolated_mode,
+ use_pep517=options.use_pep517,
+ wheel_cache=wheel_cache
+ )
+ req_to_add.is_direct = True
+ requirement_set.add_requirement(req_to_add)
+
+ # NOTE: options.require_hashes may be set if --require-hashes is True
+ for filename in options.requirements:
+ for req_to_add in parse_requirements(
+ filename,
+ finder=finder, options=options, session=session,
+ wheel_cache=wheel_cache,
+ use_pep517=options.use_pep517):
+ req_to_add.is_direct = True
+ requirement_set.add_requirement(req_to_add)
+
+ # If any requirement has hash options, enable hash checking.
+ requirements = (
+ requirement_set.unnamed_requirements +
+ list(requirement_set.requirements.values())
+ )
+ if any(req.has_hash_options for req in requirements):
+ options.require_hashes = True
+
+ if not (args or options.editables or options.requirements):
+ opts = {'name': self.name}
+ if options.find_links:
+ raise CommandError(
+ 'You must give at least one requirement to %(name)s '
+ '(maybe you meant "pip %(name)s %(links)s"?)' %
+ dict(opts, links=' '.join(options.find_links)))
+ else:
+ raise CommandError(
+ 'You must give at least one requirement to %(name)s '
+ '(see "pip help %(name)s")' % opts)
+
+ @staticmethod
+ def trace_basic_info(finder):
+ # type: (PackageFinder) -> None
+ """
+ Trace basic information about the provided objects.
+ """
+ # Display where finder is looking for packages
+ search_scope = finder.search_scope
+ locations = search_scope.get_formatted_locations()
+ if locations:
+ logger.info(locations)
+
+ def _build_package_finder(
+ self,
+ options, # type: Values
+ session, # type: PipSession
+ target_python=None, # type: Optional[TargetPython]
+ ignore_requires_python=None, # type: Optional[bool]
+ ):
+ # type: (...) -> PackageFinder
+ """
+ Create a package finder appropriate to this requirement command.
+
+ :param ignore_requires_python: Whether to ignore incompatible
+ "Requires-Python" values in links. Defaults to False.
+ """
+ link_collector = make_link_collector(session, options=options)
+ selection_prefs = SelectionPreferences(
+ allow_yanked=True,
+ format_control=options.format_control,
+ allow_all_prereleases=options.pre,
+ prefer_binary=options.prefer_binary,
+ ignore_requires_python=ignore_requires_python,
+ )
+
+ return PackageFinder.create(
+ link_collector=link_collector,
+ selection_prefs=selection_prefs,
+ target_python=target_python,
+ )
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/status_codes.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/status_codes.py
new file mode 100644
index 0000000000000000000000000000000000000000..275360a3175abaeab86148d61b735904f96d72f6
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/cli/status_codes.py
@@ -0,0 +1,8 @@
+from __future__ import absolute_import
+
+SUCCESS = 0
+ERROR = 1
+UNKNOWN_ERROR = 2
+VIRTUALENV_NOT_FOUND = 3
+PREVIOUS_BUILD_DIR_ERROR = 4
+NO_MATCHES_FOUND = 23
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__init__.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..2a311f8fc8930735a39eee61cf701db1f2a35daa
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__init__.py
@@ -0,0 +1,114 @@
+"""
+Package containing all pip commands
+"""
+
+# The following comment should be removed at some point in the future.
+# mypy: disallow-untyped-defs=False
+
+from __future__ import absolute_import
+
+import importlib
+from collections import OrderedDict, namedtuple
+
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Any
+ from pip._internal.cli.base_command import Command
+
+
+CommandInfo = namedtuple('CommandInfo', 'module_path, class_name, summary')
+
+# The ordering matters for help display.
+# Also, even though the module path starts with the same
+# "pip._internal.commands" prefix in each case, we include the full path
+# because it makes testing easier (specifically when modifying commands_dict
+# in test setup / teardown by adding info for a FakeCommand class defined
+# in a test-related module).
+# Finally, we need to pass an iterable of pairs here rather than a dict
+# so that the ordering won't be lost when using Python 2.7.
+commands_dict = OrderedDict([
+ ('install', CommandInfo(
+ 'pip._internal.commands.install', 'InstallCommand',
+ 'Install packages.',
+ )),
+ ('download', CommandInfo(
+ 'pip._internal.commands.download', 'DownloadCommand',
+ 'Download packages.',
+ )),
+ ('uninstall', CommandInfo(
+ 'pip._internal.commands.uninstall', 'UninstallCommand',
+ 'Uninstall packages.',
+ )),
+ ('freeze', CommandInfo(
+ 'pip._internal.commands.freeze', 'FreezeCommand',
+ 'Output installed packages in requirements format.',
+ )),
+ ('list', CommandInfo(
+ 'pip._internal.commands.list', 'ListCommand',
+ 'List installed packages.',
+ )),
+ ('show', CommandInfo(
+ 'pip._internal.commands.show', 'ShowCommand',
+ 'Show information about installed packages.',
+ )),
+ ('check', CommandInfo(
+ 'pip._internal.commands.check', 'CheckCommand',
+ 'Verify installed packages have compatible dependencies.',
+ )),
+ ('config', CommandInfo(
+ 'pip._internal.commands.configuration', 'ConfigurationCommand',
+ 'Manage local and global configuration.',
+ )),
+ ('search', CommandInfo(
+ 'pip._internal.commands.search', 'SearchCommand',
+ 'Search PyPI for packages.',
+ )),
+ ('wheel', CommandInfo(
+ 'pip._internal.commands.wheel', 'WheelCommand',
+ 'Build wheels from your requirements.',
+ )),
+ ('hash', CommandInfo(
+ 'pip._internal.commands.hash', 'HashCommand',
+ 'Compute hashes of package archives.',
+ )),
+ ('completion', CommandInfo(
+ 'pip._internal.commands.completion', 'CompletionCommand',
+ 'A helper command used for command completion.',
+ )),
+ ('debug', CommandInfo(
+ 'pip._internal.commands.debug', 'DebugCommand',
+ 'Show information useful for debugging.',
+ )),
+ ('help', CommandInfo(
+ 'pip._internal.commands.help', 'HelpCommand',
+ 'Show help for commands.',
+ )),
+]) # type: OrderedDict[str, CommandInfo]
+
+
+def create_command(name, **kwargs):
+ # type: (str, **Any) -> Command
+ """
+ Create an instance of the Command class with the given name.
+ """
+ module_path, class_name, summary = commands_dict[name]
+ module = importlib.import_module(module_path)
+ command_class = getattr(module, class_name)
+ command = command_class(name=name, summary=summary, **kwargs)
+
+ return command
+
+
+def get_similar_commands(name):
+ """Command name auto-correct."""
+ from difflib import get_close_matches
+
+ name = name.lower()
+
+ close_commands = get_close_matches(name, commands_dict.keys())
+
+ if close_commands:
+ return close_commands[0]
+ else:
+ return False
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e9410aaac0c11d19ee1aabe208eb70e5f8a7f5dd
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/check.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/check.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..304ddbc56310412ff1b271bdf172f234e15d0554
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/check.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/completion.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/completion.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..312d263a72ef2619f65b75aea147e6b39e78b4d0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/completion.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e7e6bd6db54dea544bd8ddd5ebbafaf4214f9b3c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/debug.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/debug.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3b0b2d46b1f20eb8a2e21b81070849ddf6fcfe8a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/debug.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/download.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/download.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..95f3140fee5d9aaeffdb81c3273cfef2716de6c3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/download.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e4cd430b08a0b7d289bffcc5d63ba34529802f18
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/hash.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/hash.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e724e201490fd4a227d95939cfc9aebcbf04dc51
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/hash.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/help.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/help.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d59e7aaf9a9e36e48dfc081958db981aa87439a6
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/help.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/install.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/install.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e5e7c20a6756a43f974bdacb376edb98b1681aa8
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/install.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/list.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/list.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..38871a3b232fa0e94378605a62bb535523bbe7f3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/list.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/search.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/search.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3b4e1a74ecab8df4880f067aeba6ca52446c03e2
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/search.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/show.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/show.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c423c4f3022daecf4914c2a344a4e26853324c02
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/show.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e51ea04a31a754ce87b5c86f3f36bcaef5bb885a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f9f6660fb713bccfb06df2a8a4fecd29fe9058a2
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/check.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/check.py
new file mode 100644
index 0000000000000000000000000000000000000000..968944611ea7e284dac912c36d63816c7ca585b4
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/check.py
@@ -0,0 +1,45 @@
+# The following comment should be removed at some point in the future.
+# mypy: disallow-untyped-defs=False
+
+import logging
+
+from pip._internal.cli.base_command import Command
+from pip._internal.operations.check import (
+ check_package_set,
+ create_package_set_from_installed,
+)
+from pip._internal.utils.misc import write_output
+
+logger = logging.getLogger(__name__)
+
+
+class CheckCommand(Command):
+ """Verify installed packages have compatible dependencies."""
+
+ usage = """
+ %prog [options]"""
+
+ def run(self, options, args):
+ package_set, parsing_probs = create_package_set_from_installed()
+ missing, conflicting = check_package_set(package_set)
+
+ for project_name in missing:
+ version = package_set[project_name].version
+ for dependency in missing[project_name]:
+ write_output(
+ "%s %s requires %s, which is not installed.",
+ project_name, version, dependency[0],
+ )
+
+ for project_name in conflicting:
+ version = package_set[project_name].version
+ for dep_name, dep_version, req in conflicting[project_name]:
+ write_output(
+ "%s %s has requirement %s, but you have %s %s.",
+ project_name, version, req, dep_name, dep_version,
+ )
+
+ if missing or conflicting or parsing_probs:
+ return 1
+ else:
+ write_output("No broken requirements found.")
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/completion.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/completion.py
new file mode 100644
index 0000000000000000000000000000000000000000..c532806e3866e652063c92226716c11edbf116b1
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/completion.py
@@ -0,0 +1,96 @@
+# The following comment should be removed at some point in the future.
+# mypy: disallow-untyped-defs=False
+
+from __future__ import absolute_import
+
+import sys
+import textwrap
+
+from pip._internal.cli.base_command import Command
+from pip._internal.utils.misc import get_prog
+
+BASE_COMPLETION = """
+# pip %(shell)s completion start%(script)s# pip %(shell)s completion end
+"""
+
+COMPLETION_SCRIPTS = {
+ 'bash': """
+ _pip_completion()
+ {
+ COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\
+ COMP_CWORD=$COMP_CWORD \\
+ PIP_AUTO_COMPLETE=1 $1 2>/dev/null ) )
+ }
+ complete -o default -F _pip_completion %(prog)s
+ """,
+ 'zsh': """
+ function _pip_completion {
+ local words cword
+ read -Ac words
+ read -cn cword
+ reply=( $( COMP_WORDS="$words[*]" \\
+ COMP_CWORD=$(( cword-1 )) \\
+ PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null ))
+ }
+ compctl -K _pip_completion %(prog)s
+ """,
+ 'fish': """
+ function __fish_complete_pip
+ set -lx COMP_WORDS (commandline -o) ""
+ set -lx COMP_CWORD ( \\
+ math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\
+ )
+ set -lx PIP_AUTO_COMPLETE 1
+ string split \\ -- (eval $COMP_WORDS[1])
+ end
+ complete -fa "(__fish_complete_pip)" -c %(prog)s
+ """,
+}
+
+
+class CompletionCommand(Command):
+ """A helper command to be used for command completion."""
+
+ ignore_require_venv = True
+
+ def __init__(self, *args, **kw):
+ super(CompletionCommand, self).__init__(*args, **kw)
+
+ cmd_opts = self.cmd_opts
+
+ cmd_opts.add_option(
+ '--bash', '-b',
+ action='store_const',
+ const='bash',
+ dest='shell',
+ help='Emit completion code for bash')
+ cmd_opts.add_option(
+ '--zsh', '-z',
+ action='store_const',
+ const='zsh',
+ dest='shell',
+ help='Emit completion code for zsh')
+ cmd_opts.add_option(
+ '--fish', '-f',
+ action='store_const',
+ const='fish',
+ dest='shell',
+ help='Emit completion code for fish')
+
+ self.parser.insert_option_group(0, cmd_opts)
+
+ def run(self, options, args):
+ """Prints the completion code of the given shell"""
+ shells = COMPLETION_SCRIPTS.keys()
+ shell_options = ['--' + shell for shell in sorted(shells)]
+ if options.shell in shells:
+ script = textwrap.dedent(
+ COMPLETION_SCRIPTS.get(options.shell, '') % {
+ 'prog': get_prog(),
+ }
+ )
+ print(BASE_COMPLETION % {'script': script, 'shell': options.shell})
+ else:
+ sys.stderr.write(
+ 'ERROR: You must pass %s\n' % ' or '.join(shell_options)
+ )
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/configuration.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/configuration.py
new file mode 100644
index 0000000000000000000000000000000000000000..efcf5bb3699f9e9c3111adb6975dabaa8887b08f
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/configuration.py
@@ -0,0 +1,233 @@
+# The following comment should be removed at some point in the future.
+# mypy: disallow-untyped-defs=False
+
+import logging
+import os
+import subprocess
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.configuration import (
+ Configuration,
+ get_configuration_files,
+ kinds,
+)
+from pip._internal.exceptions import PipError
+from pip._internal.utils.misc import get_prog, write_output
+
+logger = logging.getLogger(__name__)
+
+
+class ConfigurationCommand(Command):
+ """Manage local and global configuration.
+
+ Subcommands:
+
+ list: List the active configuration (or from the file specified)
+ edit: Edit the configuration file in an editor
+ get: Get the value associated with name
+ set: Set the name=value
+ unset: Unset the value associated with name
+
+ If none of --user, --global and --site are passed, a virtual
+ environment configuration file is used if one is active and the file
+ exists. Otherwise, all modifications happen on the to the user file by
+ default.
+ """
+
+ ignore_require_venv = True
+ usage = """
+ %prog [<file-option>] list
+ %prog [<file-option>] [--editor <editor-path>] edit
+
+ %prog [<file-option>] get name
+ %prog [<file-option>] set name value
+ %prog [<file-option>] unset name
+ """
+
+ def __init__(self, *args, **kwargs):
+ super(ConfigurationCommand, self).__init__(*args, **kwargs)
+
+ self.configuration = None
+
+ self.cmd_opts.add_option(
+ '--editor',
+ dest='editor',
+ action='store',
+ default=None,
+ help=(
+ 'Editor to use to edit the file. Uses VISUAL or EDITOR '
+ 'environment variables if not provided.'
+ )
+ )
+
+ self.cmd_opts.add_option(
+ '--global',
+ dest='global_file',
+ action='store_true',
+ default=False,
+ help='Use the system-wide configuration file only'
+ )
+
+ self.cmd_opts.add_option(
+ '--user',
+ dest='user_file',
+ action='store_true',
+ default=False,
+ help='Use the user configuration file only'
+ )
+
+ self.cmd_opts.add_option(
+ '--site',
+ dest='site_file',
+ action='store_true',
+ default=False,
+ help='Use the current environment configuration file only'
+ )
+
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ def run(self, options, args):
+ handlers = {
+ "list": self.list_values,
+ "edit": self.open_in_editor,
+ "get": self.get_name,
+ "set": self.set_name_value,
+ "unset": self.unset_name
+ }
+
+ # Determine action
+ if not args or args[0] not in handlers:
+ logger.error("Need an action ({}) to perform.".format(
+ ", ".join(sorted(handlers)))
+ )
+ return ERROR
+
+ action = args[0]
+
+ # Determine which configuration files are to be loaded
+ # Depends on whether the command is modifying.
+ try:
+ load_only = self._determine_file(
+ options, need_value=(action in ["get", "set", "unset", "edit"])
+ )
+ except PipError as e:
+ logger.error(e.args[0])
+ return ERROR
+
+ # Load a new configuration
+ self.configuration = Configuration(
+ isolated=options.isolated_mode, load_only=load_only
+ )
+ self.configuration.load()
+
+ # Error handling happens here, not in the action-handlers.
+ try:
+ handlers[action](options, args[1:])
+ except PipError as e:
+ logger.error(e.args[0])
+ return ERROR
+
+ return SUCCESS
+
+ def _determine_file(self, options, need_value):
+ file_options = [key for key, value in (
+ (kinds.USER, options.user_file),
+ (kinds.GLOBAL, options.global_file),
+ (kinds.SITE, options.site_file),
+ ) if value]
+
+ if not file_options:
+ if not need_value:
+ return None
+ # Default to user, unless there's a site file.
+ elif any(
+ os.path.exists(site_config_file)
+ for site_config_file in get_configuration_files()[kinds.SITE]
+ ):
+ return kinds.SITE
+ else:
+ return kinds.USER
+ elif len(file_options) == 1:
+ return file_options[0]
+
+ raise PipError(
+ "Need exactly one file to operate upon "
+ "(--user, --site, --global) to perform."
+ )
+
+ def list_values(self, options, args):
+ self._get_n_args(args, "list", n=0)
+
+ for key, value in sorted(self.configuration.items()):
+ write_output("%s=%r", key, value)
+
+ def get_name(self, options, args):
+ key = self._get_n_args(args, "get [name]", n=1)
+ value = self.configuration.get_value(key)
+
+ write_output("%s", value)
+
+ def set_name_value(self, options, args):
+ key, value = self._get_n_args(args, "set [name] [value]", n=2)
+ self.configuration.set_value(key, value)
+
+ self._save_configuration()
+
+ def unset_name(self, options, args):
+ key = self._get_n_args(args, "unset [name]", n=1)
+ self.configuration.unset_value(key)
+
+ self._save_configuration()
+
+ def open_in_editor(self, options, args):
+ editor = self._determine_editor(options)
+
+ fname = self.configuration.get_file_to_edit()
+ if fname is None:
+ raise PipError("Could not determine appropriate file.")
+
+ try:
+ subprocess.check_call([editor, fname])
+ except subprocess.CalledProcessError as e:
+ raise PipError(
+ "Editor Subprocess exited with exit code {}"
+ .format(e.returncode)
+ )
+
+ def _get_n_args(self, args, example, n):
+ """Helper to make sure the command got the right number of arguments
+ """
+ if len(args) != n:
+ msg = (
+ 'Got unexpected number of arguments, expected {}. '
+ '(example: "{} config {}")'
+ ).format(n, get_prog(), example)
+ raise PipError(msg)
+
+ if n == 1:
+ return args[0]
+ else:
+ return args
+
+ def _save_configuration(self):
+ # We successfully ran a modifying command. Need to save the
+ # configuration.
+ try:
+ self.configuration.save()
+ except Exception:
+ logger.error(
+ "Unable to save configuration. Please report this as a bug.",
+ exc_info=1
+ )
+ raise PipError("Internal Error.")
+
+ def _determine_editor(self, options):
+ if options.editor is not None:
+ return options.editor
+ elif "VISUAL" in os.environ:
+ return os.environ["VISUAL"]
+ elif "EDITOR" in os.environ:
+ return os.environ["EDITOR"]
+ else:
+ raise PipError("Could not determine editor to use.")
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/debug.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/debug.py
new file mode 100644
index 0000000000000000000000000000000000000000..fe93b3a3926653c481c77830a4775585d2c488bd
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/debug.py
@@ -0,0 +1,142 @@
+# The following comment should be removed at some point in the future.
+# mypy: disallow-untyped-defs=False
+
+from __future__ import absolute_import
+
+import locale
+import logging
+import os
+import sys
+
+from pip._vendor.certifi import where
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.cmdoptions import make_target_python
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import get_pip_version
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Any, List, Optional
+ from optparse import Values
+
+logger = logging.getLogger(__name__)
+
+
+def show_value(name, value):
+ # type: (str, Optional[str]) -> None
+ logger.info('{}: {}'.format(name, value))
+
+
+def show_sys_implementation():
+ # type: () -> None
+ logger.info('sys.implementation:')
+ if hasattr(sys, 'implementation'):
+ implementation = sys.implementation # type: ignore
+ implementation_name = implementation.name
+ else:
+ implementation_name = ''
+
+ with indent_log():
+ show_value('name', implementation_name)
+
+
+def show_tags(options):
+ # type: (Values) -> None
+ tag_limit = 10
+
+ target_python = make_target_python(options)
+ tags = target_python.get_tags()
+
+ # Display the target options that were explicitly provided.
+ formatted_target = target_python.format_given()
+ suffix = ''
+ if formatted_target:
+ suffix = ' (target: {})'.format(formatted_target)
+
+ msg = 'Compatible tags: {}{}'.format(len(tags), suffix)
+ logger.info(msg)
+
+ if options.verbose < 1 and len(tags) > tag_limit:
+ tags_limited = True
+ tags = tags[:tag_limit]
+ else:
+ tags_limited = False
+
+ with indent_log():
+ for tag in tags:
+ logger.info(str(tag))
+
+ if tags_limited:
+ msg = (
+ '...\n'
+ '[First {tag_limit} tags shown. Pass --verbose to show all.]'
+ ).format(tag_limit=tag_limit)
+ logger.info(msg)
+
+
+def ca_bundle_info(config):
+ levels = set()
+ for key, value in config.items():
+ levels.add(key.split('.')[0])
+
+ if not levels:
+ return "Not specified"
+
+ levels_that_override_global = ['install', 'wheel', 'download']
+ global_overriding_level = [
+ level for level in levels if level in levels_that_override_global
+ ]
+ if not global_overriding_level:
+ return 'global'
+
+ levels.remove('global')
+ return ", ".join(levels)
+
+
+class DebugCommand(Command):
+ """
+ Display debug information.
+ """
+
+ usage = """
+ %prog <options>"""
+ ignore_require_venv = True
+
+ def __init__(self, *args, **kw):
+ super(DebugCommand, self).__init__(*args, **kw)
+
+ cmd_opts = self.cmd_opts
+ cmdoptions.add_target_python_options(cmd_opts)
+ self.parser.insert_option_group(0, cmd_opts)
+ self.parser.config.load()
+
+ def run(self, options, args):
+ # type: (Values, List[Any]) -> int
+ logger.warning(
+ "This command is only meant for debugging. "
+ "Do not use this with automation for parsing and getting these "
+ "details, since the output and options of this command may "
+ "change without notice."
+ )
+ show_value('pip version', get_pip_version())
+ show_value('sys.version', sys.version)
+ show_value('sys.executable', sys.executable)
+ show_value('sys.getdefaultencoding', sys.getdefaultencoding())
+ show_value('sys.getfilesystemencoding', sys.getfilesystemencoding())
+ show_value(
+ 'locale.getpreferredencoding', locale.getpreferredencoding(),
+ )
+ show_value('sys.platform', sys.platform)
+ show_sys_implementation()
+
+ show_value("'cert' config value", ca_bundle_info(self.parser.config))
+ show_value("REQUESTS_CA_BUNDLE", os.environ.get('REQUESTS_CA_BUNDLE'))
+ show_value("CURL_CA_BUNDLE", os.environ.get('CURL_CA_BUNDLE'))
+ show_value("pip._vendor.certifi.where()", where())
+
+ show_tags(options)
+
+ return SUCCESS
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/download.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/download.py
new file mode 100644
index 0000000000000000000000000000000000000000..24da3eb2a263217e068d815e73d0f78f64a89398
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/download.py
@@ -0,0 +1,147 @@
+# The following comment should be removed at some point in the future.
+# mypy: disallow-untyped-defs=False
+
+from __future__ import absolute_import
+
+import logging
+import os
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.cmdoptions import make_target_python
+from pip._internal.cli.req_command import RequirementCommand
+from pip._internal.req import RequirementSet
+from pip._internal.req.req_tracker import get_requirement_tracker
+from pip._internal.utils.misc import ensure_dir, normalize_path, write_output
+from pip._internal.utils.temp_dir import TempDirectory
+
+logger = logging.getLogger(__name__)
+
+
+class DownloadCommand(RequirementCommand):
+ """
+ Download packages from:
+
+ - PyPI (and other indexes) using requirement specifiers.
+ - VCS project urls.
+ - Local project directories.
+ - Local or remote source archives.
+
+ pip also supports downloading from "requirements files", which provide
+ an easy way to specify a whole environment to be downloaded.
+ """
+
+ usage = """
+ %prog [options] <requirement specifier> [package-index-options] ...
+ %prog [options] -r <requirements file> [package-index-options] ...
+ %prog [options] <vcs project url> ...
+ %prog [options] <local project path> ...
+ %prog [options] <archive url/path> ..."""
+
+ def __init__(self, *args, **kw):
+ super(DownloadCommand, self).__init__(*args, **kw)
+
+ cmd_opts = self.cmd_opts
+
+ cmd_opts.add_option(cmdoptions.constraints())
+ cmd_opts.add_option(cmdoptions.requirements())
+ cmd_opts.add_option(cmdoptions.build_dir())
+ cmd_opts.add_option(cmdoptions.no_deps())
+ cmd_opts.add_option(cmdoptions.global_options())
+ cmd_opts.add_option(cmdoptions.no_binary())
+ cmd_opts.add_option(cmdoptions.only_binary())
+ cmd_opts.add_option(cmdoptions.prefer_binary())
+ cmd_opts.add_option(cmdoptions.src())
+ cmd_opts.add_option(cmdoptions.pre())
+ cmd_opts.add_option(cmdoptions.no_clean())
+ cmd_opts.add_option(cmdoptions.require_hashes())
+ cmd_opts.add_option(cmdoptions.progress_bar())
+ cmd_opts.add_option(cmdoptions.no_build_isolation())
+ cmd_opts.add_option(cmdoptions.use_pep517())
+ cmd_opts.add_option(cmdoptions.no_use_pep517())
+
+ cmd_opts.add_option(
+ '-d', '--dest', '--destination-dir', '--destination-directory',
+ dest='download_dir',
+ metavar='dir',
+ default=os.curdir,
+ help=("Download packages into <dir>."),
+ )
+
+ cmdoptions.add_target_python_options(cmd_opts)
+
+ index_opts = cmdoptions.make_option_group(
+ cmdoptions.index_group,
+ self.parser,
+ )
+
+ self.parser.insert_option_group(0, index_opts)
+ self.parser.insert_option_group(0, cmd_opts)
+
+ def run(self, options, args):
+ options.ignore_installed = True
+ # editable doesn't really make sense for `pip download`, but the bowels
+ # of the RequirementSet code require that property.
+ options.editables = []
+
+ cmdoptions.check_dist_restriction(options)
+
+ options.download_dir = normalize_path(options.download_dir)
+
+ ensure_dir(options.download_dir)
+
+ session = self.get_default_session(options)
+
+ target_python = make_target_python(options)
+ finder = self._build_package_finder(
+ options=options,
+ session=session,
+ target_python=target_python,
+ )
+ build_delete = (not (options.no_clean or options.build_dir))
+
+ with get_requirement_tracker() as req_tracker, TempDirectory(
+ options.build_dir, delete=build_delete, kind="download"
+ ) as directory:
+
+ requirement_set = RequirementSet()
+ self.populate_requirement_set(
+ requirement_set,
+ args,
+ options,
+ finder,
+ session,
+ None
+ )
+
+ preparer = self.make_requirement_preparer(
+ temp_build_dir=directory,
+ options=options,
+ req_tracker=req_tracker,
+ session=session,
+ finder=finder,
+ download_dir=options.download_dir,
+ use_user_site=False,
+ )
+
+ resolver = self.make_resolver(
+ preparer=preparer,
+ finder=finder,
+ options=options,
+ py_version_info=options.python_version,
+ )
+
+ self.trace_basic_info(finder)
+
+ resolver.resolve(requirement_set)
+
+ downloaded = ' '.join([
+ req.name for req in requirement_set.successfully_downloaded
+ ])
+ if downloaded:
+ write_output('Successfully downloaded %s', downloaded)
+
+ # Clean up
+ if not options.no_clean:
+ requirement_set.cleanup_files()
+
+ return requirement_set
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/freeze.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/freeze.py
new file mode 100644
index 0000000000000000000000000000000000000000..e96c0833f5f1e7f643a588cf6bf30e9ca2282bc5
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/freeze.py
@@ -0,0 +1,103 @@
+# The following comment should be removed at some point in the future.
+# mypy: disallow-untyped-defs=False
+
+from __future__ import absolute_import
+
+import sys
+
+from pip._internal.cache import WheelCache
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.base_command import Command
+from pip._internal.models.format_control import FormatControl
+from pip._internal.operations.freeze import freeze
+from pip._internal.utils.compat import stdlib_pkgs
+
+DEV_PKGS = {'pip', 'setuptools', 'distribute', 'wheel', 'pkg-resources'}
+
+
+class FreezeCommand(Command):
+ """
+ Output installed packages in requirements format.
+
+ packages are listed in a case-insensitive sorted order.
+ """
+
+ usage = """
+ %prog [options]"""
+ log_streams = ("ext://sys.stderr", "ext://sys.stderr")
+
+ def __init__(self, *args, **kw):
+ super(FreezeCommand, self).__init__(*args, **kw)
+
+ self.cmd_opts.add_option(
+ '-r', '--requirement',
+ dest='requirements',
+ action='append',
+ default=[],
+ metavar='file',
+ help="Use the order in the given requirements file and its "
+ "comments when generating output. This option can be "
+ "used multiple times.")
+ self.cmd_opts.add_option(
+ '-f', '--find-links',
+ dest='find_links',
+ action='append',
+ default=[],
+ metavar='URL',
+ help='URL for finding packages, which will be added to the '
+ 'output.')
+ self.cmd_opts.add_option(
+ '-l', '--local',
+ dest='local',
+ action='store_true',
+ default=False,
+ help='If in a virtualenv that has global access, do not output '
+ 'globally-installed packages.')
+ self.cmd_opts.add_option(
+ '--user',
+ dest='user',
+ action='store_true',
+ default=False,
+ help='Only output packages installed in user-site.')
+ self.cmd_opts.add_option(cmdoptions.list_path())
+ self.cmd_opts.add_option(
+ '--all',
+ dest='freeze_all',
+ action='store_true',
+ help='Do not skip these packages in the output:'
+ ' %s' % ', '.join(DEV_PKGS))
+ self.cmd_opts.add_option(
+ '--exclude-editable',
+ dest='exclude_editable',
+ action='store_true',
+ help='Exclude editable package from output.')
+
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ def run(self, options, args):
+ format_control = FormatControl(set(), set())
+ wheel_cache = WheelCache(options.cache_dir, format_control)
+ skip = set(stdlib_pkgs)
+ if not options.freeze_all:
+ skip.update(DEV_PKGS)
+
+ cmdoptions.check_list_path_option(options)
+
+ freeze_kwargs = dict(
+ requirement=options.requirements,
+ find_links=options.find_links,
+ local_only=options.local,
+ user_only=options.user,
+ paths=options.path,
+ skip_regex=options.skip_requirements_regex,
+ isolated=options.isolated_mode,
+ wheel_cache=wheel_cache,
+ skip=skip,
+ exclude_editable=options.exclude_editable,
+ )
+
+ try:
+ for line in freeze(**freeze_kwargs):
+ sys.stdout.write(line + '\n')
+ finally:
+ wheel_cache.cleanup()
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/hash.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/hash.py
new file mode 100644
index 0000000000000000000000000000000000000000..1dc7fb0eac936b625c79d20a7ec8267179a15a29
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/hash.py
@@ -0,0 +1,58 @@
+# The following comment should be removed at some point in the future.
+# mypy: disallow-untyped-defs=False
+
+from __future__ import absolute_import
+
+import hashlib
+import logging
+import sys
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR
+from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES
+from pip._internal.utils.misc import read_chunks, write_output
+
+logger = logging.getLogger(__name__)
+
+
+class HashCommand(Command):
+ """
+ Compute a hash of a local package archive.
+
+ These can be used with --hash in a requirements file to do repeatable
+ installs.
+ """
+
+ usage = '%prog [options] <file> ...'
+ ignore_require_venv = True
+
+ def __init__(self, *args, **kw):
+ super(HashCommand, self).__init__(*args, **kw)
+ self.cmd_opts.add_option(
+ '-a', '--algorithm',
+ dest='algorithm',
+ choices=STRONG_HASHES,
+ action='store',
+ default=FAVORITE_HASH,
+ help='The hash algorithm to use: one of %s' %
+ ', '.join(STRONG_HASHES))
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ def run(self, options, args):
+ if not args:
+ self.parser.print_usage(sys.stderr)
+ return ERROR
+
+ algorithm = options.algorithm
+ for path in args:
+ write_output('%s:\n--hash=%s:%s',
+ path, algorithm, _hash_of_file(path, algorithm))
+
+
+def _hash_of_file(path, algorithm):
+ """Return the hash digest of a file."""
+ with open(path, 'rb') as archive:
+ hash = hashlib.new(algorithm)
+ for chunk in read_chunks(archive):
+ hash.update(chunk)
+ return hash.hexdigest()
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/help.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/help.py
new file mode 100644
index 0000000000000000000000000000000000000000..75af999b41e676f3abca4e2278b06aa404a95479
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/help.py
@@ -0,0 +1,41 @@
+# The following comment should be removed at some point in the future.
+# mypy: disallow-untyped-defs=False
+
+from __future__ import absolute_import
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.exceptions import CommandError
+
+
+class HelpCommand(Command):
+ """Show help for commands"""
+
+ usage = """
+ %prog <command>"""
+ ignore_require_venv = True
+
+ def run(self, options, args):
+ from pip._internal.commands import (
+ commands_dict, create_command, get_similar_commands,
+ )
+
+ try:
+ # 'pip help' with no args is handled by pip.__init__.parseopt()
+ cmd_name = args[0] # the command we need help for
+ except IndexError:
+ return SUCCESS
+
+ if cmd_name not in commands_dict:
+ guess = get_similar_commands(cmd_name)
+
+ msg = ['unknown command "%s"' % cmd_name]
+ if guess:
+ msg.append('maybe you meant "%s"' % guess)
+
+ raise CommandError(' - '.join(msg))
+
+ command = create_command(cmd_name)
+ command.parser.print_help()
+
+ return SUCCESS
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/install.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/install.py
new file mode 100644
index 0000000000000000000000000000000000000000..cb2fb280c986a60fd57ec34af5185e423169722e
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/install.py
@@ -0,0 +1,727 @@
+# The following comment should be removed at some point in the future.
+# It's included for now because without it InstallCommand.run() has a
+# couple errors where we have to know req.name is str rather than
+# Optional[str] for the InstallRequirement req.
+# mypy: strict-optional=False
+# mypy: disallow-untyped-defs=False
+
+from __future__ import absolute_import
+
+import errno
+import logging
+import operator
+import os
+import shutil
+import site
+from optparse import SUPPRESS_HELP
+
+from pip._vendor import pkg_resources
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.cache import WheelCache
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.cmdoptions import make_target_python
+from pip._internal.cli.req_command import RequirementCommand
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.exceptions import (
+ CommandError,
+ InstallationError,
+ PreviousBuildDirError,
+)
+from pip._internal.locations import distutils_scheme
+from pip._internal.operations.check import check_install_conflicts
+from pip._internal.req import RequirementSet, install_given_reqs
+from pip._internal.req.req_tracker import get_requirement_tracker
+from pip._internal.utils.deprecation import deprecated
+from pip._internal.utils.distutils_args import parse_distutils_args
+from pip._internal.utils.filesystem import test_writable_dir
+from pip._internal.utils.misc import (
+ ensure_dir,
+ get_installed_version,
+ protect_pip_from_modification_on_windows,
+ write_output,
+)
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.virtualenv import virtualenv_no_global
+from pip._internal.wheel_builder import build, should_build_for_install_command
+
+if MYPY_CHECK_RUNNING:
+ from optparse import Values
+ from typing import Any, Iterable, List, Optional
+
+ from pip._internal.models.format_control import FormatControl
+ from pip._internal.req.req_install import InstallRequirement
+ from pip._internal.wheel_builder import BinaryAllowedPredicate
+
+from pip._internal.locations import running_under_virtualenv
+
+logger = logging.getLogger(__name__)
+
+
+def get_check_binary_allowed(format_control):
+ # type: (FormatControl) -> BinaryAllowedPredicate
+ def check_binary_allowed(req):
+ # type: (InstallRequirement) -> bool
+ if req.use_pep517:
+ return True
+ canonical_name = canonicalize_name(req.name)
+ allowed_formats = format_control.get_allowed_formats(canonical_name)
+ return "binary" in allowed_formats
+
+ return check_binary_allowed
+
+
+class InstallCommand(RequirementCommand):
+ """
+ Install packages from:
+
+ - PyPI (and other indexes) using requirement specifiers.
+ - VCS project urls.
+ - Local project directories.
+ - Local or remote source archives.
+
+ pip also supports installing from "requirements files", which provide
+ an easy way to specify a whole environment to be installed.
+ """
+
+ usage = """
+ %prog [options] <requirement specifier> [package-index-options] ...
+ %prog [options] -r <requirements file> [package-index-options] ...
+ %prog [options] [-e] <vcs project url> ...
+ %prog [options] [-e] <local project path> ...
+ %prog [options] <archive url/path> ..."""
+
+ def __init__(self, *args, **kw):
+ super(InstallCommand, self).__init__(*args, **kw)
+
+ cmd_opts = self.cmd_opts
+
+ cmd_opts.add_option(cmdoptions.requirements())
+ cmd_opts.add_option(cmdoptions.constraints())
+ cmd_opts.add_option(cmdoptions.no_deps())
+ cmd_opts.add_option(cmdoptions.pre())
+
+ cmd_opts.add_option(cmdoptions.editable())
+ cmd_opts.add_option(
+ '-t', '--target',
+ dest='target_dir',
+ metavar='dir',
+ default=None,
+ help='Install packages into <dir>. '
+ 'By default this will not replace existing files/folders in '
+ '<dir>. Use --upgrade to replace existing packages in <dir> '
+ 'with new versions.'
+ )
+ cmdoptions.add_target_python_options(cmd_opts)
+
+ cmd_opts.add_option(
+ '--user',
+ dest='use_user_site',
+ action='store_true',
+ help="Install to the Python user install directory for your "
+ "platform. Typically ~/.local/, or %APPDATA%\\Python on "
+ "Windows. (See the Python documentation for site.USER_BASE "
+ "for full details.) On Debian systems, this is the "
+ "default when running outside of a virtual environment "
+ "and not as root.")
+
+ cmd_opts.add_option(
+ '--no-user',
+ dest='use_system_location',
+ action='store_true',
+ help=SUPPRESS_HELP)
+ cmd_opts.add_option(
+ '--root',
+ dest='root_path',
+ metavar='dir',
+ default=None,
+ help="Install everything relative to this alternate root "
+ "directory.")
+ cmd_opts.add_option(
+ '--prefix',
+ dest='prefix_path',
+ metavar='dir',
+ default=None,
+ help="Installation prefix where lib, bin and other top-level "
+ "folders are placed")
+
+ cmd_opts.add_option(
+ '--system',
+ dest='use_system_location',
+ action='store_true',
+ help="Install using the system scheme (overrides --user on "
+ "Debian systems)")
+
+ cmd_opts.add_option(cmdoptions.build_dir())
+
+ cmd_opts.add_option(cmdoptions.src())
+
+ cmd_opts.add_option(
+ '-U', '--upgrade',
+ dest='upgrade',
+ action='store_true',
+ help='Upgrade all specified packages to the newest available '
+ 'version. The handling of dependencies depends on the '
+ 'upgrade-strategy used.'
+ )
+
+ cmd_opts.add_option(
+ '--upgrade-strategy',
+ dest='upgrade_strategy',
+ default='only-if-needed',
+ choices=['only-if-needed', 'eager'],
+ help='Determines how dependency upgrading should be handled '
+ '[default: %default]. '
+ '"eager" - dependencies are upgraded regardless of '
+ 'whether the currently installed version satisfies the '
+ 'requirements of the upgraded package(s). '
+ '"only-if-needed" - are upgraded only when they do not '
+ 'satisfy the requirements of the upgraded package(s).'
+ )
+
+ cmd_opts.add_option(
+ '--force-reinstall',
+ dest='force_reinstall',
+ action='store_true',
+ help='Reinstall all packages even if they are already '
+ 'up-to-date.')
+
+ cmd_opts.add_option(
+ '-I', '--ignore-installed',
+ dest='ignore_installed',
+ action='store_true',
+ help='Ignore the installed packages, overwriting them. '
+ 'This can break your system if the existing package '
+ 'is of a different version or was installed '
+ 'with a different package manager!'
+ )
+
+ cmd_opts.add_option(cmdoptions.ignore_requires_python())
+ cmd_opts.add_option(cmdoptions.no_build_isolation())
+ cmd_opts.add_option(cmdoptions.use_pep517())
+ cmd_opts.add_option(cmdoptions.no_use_pep517())
+
+ cmd_opts.add_option(cmdoptions.install_options())
+ cmd_opts.add_option(cmdoptions.global_options())
+
+ cmd_opts.add_option(
+ "--compile",
+ action="store_true",
+ dest="compile",
+ default=True,
+ help="Compile Python source files to bytecode",
+ )
+
+ cmd_opts.add_option(
+ "--no-compile",
+ action="store_false",
+ dest="compile",
+ help="Do not compile Python source files to bytecode",
+ )
+
+ cmd_opts.add_option(
+ "--no-warn-script-location",
+ action="store_false",
+ dest="warn_script_location",
+ default=True,
+ help="Do not warn when installing scripts outside PATH",
+ )
+ cmd_opts.add_option(
+ "--no-warn-conflicts",
+ action="store_false",
+ dest="warn_about_conflicts",
+ default=True,
+ help="Do not warn about broken dependencies",
+ )
+
+ cmd_opts.add_option(cmdoptions.no_binary())
+ cmd_opts.add_option(cmdoptions.only_binary())
+ cmd_opts.add_option(cmdoptions.prefer_binary())
+ cmd_opts.add_option(cmdoptions.no_clean())
+ cmd_opts.add_option(cmdoptions.require_hashes())
+ cmd_opts.add_option(cmdoptions.progress_bar())
+
+ index_opts = cmdoptions.make_option_group(
+ cmdoptions.index_group,
+ self.parser,
+ )
+
+ self.parser.insert_option_group(0, index_opts)
+ self.parser.insert_option_group(0, cmd_opts)
+
+ def run(self, options, args):
+ # type: (Values, List[Any]) -> int
+ cmdoptions.check_install_build_global(options)
+ upgrade_strategy = "to-satisfy-only"
+ if options.upgrade:
+ upgrade_strategy = options.upgrade_strategy
+
+ cmdoptions.check_dist_restriction(options, check_target=True)
+
+ if options.python_version:
+ python_versions = [options.python_version]
+ else:
+ python_versions = None
+
+ # compute install location defaults
+ if (not options.use_user_site and not options.prefix_path and not
+ options.target_dir and not options.use_system_location):
+ if not running_under_virtualenv() and os.geteuid() != 0:
+ options.use_user_site = True
+
+ if options.use_system_location:
+ options.use_user_site = False
+
+ options.src_dir = os.path.abspath(options.src_dir)
+ install_options = options.install_options or []
+
+ options.use_user_site = decide_user_install(
+ options.use_user_site,
+ prefix_path=options.prefix_path,
+ target_dir=options.target_dir,
+ root_path=options.root_path,
+ isolated_mode=options.isolated_mode,
+ )
+
+ target_temp_dir = None # type: Optional[TempDirectory]
+ target_temp_dir_path = None # type: Optional[str]
+ if options.target_dir:
+ options.ignore_installed = True
+ options.target_dir = os.path.abspath(options.target_dir)
+ if (os.path.exists(options.target_dir) and not
+ os.path.isdir(options.target_dir)):
+ raise CommandError(
+ "Target path exists but is not a directory, will not "
+ "continue."
+ )
+
+ # Create a target directory for using with the target option
+ target_temp_dir = TempDirectory(kind="target")
+ target_temp_dir_path = target_temp_dir.path
+
+ global_options = options.global_options or []
+
+ session = self.get_default_session(options)
+
+ target_python = make_target_python(options)
+ finder = self._build_package_finder(
+ options=options,
+ session=session,
+ target_python=target_python,
+ ignore_requires_python=options.ignore_requires_python,
+ )
+ build_delete = (not (options.no_clean or options.build_dir))
+ wheel_cache = WheelCache(options.cache_dir, options.format_control)
+
+ with get_requirement_tracker() as req_tracker, TempDirectory(
+ options.build_dir, delete=build_delete, kind="install"
+ ) as directory:
+ requirement_set = RequirementSet(
+ check_supported_wheels=not options.target_dir,
+ )
+
+ try:
+ self.populate_requirement_set(
+ requirement_set, args, options, finder, session,
+ wheel_cache
+ )
+
+ warn_deprecated_install_options(
+ requirement_set, options.install_options
+ )
+
+ preparer = self.make_requirement_preparer(
+ temp_build_dir=directory,
+ options=options,
+ req_tracker=req_tracker,
+ session=session,
+ finder=finder,
+ use_user_site=options.use_user_site,
+ )
+ resolver = self.make_resolver(
+ preparer=preparer,
+ finder=finder,
+ options=options,
+ wheel_cache=wheel_cache,
+ use_user_site=options.use_user_site,
+ ignore_installed=options.ignore_installed,
+ ignore_requires_python=options.ignore_requires_python,
+ force_reinstall=options.force_reinstall,
+ upgrade_strategy=upgrade_strategy,
+ use_pep517=options.use_pep517,
+ )
+
+ self.trace_basic_info(finder)
+
+ resolver.resolve(requirement_set)
+
+ try:
+ pip_req = requirement_set.get_requirement("pip")
+ except KeyError:
+ modifying_pip = None
+ else:
+ # If we're not replacing an already installed pip,
+ # we're not modifying it.
+ modifying_pip = pip_req.satisfied_by is None
+ protect_pip_from_modification_on_windows(
+ modifying_pip=modifying_pip
+ )
+
+ check_binary_allowed = get_check_binary_allowed(
+ finder.format_control
+ )
+
+ reqs_to_build = [
+ r for r in requirement_set.requirements.values()
+ if should_build_for_install_command(
+ r, check_binary_allowed
+ )
+ ]
+
+ _, build_failures = build(
+ reqs_to_build,
+ wheel_cache=wheel_cache,
+ build_options=[],
+ global_options=[],
+ )
+
+ # If we're using PEP 517, we cannot do a direct install
+ # so we fail here.
+ # We don't care about failures building legacy
+ # requirements, as we'll fall through to a direct
+ # install for those.
+ pep517_build_failures = [
+ r for r in build_failures if r.use_pep517
+ ]
+ if pep517_build_failures:
+ raise InstallationError(
+ "Could not build wheels for {} which use"
+ " PEP 517 and cannot be installed directly".format(
+ ", ".join(r.name for r in pep517_build_failures)))
+
+ to_install = resolver.get_installation_order(
+ requirement_set
+ )
+
+ # Consistency Checking of the package set we're installing.
+ should_warn_about_conflicts = (
+ not options.ignore_dependencies and
+ options.warn_about_conflicts
+ )
+ if should_warn_about_conflicts:
+ self._warn_about_conflicts(to_install)
+
+ # Don't warn about script install locations if
+ # --target has been specified
+ warn_script_location = options.warn_script_location
+ if options.target_dir:
+ warn_script_location = False
+
+ installed = install_given_reqs(
+ to_install,
+ install_options,
+ global_options,
+ root=options.root_path,
+ home=target_temp_dir_path,
+ prefix=options.prefix_path,
+ pycompile=options.compile,
+ warn_script_location=warn_script_location,
+ use_user_site=options.use_user_site,
+ )
+
+ lib_locations = get_lib_location_guesses(
+ user=options.use_user_site,
+ home=target_temp_dir_path,
+ root=options.root_path,
+ prefix=options.prefix_path,
+ isolated=options.isolated_mode,
+ )
+ working_set = pkg_resources.WorkingSet(lib_locations)
+
+ installed.sort(key=operator.attrgetter('name'))
+ items = []
+ for result in installed:
+ item = result.name
+ try:
+ installed_version = get_installed_version(
+ result.name, working_set=working_set
+ )
+ if installed_version:
+ item += '-' + installed_version
+ except Exception:
+ pass
+ items.append(item)
+ installed_desc = ' '.join(items)
+ if installed_desc:
+ write_output(
+ 'Successfully installed %s', installed_desc,
+ )
+ except EnvironmentError as error:
+ show_traceback = (self.verbosity >= 1)
+
+ message = create_env_error_message(
+ error, show_traceback, options.use_user_site,
+ )
+ logger.error(message, exc_info=show_traceback)
+
+ return ERROR
+ except PreviousBuildDirError:
+ options.no_clean = True
+ raise
+ finally:
+ # Clean up
+ if not options.no_clean:
+ requirement_set.cleanup_files()
+ wheel_cache.cleanup()
+
+ if options.target_dir:
+ self._handle_target_dir(
+ options.target_dir, target_temp_dir, options.upgrade
+ )
+
+ return SUCCESS
+
+ def _handle_target_dir(self, target_dir, target_temp_dir, upgrade):
+ ensure_dir(target_dir)
+
+ # Checking both purelib and platlib directories for installed
+ # packages to be moved to target directory
+ lib_dir_list = []
+
+ with target_temp_dir:
+ # Checking both purelib and platlib directories for installed
+ # packages to be moved to target directory
+ scheme = distutils_scheme('', home=target_temp_dir.path)
+ purelib_dir = scheme['purelib']
+ platlib_dir = scheme['platlib']
+ data_dir = scheme['data']
+
+ if os.path.exists(purelib_dir):
+ lib_dir_list.append(purelib_dir)
+ if os.path.exists(platlib_dir) and platlib_dir != purelib_dir:
+ lib_dir_list.append(platlib_dir)
+ if os.path.exists(data_dir):
+ lib_dir_list.append(data_dir)
+
+ for lib_dir in lib_dir_list:
+ for item in os.listdir(lib_dir):
+ if lib_dir == data_dir:
+ ddir = os.path.join(data_dir, item)
+ if any(s.startswith(ddir) for s in lib_dir_list[:-1]):
+ continue
+ target_item_dir = os.path.join(target_dir, item)
+ if os.path.exists(target_item_dir):
+ if not upgrade:
+ logger.warning(
+ 'Target directory %s already exists. Specify '
+ '--upgrade to force replacement.',
+ target_item_dir
+ )
+ continue
+ if os.path.islink(target_item_dir):
+ logger.warning(
+ 'Target directory %s already exists and is '
+ 'a link. Pip will not automatically replace '
+ 'links, please remove if replacement is '
+ 'desired.',
+ target_item_dir
+ )
+ continue
+ if os.path.isdir(target_item_dir):
+ shutil.rmtree(target_item_dir)
+ else:
+ os.remove(target_item_dir)
+
+ shutil.move(
+ os.path.join(lib_dir, item),
+ target_item_dir
+ )
+
+ def _warn_about_conflicts(self, to_install):
+ try:
+ package_set, _dep_info = check_install_conflicts(to_install)
+ except Exception:
+ logger.error("Error checking for conflicts.", exc_info=True)
+ return
+ missing, conflicting = _dep_info
+
+ # NOTE: There is some duplication here from pip check
+ for project_name in missing:
+ version = package_set[project_name][0]
+ for dependency in missing[project_name]:
+ logger.critical(
+ "%s %s requires %s, which is not installed.",
+ project_name, version, dependency[1],
+ )
+
+ for project_name in conflicting:
+ version = package_set[project_name][0]
+ for dep_name, dep_version, req in conflicting[project_name]:
+ logger.critical(
+ "%s %s has requirement %s, but you'll have %s %s which is "
+ "incompatible.",
+ project_name, version, req, dep_name, dep_version,
+ )
+
+
+def get_lib_location_guesses(*args, **kwargs):
+ scheme = distutils_scheme('', *args, **kwargs)
+ return [scheme['purelib'], scheme['platlib']]
+
+
+def site_packages_writable(**kwargs):
+ return all(
+ test_writable_dir(d) for d in set(get_lib_location_guesses(**kwargs))
+ )
+
+
+def decide_user_install(
+ use_user_site, # type: Optional[bool]
+ prefix_path=None, # type: Optional[str]
+ target_dir=None, # type: Optional[str]
+ root_path=None, # type: Optional[str]
+ isolated_mode=False, # type: bool
+):
+ # type: (...) -> bool
+ """Determine whether to do a user install based on the input options.
+
+ If use_user_site is False, no additional checks are done.
+ If use_user_site is True, it is checked for compatibility with other
+ options.
+ If use_user_site is None, the default behaviour depends on the environment,
+ which is provided by the other arguments.
+ """
+ # In some cases (config from tox), use_user_site can be set to an integer
+ # rather than a bool, which 'use_user_site is False' wouldn't catch.
+ if (use_user_site is not None) and (not use_user_site):
+ logger.debug("Non-user install by explicit request")
+ return False
+
+ if use_user_site:
+ if prefix_path:
+ raise CommandError(
+ "Can not combine '--user' and '--prefix' as they imply "
+ "different installation locations"
+ )
+ if virtualenv_no_global():
+ raise InstallationError(
+ "Can not perform a '--user' install. User site-packages "
+ "are not visible in this virtualenv."
+ )
+ logger.debug("User install by explicit request")
+ return True
+
+ # If we are here, user installs have not been explicitly requested/avoided
+ assert use_user_site is None
+
+ # user install incompatible with --prefix/--target
+ if prefix_path or target_dir:
+ logger.debug("Non-user install due to --prefix or --target option")
+ return False
+
+ # If user installs are not enabled, choose a non-user install
+ if not site.ENABLE_USER_SITE:
+ logger.debug("Non-user install because user site-packages disabled")
+ return False
+
+ # If we have permission for a non-user install, do that,
+ # otherwise do a user install.
+ if site_packages_writable(root=root_path, isolated=isolated_mode):
+ logger.debug("Non-user install because site-packages writeable")
+ return False
+
+ logger.info("Defaulting to user installation because normal site-packages "
+ "is not writeable")
+ return True
+
+
+def warn_deprecated_install_options(requirement_set, options):
+ # type: (RequirementSet, Optional[List[str]]) -> None
+ """If any location-changing --install-option arguments were passed for
+ requirements or on the command-line, then show a deprecation warning.
+ """
+ def format_options(option_names):
+ # type: (Iterable[str]) -> List[str]
+ return ["--{}".format(name.replace("_", "-")) for name in option_names]
+
+ requirements = (
+ requirement_set.unnamed_requirements +
+ list(requirement_set.requirements.values())
+ )
+
+ offenders = []
+
+ for requirement in requirements:
+ install_options = requirement.options.get("install_options", [])
+ location_options = parse_distutils_args(install_options)
+ if location_options:
+ offenders.append(
+ "{!r} from {}".format(
+ format_options(location_options.keys()), requirement
+ )
+ )
+
+ if options:
+ location_options = parse_distutils_args(options)
+ if location_options:
+ offenders.append(
+ "{!r} from command line".format(
+ format_options(location_options.keys())
+ )
+ )
+
+ if not offenders:
+ return
+
+ deprecated(
+ reason=(
+ "Location-changing options found in --install-option: {}. "
+ "This configuration may cause unexpected behavior and is "
+ "unsupported.".format(
+ "; ".join(offenders)
+ )
+ ),
+ replacement=(
+ "using pip-level options like --user, --prefix, --root, and "
+ "--target"
+ ),
+ gone_in="20.2",
+ issue=7309,
+ )
+
+
+def create_env_error_message(error, show_traceback, using_user_site):
+ """Format an error message for an EnvironmentError
+
+ It may occur anytime during the execution of the install command.
+ """
+ parts = []
+
+ # Mention the error if we are not going to show a traceback
+ parts.append("Could not install packages due to an EnvironmentError")
+ if not show_traceback:
+ parts.append(": ")
+ parts.append(str(error))
+ else:
+ parts.append(".")
+
+ # Spilt the error indication from a helper message (if any)
+ parts[-1] += "\n"
+
+ # Suggest useful actions to the user:
+ # (1) using user site-packages or (2) verifying the permissions
+ if error.errno == errno.EACCES:
+ user_option_part = "Consider using the `--user` option"
+ permissions_part = "Check the permissions"
+
+ if not using_user_site:
+ parts.extend([
+ user_option_part, " or ",
+ permissions_part.lower(),
+ ])
+ else:
+ parts.append(permissions_part)
+ parts.append(".\n")
+
+ return "".join(parts).strip() + "\n"
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/list.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/list.py
new file mode 100644
index 0000000000000000000000000000000000000000..d0062063e7be3e3fa1f09452e793eceafc1b2343
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/list.py
@@ -0,0 +1,315 @@
+# The following comment should be removed at some point in the future.
+# mypy: disallow-untyped-defs=False
+
+from __future__ import absolute_import
+
+import json
+import logging
+
+from pip._vendor import six
+from pip._vendor.six.moves import zip_longest
+
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.req_command import IndexGroupCommand
+from pip._internal.exceptions import CommandError
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.models.selection_prefs import SelectionPreferences
+from pip._internal.self_outdated_check import make_link_collector
+from pip._internal.utils.misc import (
+ dist_is_editable,
+ get_installed_distributions,
+ write_output,
+)
+from pip._internal.utils.packaging import get_installer
+
+from pip._vendor.packaging.version import parse
+
+logger = logging.getLogger(__name__)
+
+
+class ListCommand(IndexGroupCommand):
+ """
+ List installed packages, including editables.
+
+ Packages are listed in a case-insensitive sorted order.
+ """
+
+ usage = """
+ %prog [options]"""
+
+ def __init__(self, *args, **kw):
+ super(ListCommand, self).__init__(*args, **kw)
+
+ cmd_opts = self.cmd_opts
+
+ cmd_opts.add_option(
+ '-o', '--outdated',
+ action='store_true',
+ default=False,
+ help='List outdated packages')
+ cmd_opts.add_option(
+ '-u', '--uptodate',
+ action='store_true',
+ default=False,
+ help='List uptodate packages')
+ cmd_opts.add_option(
+ '-e', '--editable',
+ action='store_true',
+ default=False,
+ help='List editable projects.')
+ cmd_opts.add_option(
+ '-l', '--local',
+ action='store_true',
+ default=False,
+ help=('If in a virtualenv that has global access, do not list '
+ 'globally-installed packages.'),
+ )
+ self.cmd_opts.add_option(
+ '--user',
+ dest='user',
+ action='store_true',
+ default=False,
+ help='Only output packages installed in user-site.')
+ cmd_opts.add_option(cmdoptions.list_path())
+ cmd_opts.add_option(
+ '--pre',
+ action='store_true',
+ default=False,
+ help=("Include pre-release and development versions. By default, "
+ "pip only finds stable versions."),
+ )
+
+ cmd_opts.add_option(
+ '--format',
+ action='store',
+ dest='list_format',
+ default="columns",
+ choices=('columns', 'freeze', 'json'),
+ help="Select the output format among: columns (default), freeze, "
+ "or json",
+ )
+
+ cmd_opts.add_option(
+ '--not-required',
+ action='store_true',
+ dest='not_required',
+ help="List packages that are not dependencies of "
+ "installed packages.",
+ )
+
+ cmd_opts.add_option(
+ '--exclude-editable',
+ action='store_false',
+ dest='include_editable',
+ help='Exclude editable package from output.',
+ )
+ cmd_opts.add_option(
+ '--include-editable',
+ action='store_true',
+ dest='include_editable',
+ help='Include editable package from output.',
+ default=True,
+ )
+ index_opts = cmdoptions.make_option_group(
+ cmdoptions.index_group, self.parser
+ )
+
+ self.parser.insert_option_group(0, index_opts)
+ self.parser.insert_option_group(0, cmd_opts)
+
+ def _build_package_finder(self, options, session):
+ """
+ Create a package finder appropriate to this list command.
+ """
+ link_collector = make_link_collector(session, options=options)
+
+ # Pass allow_yanked=False to ignore yanked versions.
+ selection_prefs = SelectionPreferences(
+ allow_yanked=False,
+ allow_all_prereleases=options.pre,
+ )
+
+ return PackageFinder.create(
+ link_collector=link_collector,
+ selection_prefs=selection_prefs,
+ )
+
+ def run(self, options, args):
+ if options.outdated and options.uptodate:
+ raise CommandError(
+ "Options --outdated and --uptodate cannot be combined.")
+
+ cmdoptions.check_list_path_option(options)
+
+ packages = get_installed_distributions(
+ local_only=options.local,
+ user_only=options.user,
+ editables_only=options.editable,
+ include_editables=options.include_editable,
+ paths=options.path,
+ )
+
+ # get_not_required must be called firstly in order to find and
+ # filter out all dependencies correctly. Otherwise a package
+ # can't be identified as requirement because some parent packages
+ # could be filtered out before.
+ if options.not_required:
+ packages = self.get_not_required(packages, options)
+
+ if options.outdated:
+ packages = self.get_outdated(packages, options)
+ elif options.uptodate:
+ packages = self.get_uptodate(packages, options)
+
+ self.output_package_listing(packages, options)
+
+ def get_outdated(self, packages, options):
+ return [
+ dist for dist in self.iter_packages_latest_infos(packages, options)
+ if parse(str(dist.latest_version)) > parse(str(dist.parsed_version))
+ ]
+
+ def get_uptodate(self, packages, options):
+ return [
+ dist for dist in self.iter_packages_latest_infos(packages, options)
+ if parse(str(dist.latest_version)) == parse(str(dist.parsed_version))
+ ]
+
+ def get_not_required(self, packages, options):
+ dep_keys = set()
+ for dist in packages:
+ dep_keys.update(requirement.key for requirement in dist.requires())
+ return {pkg for pkg in packages if pkg.key not in dep_keys}
+
+ def iter_packages_latest_infos(self, packages, options):
+ with self._build_session(options) as session:
+ finder = self._build_package_finder(options, session)
+
+ for dist in packages:
+ typ = 'unknown'
+ all_candidates = finder.find_all_candidates(dist.key)
+ if not options.pre:
+ # Remove prereleases
+ all_candidates = [candidate for candidate in all_candidates
+ if not candidate.version.is_prerelease]
+
+ evaluator = finder.make_candidate_evaluator(
+ project_name=dist.project_name,
+ )
+ best_candidate = evaluator.sort_best_candidate(all_candidates)
+ if best_candidate is None:
+ continue
+
+ remote_version = best_candidate.version
+ if best_candidate.link.is_wheel:
+ typ = 'wheel'
+ else:
+ typ = 'sdist'
+ # This is dirty but makes the rest of the code much cleaner
+ dist.latest_version = remote_version
+ dist.latest_filetype = typ
+ yield dist
+
+ def output_package_listing(self, packages, options):
+ packages = sorted(
+ packages,
+ key=lambda dist: dist.project_name.lower(),
+ )
+ if options.list_format == 'columns' and packages:
+ data, header = format_for_columns(packages, options)
+ self.output_package_listing_columns(data, header)
+ elif options.list_format == 'freeze':
+ for dist in packages:
+ if options.verbose >= 1:
+ write_output("%s==%s (%s)", dist.project_name,
+ dist.version, dist.location)
+ else:
+ write_output("%s==%s", dist.project_name, dist.version)
+ elif options.list_format == 'json':
+ write_output(format_for_json(packages, options))
+
+ def output_package_listing_columns(self, data, header):
+ # insert the header first: we need to know the size of column names
+ if len(data) > 0:
+ data.insert(0, header)
+
+ pkg_strings, sizes = tabulate(data)
+
+ # Create and add a separator.
+ if len(data) > 0:
+ pkg_strings.insert(1, " ".join(map(lambda x: '-' * x, sizes)))
+
+ for val in pkg_strings:
+ write_output(val)
+
+
+def tabulate(vals):
+ # From pfmoore on GitHub:
+ # https://github.com/pypa/pip/issues/3651#issuecomment-216932564
+ assert len(vals) > 0
+
+ sizes = [0] * max(len(x) for x in vals)
+ for row in vals:
+ sizes = [max(s, len(str(c))) for s, c in zip_longest(sizes, row)]
+
+ result = []
+ for row in vals:
+ display = " ".join([str(c).ljust(s) if c is not None else ''
+ for s, c in zip_longest(sizes, row)])
+ result.append(display)
+
+ return result, sizes
+
+
+def format_for_columns(pkgs, options):
+ """
+ Convert the package data into something usable
+ by output_package_listing_columns.
+ """
+ running_outdated = options.outdated
+ # Adjust the header for the `pip list --outdated` case.
+ if running_outdated:
+ header = ["Package", "Version", "Latest", "Type"]
+ else:
+ header = ["Package", "Version"]
+
+ data = []
+ if options.verbose >= 1 or any(dist_is_editable(x) for x in pkgs):
+ header.append("Location")
+ if options.verbose >= 1:
+ header.append("Installer")
+
+ for proj in pkgs:
+ # if we're working on the 'outdated' list, separate out the
+ # latest_version and type
+ row = [proj.project_name, proj.version]
+
+ if running_outdated:
+ row.append(proj.latest_version)
+ row.append(proj.latest_filetype)
+
+ if options.verbose >= 1 or dist_is_editable(proj):
+ row.append(proj.location)
+ if options.verbose >= 1:
+ row.append(get_installer(proj))
+
+ data.append(row)
+
+ return data, header
+
+
+def format_for_json(packages, options):
+ data = []
+ for dist in packages:
+ info = {
+ 'name': dist.project_name,
+ 'version': six.text_type(dist.version),
+ }
+ if options.verbose >= 1:
+ info['location'] = dist.location
+ info['installer'] = get_installer(dist)
+ if options.outdated:
+ info['latest_version'] = six.text_type(dist.latest_version)
+ info['latest_filetype'] = dist.latest_filetype
+ data.append(info)
+ return json.dumps(data)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/search.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/search.py
new file mode 100644
index 0000000000000000000000000000000000000000..2e880eec2242a0aec7a2a6b53cde16fa106fd683
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/search.py
@@ -0,0 +1,145 @@
+# The following comment should be removed at some point in the future.
+# mypy: disallow-untyped-defs=False
+
+from __future__ import absolute_import
+
+import logging
+import sys
+import textwrap
+from collections import OrderedDict
+
+from pip._vendor import pkg_resources
+from pip._vendor.packaging.version import parse as parse_version
+# NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is
+# why we ignore the type on this import
+from pip._vendor.six.moves import xmlrpc_client # type: ignore
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.req_command import SessionCommandMixin
+from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS
+from pip._internal.exceptions import CommandError
+from pip._internal.models.index import PyPI
+from pip._internal.network.xmlrpc import PipXmlrpcTransport
+from pip._internal.utils.compat import get_terminal_size
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import write_output
+
+logger = logging.getLogger(__name__)
+
+
+class SearchCommand(Command, SessionCommandMixin):
+ """Search for PyPI packages whose name or summary contains <query>."""
+
+ usage = """
+ %prog [options] <query>"""
+ ignore_require_venv = True
+
+ def __init__(self, *args, **kw):
+ super(SearchCommand, self).__init__(*args, **kw)
+ self.cmd_opts.add_option(
+ '-i', '--index',
+ dest='index',
+ metavar='URL',
+ default=PyPI.pypi_url,
+ help='Base URL of Python Package Index (default %default)')
+
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ def run(self, options, args):
+ if not args:
+ raise CommandError('Missing required argument (search query).')
+ query = args
+ pypi_hits = self.search(query, options)
+ hits = transform_hits(pypi_hits)
+
+ terminal_width = None
+ if sys.stdout.isatty():
+ terminal_width = get_terminal_size()[0]
+
+ print_results(hits, terminal_width=terminal_width)
+ if pypi_hits:
+ return SUCCESS
+ return NO_MATCHES_FOUND
+
+ def search(self, query, options):
+ index_url = options.index
+
+ session = self.get_default_session(options)
+
+ transport = PipXmlrpcTransport(index_url, session)
+ pypi = xmlrpc_client.ServerProxy(index_url, transport)
+ hits = pypi.search({'name': query, 'summary': query}, 'or')
+ return hits
+
+
+def transform_hits(hits):
+ """
+ The list from pypi is really a list of versions. We want a list of
+ packages with the list of versions stored inline. This converts the
+ list from pypi into one we can use.
+ """
+ packages = OrderedDict()
+ for hit in hits:
+ name = hit['name']
+ summary = hit['summary']
+ version = hit['version']
+
+ if name not in packages.keys():
+ packages[name] = {
+ 'name': name,
+ 'summary': summary,
+ 'versions': [version],
+ }
+ else:
+ packages[name]['versions'].append(version)
+
+ # if this is the highest version, replace summary and score
+ if version == highest_version(packages[name]['versions']):
+ packages[name]['summary'] = summary
+
+ return list(packages.values())
+
+
+def print_results(hits, name_column_width=None, terminal_width=None):
+ if not hits:
+ return
+ if name_column_width is None:
+ name_column_width = max([
+ len(hit['name']) + len(highest_version(hit.get('versions', ['-'])))
+ for hit in hits
+ ]) + 4
+
+ installed_packages = [p.project_name for p in pkg_resources.working_set]
+ for hit in hits:
+ name = hit['name']
+ summary = hit['summary'] or ''
+ latest = highest_version(hit.get('versions', ['-']))
+ if terminal_width is not None:
+ target_width = terminal_width - name_column_width - 5
+ if target_width > 10:
+ # wrap and indent summary to fit terminal
+ summary = textwrap.wrap(summary, target_width)
+ summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)
+
+ line = '%-*s - %s' % (name_column_width,
+ '%s (%s)' % (name, latest), summary)
+ try:
+ write_output(line)
+ if name in installed_packages:
+ dist = pkg_resources.get_distribution(name)
+ with indent_log():
+ if dist.version == latest:
+ write_output('INSTALLED: %s (latest)', dist.version)
+ else:
+ write_output('INSTALLED: %s', dist.version)
+ if parse_version(latest).pre:
+ write_output('LATEST: %s (pre-release; install'
+ ' with "pip install --pre")', latest)
+ else:
+ write_output('LATEST: %s', latest)
+ except UnicodeEncodeError:
+ pass
+
+
+def highest_version(versions):
+ return max(versions, key=parse_version)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/show.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/show.py
new file mode 100644
index 0000000000000000000000000000000000000000..a46b08eeb3d22852421b2b6c8b1b15ce12aaa9a3
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/show.py
@@ -0,0 +1,180 @@
+# The following comment should be removed at some point in the future.
+# mypy: disallow-untyped-defs=False
+
+from __future__ import absolute_import
+
+import logging
+import os
+from email.parser import FeedParser
+
+from pip._vendor import pkg_resources
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.utils.misc import write_output
+
+logger = logging.getLogger(__name__)
+
+
+class ShowCommand(Command):
+ """
+ Show information about one or more installed packages.
+
+ The output is in RFC-compliant mail header format.
+ """
+
+ usage = """
+ %prog [options] <package> ..."""
+ ignore_require_venv = True
+
+ def __init__(self, *args, **kw):
+ super(ShowCommand, self).__init__(*args, **kw)
+ self.cmd_opts.add_option(
+ '-f', '--files',
+ dest='files',
+ action='store_true',
+ default=False,
+ help='Show the full list of installed files for each package.')
+
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ def run(self, options, args):
+ if not args:
+ logger.warning('ERROR: Please provide a package name or names.')
+ return ERROR
+ query = args
+
+ results = search_packages_info(query)
+ if not print_results(
+ results, list_files=options.files, verbose=options.verbose):
+ return ERROR
+ return SUCCESS
+
+
+def search_packages_info(query):
+ """
+ Gather details from installed distributions. Print distribution name,
+ version, location, and installed files. Installed files requires a
+ pip generated 'installed-files.txt' in the distributions '.egg-info'
+ directory.
+ """
+ installed = {}
+ for p in pkg_resources.working_set:
+ installed[canonicalize_name(p.project_name)] = p
+
+ query_names = [canonicalize_name(name) for name in query]
+ missing = sorted(
+ [name for name, pkg in zip(query, query_names) if pkg not in installed]
+ )
+ if missing:
+ logger.warning('Package(s) not found: %s', ', '.join(missing))
+
+ def get_requiring_packages(package_name):
+ canonical_name = canonicalize_name(package_name)
+ return [
+ pkg.project_name for pkg in pkg_resources.working_set
+ if canonical_name in
+ [canonicalize_name(required.name) for required in
+ pkg.requires()]
+ ]
+
+ for dist in [installed[pkg] for pkg in query_names if pkg in installed]:
+ package = {
+ 'name': dist.project_name,
+ 'version': dist.version,
+ 'location': dist.location,
+ 'requires': [dep.project_name for dep in dist.requires()],
+ 'required_by': get_requiring_packages(dist.project_name)
+ }
+ file_list = None
+ metadata = None
+ if isinstance(dist, pkg_resources.DistInfoDistribution):
+ # RECORDs should be part of .dist-info metadatas
+ if dist.has_metadata('RECORD'):
+ lines = dist.get_metadata_lines('RECORD')
+ paths = [l.split(',')[0] for l in lines]
+ paths = [os.path.join(dist.location, p) for p in paths]
+ file_list = [os.path.relpath(p, dist.location) for p in paths]
+
+ if dist.has_metadata('METADATA'):
+ metadata = dist.get_metadata('METADATA')
+ else:
+ # Otherwise use pip's log for .egg-info's
+ if dist.has_metadata('installed-files.txt'):
+ paths = dist.get_metadata_lines('installed-files.txt')
+ paths = [os.path.join(dist.egg_info, p) for p in paths]
+ file_list = [os.path.relpath(p, dist.location) for p in paths]
+
+ if dist.has_metadata('PKG-INFO'):
+ metadata = dist.get_metadata('PKG-INFO')
+
+ if dist.has_metadata('entry_points.txt'):
+ entry_points = dist.get_metadata_lines('entry_points.txt')
+ package['entry_points'] = entry_points
+
+ if dist.has_metadata('INSTALLER'):
+ for line in dist.get_metadata_lines('INSTALLER'):
+ if line.strip():
+ package['installer'] = line.strip()
+ break
+
+ # @todo: Should pkg_resources.Distribution have a
+ # `get_pkg_info` method?
+ feed_parser = FeedParser()
+ feed_parser.feed(metadata)
+ pkg_info_dict = feed_parser.close()
+ for key in ('metadata-version', 'summary',
+ 'home-page', 'author', 'author-email', 'license'):
+ package[key] = pkg_info_dict.get(key)
+
+ # It looks like FeedParser cannot deal with repeated headers
+ classifiers = []
+ for line in metadata.splitlines():
+ if line.startswith('Classifier: '):
+ classifiers.append(line[len('Classifier: '):])
+ package['classifiers'] = classifiers
+
+ if file_list:
+ package['files'] = sorted(file_list)
+ yield package
+
+
+def print_results(distributions, list_files=False, verbose=False):
+ """
+ Print the informations from installed distributions found.
+ """
+ results_printed = False
+ for i, dist in enumerate(distributions):
+ results_printed = True
+ if i > 0:
+ write_output("---")
+
+ write_output("Name: %s", dist.get('name', ''))
+ write_output("Version: %s", dist.get('version', ''))
+ write_output("Summary: %s", dist.get('summary', ''))
+ write_output("Home-page: %s", dist.get('home-page', ''))
+ write_output("Author: %s", dist.get('author', ''))
+ write_output("Author-email: %s", dist.get('author-email', ''))
+ write_output("License: %s", dist.get('license', ''))
+ write_output("Location: %s", dist.get('location', ''))
+ write_output("Requires: %s", ', '.join(dist.get('requires', [])))
+ write_output("Required-by: %s", ', '.join(dist.get('required_by', [])))
+
+ if verbose:
+ write_output("Metadata-Version: %s",
+ dist.get('metadata-version', ''))
+ write_output("Installer: %s", dist.get('installer', ''))
+ write_output("Classifiers:")
+ for classifier in dist.get('classifiers', []):
+ write_output(" %s", classifier)
+ write_output("Entry-points:")
+ for entry in dist.get('entry_points', []):
+ write_output(" %s", entry.strip())
+ if list_files:
+ write_output("Files:")
+ for line in dist.get('files', []):
+ write_output(" %s", line.strip())
+ if "files" not in dist:
+ write_output("Cannot locate installed-files.txt")
+ return results_printed
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/uninstall.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/uninstall.py
new file mode 100644
index 0000000000000000000000000000000000000000..1bde414a6c1a5f4b9ef5b990f22b334a9b0a71b6
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/uninstall.py
@@ -0,0 +1,82 @@
+# The following comment should be removed at some point in the future.
+# mypy: disallow-untyped-defs=False
+
+from __future__ import absolute_import
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.req_command import SessionCommandMixin
+from pip._internal.exceptions import InstallationError
+from pip._internal.req import parse_requirements
+from pip._internal.req.constructors import install_req_from_line
+from pip._internal.utils.misc import protect_pip_from_modification_on_windows
+
+
+class UninstallCommand(Command, SessionCommandMixin):
+ """
+ Uninstall packages.
+
+ pip is able to uninstall most installed packages. Known exceptions are:
+
+ - Pure distutils packages installed with ``python setup.py install``, which
+ leave behind no metadata to determine what files were installed.
+ - Script wrappers installed by ``python setup.py develop``.
+ """
+
+ usage = """
+ %prog [options] <package> ...
+ %prog [options] -r <requirements file> ..."""
+
+ def __init__(self, *args, **kw):
+ super(UninstallCommand, self).__init__(*args, **kw)
+ self.cmd_opts.add_option(
+ '-r', '--requirement',
+ dest='requirements',
+ action='append',
+ default=[],
+ metavar='file',
+ help='Uninstall all the packages listed in the given requirements '
+ 'file. This option can be used multiple times.',
+ )
+ self.cmd_opts.add_option(
+ '-y', '--yes',
+ dest='yes',
+ action='store_true',
+ help="Don't ask for confirmation of uninstall deletions.")
+
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ def run(self, options, args):
+ session = self.get_default_session(options)
+
+ reqs_to_uninstall = {}
+ for name in args:
+ req = install_req_from_line(
+ name, isolated=options.isolated_mode,
+ )
+ if req.name:
+ reqs_to_uninstall[canonicalize_name(req.name)] = req
+ for filename in options.requirements:
+ for req in parse_requirements(
+ filename,
+ options=options,
+ session=session):
+ if req.name:
+ reqs_to_uninstall[canonicalize_name(req.name)] = req
+ if not reqs_to_uninstall:
+ raise InstallationError(
+ 'You must give at least one requirement to %(name)s (see '
+ '"pip help %(name)s")' % dict(name=self.name)
+ )
+
+ protect_pip_from_modification_on_windows(
+ modifying_pip="pip" in reqs_to_uninstall
+ )
+
+ for req in reqs_to_uninstall.values():
+ uninstall_pathset = req.uninstall(
+ auto_confirm=options.yes, verbose=self.verbosity > 0,
+ )
+ if uninstall_pathset:
+ uninstall_pathset.commit()
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/wheel.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/wheel.py
new file mode 100644
index 0000000000000000000000000000000000000000..eb44bcee45930d10654d5bdc6af5658aef01bc41
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/commands/wheel.py
@@ -0,0 +1,197 @@
+# -*- coding: utf-8 -*-
+
+# The following comment should be removed at some point in the future.
+# mypy: disallow-untyped-defs=False
+
+from __future__ import absolute_import
+
+import logging
+import os
+import shutil
+
+from pip._internal.cache import WheelCache
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.req_command import RequirementCommand
+from pip._internal.exceptions import CommandError, PreviousBuildDirError
+from pip._internal.req import RequirementSet
+from pip._internal.req.req_tracker import get_requirement_tracker
+from pip._internal.utils.misc import ensure_dir, normalize_path
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.wheel_builder import build, should_build_for_wheel_command
+
+if MYPY_CHECK_RUNNING:
+ from optparse import Values
+ from typing import Any, List
+
+
+logger = logging.getLogger(__name__)
+
+
+class WheelCommand(RequirementCommand):
+ """
+ Build Wheel archives for your requirements and dependencies.
+
+ Wheel is a built-package format, and offers the advantage of not
+ recompiling your software during every install. For more details, see the
+ wheel docs: https://wheel.readthedocs.io/en/latest/
+
+ Requirements: setuptools>=0.8, and wheel.
+
+ 'pip wheel' uses the bdist_wheel setuptools extension from the wheel
+ package to build individual wheels.
+
+ """
+
+ usage = """
+ %prog [options] <requirement specifier> ...
+ %prog [options] -r <requirements file> ...
+ %prog [options] [-e] <vcs project url> ...
+ %prog [options] [-e] <local project path> ...
+ %prog [options] <archive url/path> ..."""
+
+ def __init__(self, *args, **kw):
+ super(WheelCommand, self).__init__(*args, **kw)
+
+ cmd_opts = self.cmd_opts
+
+ cmd_opts.add_option(
+ '-w', '--wheel-dir',
+ dest='wheel_dir',
+ metavar='dir',
+ default=os.curdir,
+ help=("Build wheels into <dir>, where the default is the "
+ "current working directory."),
+ )
+ cmd_opts.add_option(cmdoptions.no_binary())
+ cmd_opts.add_option(cmdoptions.only_binary())
+ cmd_opts.add_option(cmdoptions.prefer_binary())
+ cmd_opts.add_option(
+ '--build-option',
+ dest='build_options',
+ metavar='options',
+ action='append',
+ help="Extra arguments to be supplied to 'setup.py bdist_wheel'.",
+ )
+ cmd_opts.add_option(cmdoptions.no_build_isolation())
+ cmd_opts.add_option(cmdoptions.use_pep517())
+ cmd_opts.add_option(cmdoptions.no_use_pep517())
+ cmd_opts.add_option(cmdoptions.constraints())
+ cmd_opts.add_option(cmdoptions.editable())
+ cmd_opts.add_option(cmdoptions.requirements())
+ cmd_opts.add_option(cmdoptions.src())
+ cmd_opts.add_option(cmdoptions.ignore_requires_python())
+ cmd_opts.add_option(cmdoptions.no_deps())
+ cmd_opts.add_option(cmdoptions.build_dir())
+ cmd_opts.add_option(cmdoptions.progress_bar())
+
+ cmd_opts.add_option(
+ '--global-option',
+ dest='global_options',
+ action='append',
+ metavar='options',
+ help="Extra global options to be supplied to the setup.py "
+ "call before the 'bdist_wheel' command.")
+
+ cmd_opts.add_option(
+ '--pre',
+ action='store_true',
+ default=False,
+ help=("Include pre-release and development versions. By default, "
+ "pip only finds stable versions."),
+ )
+
+ cmd_opts.add_option(cmdoptions.no_clean())
+ cmd_opts.add_option(cmdoptions.require_hashes())
+
+ index_opts = cmdoptions.make_option_group(
+ cmdoptions.index_group,
+ self.parser,
+ )
+
+ self.parser.insert_option_group(0, index_opts)
+ self.parser.insert_option_group(0, cmd_opts)
+
+ def run(self, options, args):
+ # type: (Values, List[Any]) -> None
+ cmdoptions.check_install_build_global(options)
+
+ session = self.get_default_session(options)
+
+ finder = self._build_package_finder(options, session)
+ build_delete = (not (options.no_clean or options.build_dir))
+ wheel_cache = WheelCache(options.cache_dir, options.format_control)
+
+ options.wheel_dir = normalize_path(options.wheel_dir)
+ ensure_dir(options.wheel_dir)
+
+ with get_requirement_tracker() as req_tracker, TempDirectory(
+ options.build_dir, delete=build_delete, kind="wheel"
+ ) as directory:
+
+ requirement_set = RequirementSet()
+
+ try:
+ self.populate_requirement_set(
+ requirement_set, args, options, finder, session,
+ wheel_cache
+ )
+
+ preparer = self.make_requirement_preparer(
+ temp_build_dir=directory,
+ options=options,
+ req_tracker=req_tracker,
+ session=session,
+ finder=finder,
+ wheel_download_dir=options.wheel_dir,
+ use_user_site=False,
+ )
+
+ resolver = self.make_resolver(
+ preparer=preparer,
+ finder=finder,
+ options=options,
+ wheel_cache=wheel_cache,
+ ignore_requires_python=options.ignore_requires_python,
+ use_pep517=options.use_pep517,
+ )
+
+ self.trace_basic_info(finder)
+
+ resolver.resolve(requirement_set)
+
+ reqs_to_build = [
+ r for r in requirement_set.requirements.values()
+ if should_build_for_wheel_command(r)
+ ]
+
+ # build wheels
+ build_successes, build_failures = build(
+ reqs_to_build,
+ wheel_cache=wheel_cache,
+ build_options=options.build_options or [],
+ global_options=options.global_options or [],
+ )
+ for req in build_successes:
+ assert req.link and req.link.is_wheel
+ assert req.local_file_path
+ # copy from cache to target directory
+ try:
+ shutil.copy(req.local_file_path, options.wheel_dir)
+ except OSError as e:
+ logger.warning(
+ "Building wheel for %s failed: %s",
+ req.name, e,
+ )
+ build_failures.append(req)
+ if len(build_failures) != 0:
+ raise CommandError(
+ "Failed to build one or more wheels"
+ )
+ except PreviousBuildDirError:
+ options.no_clean = True
+ raise
+ finally:
+ if not options.no_clean:
+ requirement_set.cleanup_files()
+ wheel_cache.cleanup()
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/configuration.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/configuration.py
new file mode 100644
index 0000000000000000000000000000000000000000..f09a1ae25c2b58ad5d15040efc4cbd99658e54b6
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/configuration.py
@@ -0,0 +1,422 @@
+"""Configuration management setup
+
+Some terminology:
+- name
+ As written in config files.
+- value
+ Value associated with a name
+- key
+ Name combined with it's section (section.name)
+- variant
+ A single word describing where the configuration key-value pair came from
+"""
+
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+import locale
+import logging
+import os
+import sys
+
+from pip._vendor.six.moves import configparser
+
+from pip._internal.exceptions import (
+ ConfigurationError,
+ ConfigurationFileCouldNotBeLoaded,
+)
+from pip._internal.utils import appdirs
+from pip._internal.utils.compat import WINDOWS, expanduser
+from pip._internal.utils.misc import ensure_dir, enum
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Any, Dict, Iterable, List, NewType, Optional, Tuple
+ )
+
+ RawConfigParser = configparser.RawConfigParser # Shorthand
+ Kind = NewType("Kind", str)
+
+logger = logging.getLogger(__name__)
+
+
+# NOTE: Maybe use the optionx attribute to normalize keynames.
+def _normalize_name(name):
+    # type: (str) -> str
+    """Make a name consistent regardless of source (environment or file)
+
+    Lowercases the name, converts underscores to dashes, and strips a
+    leading ``--`` so long-option spellings map onto plain key names.
+    """
+    name = name.lower().replace('_', '-')
+    if name.startswith('--'):
+        name = name[2:]  # only prefer long opts
+    return name
+
+
+def _disassemble_key(name):
+    # type: (str) -> List[str]
+    """Split a dotted "section.key" string into ``[section, key]``.
+
+    Only the first dot is significant, so key names may themselves
+    contain dots. Raises ConfigurationError when no dot is present.
+    """
+    if "." not in name:
+        error_message = (
+            "Key does not contain dot separated section and key. "
+            "Perhaps you wanted to use 'global.{}' instead?"
+        ).format(name)
+        raise ConfigurationError(error_message)
+    return name.split(".", 1)
+
+
+# The kinds of configurations there are.
+kinds = enum(
+    USER="user",        # User Specific
+    GLOBAL="global",    # System Wide
+    SITE="site",        # [Virtual] Environment Specific
+    ENV="env",          # from PIP_CONFIG_FILE
+    ENV_VAR="env-var",  # from Environment Variables
+)
+
+
+# Basename of the pip configuration file inside each config directory.
+CONFIG_BASENAME = 'pip.ini' if WINDOWS else 'pip.conf'
+
+
+def get_configuration_files():
+    # type: () -> Dict[Kind, List[str]]
+    """Return the candidate config-file paths for each file-backed kind.
+
+    The returned mapping covers GLOBAL (system-wide), SITE (current
+    environment / sys.prefix) and USER (legacy ``~/.pip`` path first,
+    then the appdirs-based path, so the newer file wins on override).
+    Paths are candidates only; the files need not exist.
+    """
+    global_config_files = [
+        os.path.join(path, CONFIG_BASENAME)
+        for path in appdirs.site_config_dirs('pip')
+    ]
+
+    site_config_file = os.path.join(sys.prefix, CONFIG_BASENAME)
+    legacy_config_file = os.path.join(
+        expanduser('~'),
+        'pip' if WINDOWS else '.pip',
+        CONFIG_BASENAME,
+    )
+    new_config_file = os.path.join(
+        appdirs.user_config_dir("pip"), CONFIG_BASENAME
+    )
+    return {
+        kinds.GLOBAL: global_config_files,
+        kinds.SITE: [site_config_file],
+        kinds.USER: [legacy_config_file, new_config_file],
+    }
+
+
+class Configuration(object):
+    """Handles management of configuration.
+
+    Provides an interface to accessing and managing configuration files.
+
+    This class provides an API that takes "section.key-name" style keys and
+    stores the value associated with it as "key-name" under the section
+    "section".
+
+    This allows for a clean interface wherein both the section and the
+    key-name are preserved in an easy to manage form in the configuration
+    files and the data stored is also nice.
+    """
+
+    def __init__(self, isolated, load_only=None):
+        # type: (bool, Kind) -> None
+        super(Configuration, self).__init__()
+
+        # Only file-backed kinds (or None, i.e. "all") may be edited.
+        _valid_load_only = [kinds.USER, kinds.GLOBAL, kinds.SITE, None]
+        if load_only not in _valid_load_only:
+            raise ConfigurationError(
+                "Got invalid value for load_only - should be one of {}".format(
+                    ", ".join(map(repr, _valid_load_only[:-1]))
+                )
+            )
+        self.isolated = isolated  # type: bool
+        self.load_only = load_only  # type: Optional[Kind]
+
+        # The order here determines the override order: later entries
+        # override earlier ones, so environment variables win overall.
+        self._override_order = [
+            kinds.GLOBAL, kinds.USER, kinds.SITE, kinds.ENV, kinds.ENV_VAR
+        ]
+
+        # PIP_VERSION / PIP_HELP are CLI concerns, not configuration.
+        self._ignore_env_names = ["version", "help"]
+
+        # Because we keep track of where we got the data from
+        self._parsers = {
+            variant: [] for variant in self._override_order
+        }  # type: Dict[Kind, List[Tuple[str, RawConfigParser]]]
+        self._config = {
+            variant: {} for variant in self._override_order
+        }  # type: Dict[Kind, Dict[str, Any]]
+        self._modified_parsers = []  # type: List[Tuple[str, RawConfigParser]]
+
+    def load(self):
+        # type: () -> None
+        """Loads configuration from configuration files and environment
+        """
+        self._load_config_files()
+        if not self.isolated:
+            self._load_environment_vars()
+
+    def get_file_to_edit(self):
+        # type: () -> Optional[str]
+        """Returns the file with highest priority in configuration
+        """
+        assert self.load_only is not None, \
+            "Need to be specified a file to be editing"
+
+        try:
+            return self._get_parser_to_modify()[0]
+        except IndexError:
+            return None
+
+    def items(self):
+        # type: () -> Iterable[Tuple[str, Any]]
+        """Returns key-value pairs like dict.items() representing the loaded
+        configuration
+        """
+        return self._dictionary.items()
+
+    def get_value(self, key):
+        # type: (str) -> Any
+        """Get a value from the configuration.
+
+        Raises ConfigurationError if the key is not present.
+        """
+        try:
+            return self._dictionary[key]
+        except KeyError:
+            raise ConfigurationError("No such key - {}".format(key))
+
+    def set_value(self, key, value):
+        # type: (str, Any) -> None
+        """Modify a value in the configuration.
+
+        Updates both the in-memory mapping and the parser for the
+        load_only variant; call save() to persist the change to disk.
+        """
+        self._ensure_have_load_only()
+
+        fname, parser = self._get_parser_to_modify()
+
+        # NOTE(review): parser appears to always be non-None when
+        # _get_parser_to_modify() returns; guard kept defensively — confirm.
+        if parser is not None:
+            section, name = _disassemble_key(key)
+
+            # Modify the parser and the configuration
+            if not parser.has_section(section):
+                parser.add_section(section)
+            parser.set(section, name, value)
+
+        self._config[self.load_only][key] = value
+        self._mark_as_modified(fname, parser)
+
+    def unset_value(self, key):
+        # type: (str) -> None
+        """Unset a value in the configuration.
+
+        Removes the key from the load_only variant, dropping its section
+        if it becomes empty. Raises ConfigurationError on unknown keys.
+        """
+        self._ensure_have_load_only()
+
+        if key not in self._config[self.load_only]:
+            raise ConfigurationError("No such key - {}".format(key))
+
+        fname, parser = self._get_parser_to_modify()
+
+        if parser is not None:
+            section, name = _disassemble_key(key)
+
+            # Remove the key in the parser
+            modified_something = False
+            if parser.has_section(section):
+                # Returns whether the option was removed or not
+                modified_something = parser.remove_option(section, name)
+
+            if modified_something:
+                # name removed from parser, section may now be empty
+                section_iter = iter(parser.items(section))
+                try:
+                    val = next(section_iter)
+                except StopIteration:
+                    val = None
+
+                if val is None:
+                    parser.remove_section(section)
+
+                self._mark_as_modified(fname, parser)
+            else:
+                # In-memory state said the key exists but the parser did
+                # not have it: the two views are out of sync.
+                raise ConfigurationError(
+                    "Fatal Internal error [id=1]. Please report as a bug."
+                )
+
+        del self._config[self.load_only][key]
+
+    def save(self):
+        # type: () -> None
+        """Save the current in-memory state.
+
+        Writes every parser touched by set_value()/unset_value() back to
+        its file, creating parent directories as needed.
+        """
+        self._ensure_have_load_only()
+
+        for fname, parser in self._modified_parsers:
+            logger.info("Writing to %s", fname)
+
+            # Ensure directory exists.
+            ensure_dir(os.path.dirname(fname))
+
+            with open(fname, "w") as f:
+                parser.write(f)
+
+    #
+    # Private routines
+    #
+
+    def _ensure_have_load_only(self):
+        # type: () -> None
+        # Editing operations are only meaningful for a single variant.
+        if self.load_only is None:
+            raise ConfigurationError("Needed a specific file to be modifying.")
+        logger.debug("Will be working with %s variant only", self.load_only)
+
+    @property
+    def _dictionary(self):
+        # type: () -> Dict[str, Any]
+        """A dictionary representing the loaded configuration.
+        """
+        # NOTE: Dictionaries are not populated if not loaded. So, conditionals
+        # are not needed here.
+        retval = {}
+
+        # Later variants in _override_order overwrite earlier ones.
+        for variant in self._override_order:
+            retval.update(self._config[variant])
+
+        return retval
+
+    def _load_config_files(self):
+        # type: () -> None
+        """Loads configuration from configuration files
+        """
+        config_files = dict(self._iter_config_files())
+        # PIP_CONFIG_FILE=os.devnull is the documented way to disable
+        # config-file loading entirely.
+        if config_files[kinds.ENV][0:1] == [os.devnull]:
+            logger.debug(
+                "Skipping loading configuration files due to "
+                "environment's PIP_CONFIG_FILE being os.devnull"
+            )
+            return
+
+        for variant, files in config_files.items():
+            for fname in files:
+                # If there's specific variant set in `load_only`, load only
+                # that variant, not the others.
+                if self.load_only is not None and variant != self.load_only:
+                    logger.debug(
+                        "Skipping file '%s' (variant: %s)", fname, variant
+                    )
+                    continue
+
+                parser = self._load_file(variant, fname)
+
+                # Keeping track of the parsers used
+                self._parsers[variant].append((fname, parser))
+
+    def _load_file(self, variant, fname):
+        # type: (Kind, str) -> RawConfigParser
+        # Parse one file and merge its sections into the variant's config.
+        logger.debug("For variant '%s', will try loading '%s'", variant, fname)
+        parser = self._construct_parser(fname)
+
+        for section in parser.sections():
+            items = parser.items(section)
+            self._config[variant].update(self._normalized_keys(section, items))
+
+        return parser
+
+    def _construct_parser(self, fname):
+        # type: (str) -> RawConfigParser
+        parser = configparser.RawConfigParser()
+        # If there is no such file, don't bother reading it but create the
+        # parser anyway, to hold the data.
+        # Doing this is useful when modifying and saving files, where we don't
+        # need to construct a parser.
+        if os.path.exists(fname):
+            try:
+                parser.read(fname)
+            except UnicodeDecodeError:
+                # See https://github.com/pypa/pip/issues/4963
+                raise ConfigurationFileCouldNotBeLoaded(
+                    reason="contains invalid {} characters".format(
+                        locale.getpreferredencoding(False)
+                    ),
+                    fname=fname,
+                )
+            except configparser.Error as error:
+                # See https://github.com/pypa/pip/issues/4893
+                raise ConfigurationFileCouldNotBeLoaded(error=error)
+        return parser
+
+    def _load_environment_vars(self):
+        # type: () -> None
+        """Loads configuration from environment variables
+        """
+        # Environment values are filed under the pseudo-section ":env:".
+        self._config[kinds.ENV_VAR].update(
+            self._normalized_keys(":env:", self._get_environ_vars())
+        )
+
+    def _normalized_keys(self, section, items):
+        # type: (str, Iterable[Tuple[str, Any]]) -> Dict[str, Any]
+        """Normalizes items to construct a dictionary with normalized keys.
+
+        This routine is where the names become keys and are made the same
+        regardless of source - configuration files or environment.
+        """
+        normalized = {}
+        for name, val in items:
+            key = section + "." + _normalize_name(name)
+            normalized[key] = val
+        return normalized
+
+    def _get_environ_vars(self):
+        # type: () -> Iterable[Tuple[str, str]]
+        """Returns a generator with all environmental vars with prefix PIP_"""
+        for key, val in os.environ.items():
+            # Skip PIP_VERSION / PIP_HELP — they mirror CLI flags, not config.
+            should_be_yielded = (
+                key.startswith("PIP_") and
+                key[4:].lower() not in self._ignore_env_names
+            )
+            if should_be_yielded:
+                yield key[4:].lower(), val
+
+    # XXX: This is patched in the tests.
+    def _iter_config_files(self):
+        # type: () -> Iterable[Tuple[Kind, List[str]]]
+        """Yields variant and configuration files associated with it.
+
+        This should be treated like items of a dictionary.
+        """
+        # SMELL: Move the conditions out of this function
+
+        # environment variables have the lowest priority
+        config_file = os.environ.get('PIP_CONFIG_FILE', None)
+        if config_file is not None:
+            yield kinds.ENV, [config_file]
+        else:
+            yield kinds.ENV, []
+
+        config_files = get_configuration_files()
+
+        # at the base we have any global configuration
+        yield kinds.GLOBAL, config_files[kinds.GLOBAL]
+
+        # per-user configuration next
+        should_load_user_config = not self.isolated and not (
+            config_file and os.path.exists(config_file)
+        )
+        if should_load_user_config:
+            # The legacy config file is overridden by the new config file
+            yield kinds.USER, config_files[kinds.USER]
+
+        # finally virtualenv configuration first trumping others
+        yield kinds.SITE, config_files[kinds.SITE]
+
+    def _get_parser_to_modify(self):
+        # type: () -> Tuple[str, RawConfigParser]
+        # Determine which parser to modify
+        parsers = self._parsers[self.load_only]
+        if not parsers:
+            # This should not happen if everything works correctly.
+            raise ConfigurationError(
+                "Fatal Internal error [id=2]. Please report as a bug."
+            )
+
+        # Use the highest priority parser.
+        return parsers[-1]
+
+    # XXX: This is patched in the tests.
+    def _mark_as_modified(self, fname, parser):
+        # type: (str, RawConfigParser) -> None
+        # De-duplicates so save() writes each touched file exactly once.
+        file_parser_tuple = (fname, parser)
+        if file_parser_tuple not in self._modified_parsers:
+            self._modified_parsers.append(file_parser_tuple)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/distributions/__init__.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/distributions/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..d5c1afc5bc1ffd09c0ce46f4f2f700a1b996fe47
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/distributions/__init__.py
@@ -0,0 +1,24 @@
+from pip._internal.distributions.sdist import SourceDistribution
+from pip._internal.distributions.wheel import WheelDistribution
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from pip._internal.distributions.base import AbstractDistribution
+ from pip._internal.req.req_install import InstallRequirement
+
+
+def make_distribution_for_install_requirement(install_req):
+ # type: (InstallRequirement) -> AbstractDistribution
+ """Returns a Distribution for the given InstallRequirement
+ """
+ # Editable requirements will always be source distributions. They use the
+ # legacy logic until we create a modern standard for them.
+ if install_req.editable:
+ return SourceDistribution(install_req)
+
+ # If it's a wheel, it's a WheelDistribution
+ if install_req.is_wheel:
+ return WheelDistribution(install_req)
+
+ # Otherwise, a SourceDistribution
+ return SourceDistribution(install_req)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0dec737b0162054ffeaa060e5e17193d23197af4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/base.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/base.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ecd03a0a64bf12564c5b47eeabf09e92fba796bc
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/base.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8727f1b55e4fba147d06ef3eb701717551a39cbe
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..216c3883d3f407ef41b84f3fcf22df8277864c07
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6ced8c16d1d5024bf444b58f1632b7f931ac6694
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/distributions/base.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/distributions/base.py
new file mode 100644
index 0000000000000000000000000000000000000000..b836b98d162abda775f4b0c2b132eac4cf58a22d
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/distributions/base.py
@@ -0,0 +1,45 @@
+import abc
+
+from pip._vendor.six import add_metaclass
+
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional
+
+ from pip._vendor.pkg_resources import Distribution
+ from pip._internal.req import InstallRequirement
+ from pip._internal.index.package_finder import PackageFinder
+
+
+@add_metaclass(abc.ABCMeta)
+class AbstractDistribution(object):
+    """A base class for handling installable artifacts.
+
+    The requirements for anything installable are as follows:
+
+    - we must be able to determine the requirement name
+      (or we can't correctly handle the non-upgrade case).
+
+    - for packages with setup requirements, we must also be able
+      to determine their requirements without installing additional
+      packages (for the same reason as run-time dependencies)
+
+    - we must be able to create a Distribution object exposing the
+      above metadata.
+    """
+
+    def __init__(self, req):
+        # type: (InstallRequirement) -> None
+        super(AbstractDistribution, self).__init__()
+        # The InstallRequirement this distribution wraps.
+        self.req = req
+
+    @abc.abstractmethod
+    def get_pkg_resources_distribution(self):
+        # type: () -> Optional[Distribution]
+        """Return a pkg_resources Distribution exposing this artifact's
+        metadata, or None when it cannot be determined."""
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def prepare_distribution_metadata(self, finder, build_isolation):
+        # type: (PackageFinder, bool) -> None
+        """Do whatever work is needed before metadata can be read.
+
+        ``finder`` locates build-time dependencies; ``build_isolation``
+        controls whether PEP 517 builds run in an isolated environment
+        (see SourceDistribution for the only non-trivial implementation).
+        """
+        raise NotImplementedError()
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/distributions/installed.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/distributions/installed.py
new file mode 100644
index 0000000000000000000000000000000000000000..0d15bf42405e541b5154de307c54df47b9b7e2ec
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/distributions/installed.py
@@ -0,0 +1,24 @@
+from pip._internal.distributions.base import AbstractDistribution
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional
+
+ from pip._vendor.pkg_resources import Distribution
+ from pip._internal.index.package_finder import PackageFinder
+
+
+class InstalledDistribution(AbstractDistribution):
+    """Represents an installed package.
+
+    This does not need any preparation as the required information has already
+    been computed.
+    """
+
+    def get_pkg_resources_distribution(self):
+        # type: () -> Optional[Distribution]
+        # satisfied_by is the already-installed Distribution recorded on
+        # the requirement (may be None if nothing satisfies it).
+        return self.req.satisfied_by
+
+    def prepare_distribution_metadata(self, finder, build_isolation):
+        # type: (PackageFinder, bool) -> None
+        # Nothing to prepare: the metadata already exists on disk.
+        pass
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/distributions/sdist.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/distributions/sdist.py
new file mode 100644
index 0000000000000000000000000000000000000000..be3d7d97a1cfe877ce549603ebe1e17c65d68803
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/distributions/sdist.py
@@ -0,0 +1,104 @@
+import logging
+
+from pip._internal.build_env import BuildEnvironment
+from pip._internal.distributions.base import AbstractDistribution
+from pip._internal.exceptions import InstallationError
+from pip._internal.utils.subprocess import runner_with_spinner_message
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Set, Tuple
+
+ from pip._vendor.pkg_resources import Distribution
+ from pip._internal.index.package_finder import PackageFinder
+
+
+logger = logging.getLogger(__name__)
+
+
+class SourceDistribution(AbstractDistribution):
+ """Represents a source distribution.
+
+ The preparation step for these needs metadata for the packages to be
+ generated, either using PEP 517 or using the legacy `setup.py egg_info`.
+ """
+
+ def get_pkg_resources_distribution(self):
+ # type: () -> Distribution
+ return self.req.get_dist()
+
+ def prepare_distribution_metadata(self, finder, build_isolation):
+ # type: (PackageFinder, bool) -> None
+ # Load pyproject.toml, to determine whether PEP 517 is to be used
+ self.req.load_pyproject_toml()
+
+ # Set up the build isolation, if this requirement should be isolated
+ should_isolate = self.req.use_pep517 and build_isolation
+ if should_isolate:
+ self._setup_isolation(finder)
+
+ self.req.prepare_metadata()
+
+ def _setup_isolation(self, finder):
+ # type: (PackageFinder) -> None
+ def _raise_conflicts(conflicting_with, conflicting_reqs):
+ # type: (str, Set[Tuple[str, str]]) -> None
+ format_string = (
+ "Some build dependencies for {requirement} "
+ "conflict with {conflicting_with}: {description}."
+ )
+ error_message = format_string.format(
+ requirement=self.req,
+ conflicting_with=conflicting_with,
+ description=', '.join(
+ '{} is incompatible with {}'.format(installed, wanted)
+ for installed, wanted in sorted(conflicting)
+ )
+ )
+ raise InstallationError(error_message)
+
+ # Isolate in a BuildEnvironment and install the build-time
+ # requirements.
+ pyproject_requires = self.req.pyproject_requires
+ assert pyproject_requires is not None
+
+ self.req.build_env = BuildEnvironment()
+ self.req.build_env.install_requirements(
+ finder, pyproject_requires, 'overlay',
+ "Installing build dependencies"
+ )
+ conflicting, missing = self.req.build_env.check_requirements(
+ self.req.requirements_to_check
+ )
+ if conflicting:
+ _raise_conflicts("PEP 517/518 supported requirements",
+ conflicting)
+ if missing:
+ logger.warning(
+ "Missing build requirements in pyproject.toml for %s.",
+ self.req,
+ )
+ logger.warning(
+ "The project does not specify a build backend, and "
+ "pip cannot fall back to setuptools without %s.",
+ " and ".join(map(repr, sorted(missing)))
+ )
+ # Install any extra build dependencies that the backend requests.
+ # This must be done in a second pass, as the pyproject.toml
+ # dependencies must be installed before we can call the backend.
+ with self.req.build_env:
+ runner = runner_with_spinner_message(
+ "Getting requirements to build wheel"
+ )
+ backend = self.req.pep517_backend
+ assert backend is not None
+ with backend.subprocess_runner(runner):
+ reqs = backend.get_requires_for_build_wheel()
+
+ conflicting, missing = self.req.build_env.check_requirements(reqs)
+ if conflicting:
+ _raise_conflicts("the backend dependencies", conflicting)
+ self.req.build_env.install_requirements(
+ finder, missing, 'normal',
+ "Installing backend dependencies"
+ )
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/distributions/wheel.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/distributions/wheel.py
new file mode 100644
index 0000000000000000000000000000000000000000..bf3482b151f08196c82e719a9c194dfc20e4182e
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/distributions/wheel.py
@@ -0,0 +1,36 @@
+from zipfile import ZipFile
+
+from pip._internal.distributions.base import AbstractDistribution
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.wheel import pkg_resources_distribution_for_wheel
+
+if MYPY_CHECK_RUNNING:
+ from pip._vendor.pkg_resources import Distribution
+ from pip._internal.index.package_finder import PackageFinder
+
+
+class WheelDistribution(AbstractDistribution):
+    """Represents a wheel distribution.
+
+    This does not need any preparation as wheels can be directly unpacked.
+    """
+
+    def get_pkg_resources_distribution(self):
+        # type: () -> Distribution
+        """Loads the metadata from the wheel file into memory and returns a
+        Distribution that uses it, not relying on the wheel file or
+        requirement.
+        """
+        # Set as part of preparation during download.
+        assert self.req.local_file_path
+        # Wheels are never unnamed.
+        assert self.req.name
+
+        # allowZip64 so wheels larger than 2 GiB can still be opened.
+        with ZipFile(self.req.local_file_path, allowZip64=True) as z:
+            return pkg_resources_distribution_for_wheel(
+                z, self.req.name, self.req.local_file_path
+            )
+
+    def prepare_distribution_metadata(self, finder, build_isolation):
+        # type: (PackageFinder, bool) -> None
+        # Nothing to do: wheel metadata is read straight from the archive.
+        pass
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/exceptions.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/exceptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..dddec789ef40daff2e23a8da45e1042fd210bdc7
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/exceptions.py
@@ -0,0 +1,308 @@
+"""Exceptions used throughout package"""
+
+# The following comment should be removed at some point in the future.
+# mypy: disallow-untyped-defs=False
+
+from __future__ import absolute_import
+
+from itertools import chain, groupby, repeat
+
+from pip._vendor.six import iteritems
+
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional
+ from pip._vendor.pkg_resources import Distribution
+ from pip._internal.req.req_install import InstallRequirement
+
+
+class PipError(Exception):
+    """Base pip exception; all pip-internal errors derive from this."""
+
+
+class ConfigurationError(PipError):
+    """General exception in configuration (files or environment)."""
+
+
+class InstallationError(PipError):
+    """General exception during installation"""
+
+
+class UninstallationError(PipError):
+    """General exception during uninstallation"""
+
+
+class NoneMetadataError(PipError):
+    """
+    Raised when accessing "METADATA" or "PKG-INFO" metadata for a
+    pip._vendor.pkg_resources.Distribution object and
+    `dist.has_metadata('METADATA')` returns True but
+    `dist.get_metadata('METADATA')` returns None (and similarly for
+    "PKG-INFO").
+    """
+
+    def __init__(self, dist, metadata_name):
+        # type: (Distribution, str) -> None
+        """
+        :param dist: A Distribution object.
+        :param metadata_name: The name of the metadata being accessed
+            (can be "METADATA" or "PKG-INFO").
+        """
+        self.dist = dist
+        self.metadata_name = metadata_name
+
+    def __str__(self):
+        # type: () -> str
+        # Use `dist` in the error message because its stringification
+        # includes more information, like the version and location.
+        return (
+            'None {} metadata found for distribution: {}'.format(
+                self.metadata_name, self.dist,
+            )
+        )
+
+
+class DistributionNotFound(InstallationError):
+    """Raised when a distribution cannot be found to satisfy a requirement"""
+
+
+class RequirementsFileParseError(InstallationError):
+    """Raised when a general error occurs parsing a requirements file line."""
+
+
+class BestVersionAlreadyInstalled(PipError):
+    """Raised when the most up-to-date version of a package is already
+    installed."""
+
+
+class BadCommand(PipError):
+    """Raised when virtualenv or a required command is not found"""
+
+
+class CommandError(PipError):
+    """Raised when there is an error in command-line arguments"""
+
+
+class PreviousBuildDirError(PipError):
+    """Raised when there's a previous conflicting build directory"""
+
+
+class InvalidWheelFilename(InstallationError):
+    """Invalid wheel filename."""
+
+
+class UnsupportedWheel(InstallationError):
+    """Unsupported wheel."""
+
+
+class HashErrors(InstallationError):
+ """Multiple HashError instances rolled into one for reporting"""
+
+ def __init__(self):
+ self.errors = []
+
+ def append(self, error):
+ self.errors.append(error)
+
+ def __str__(self):
+ lines = []
+ self.errors.sort(key=lambda e: e.order)
+ for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):
+ lines.append(cls.head)
+ lines.extend(e.body() for e in errors_of_cls)
+ if lines:
+ return '\n'.join(lines)
+
+ def __nonzero__(self):
+ return bool(self.errors)
+
+ def __bool__(self):
+ return self.__nonzero__()
+
+
+class HashError(InstallationError):
+    """
+    A failure to verify a package against known-good hashes
+
+    :cvar order: An int sorting hash exception classes by difficulty of
+        recovery (lower being harder), so the user doesn't bother fretting
+        about unpinned packages when he has deeper issues, like VCS
+        dependencies, to deal with. Also keeps error reports in a
+        deterministic order.
+    :cvar head: A section heading for display above potentially many
+        exceptions of this kind
+    :ivar req: The InstallRequirement that triggered this error. This is
+        pasted on after the exception is instantiated, because it's not
+        typically available earlier.
+
+    """
+    req = None  # type: Optional[InstallRequirement]
+    head = ''
+
+    def body(self):
+        """Return a summary of me for display under the heading.
+
+        This default implementation simply prints a description of the
+        triggering requirement, read from the ``req`` ivar (which the
+        raiser is expected to have assigned after instantiation).
+        """
+        return '    %s' % self._requirement_name()
+
+    def __str__(self):
+        return '%s\n%s' % (self.head, self.body())
+
+    def _requirement_name(self):
+        """Return a description of the requirement that triggered me.
+
+        This default implementation returns long description of the req, with
+        line numbers
+        """
+        return str(self.req) if self.req else 'unknown package'
+
+
+class VcsHashUnsupported(HashError):
+    """A hash was provided for a version-control-system-based requirement, but
+    we don't have a method for hashing those."""
+
+    order = 0
+    head = ("Can't verify hashes for these requirements because we don't "
+            "have a way to hash version control repositories:")
+
+
+class DirectoryUrlHashUnsupported(HashError):
+    """A hash was provided for a ``file://`` requirement that points to a
+    directory, and we don't have a method for hashing directories.
+
+    (The previous docstring was a copy-paste of VcsHashUnsupported's.)
+    """
+
+    order = 1
+    head = ("Can't verify hashes for these file:// requirements because they "
+            "point to directories:")
+
+
+class HashMissing(HashError):
+    """A hash was needed for a requirement but is absent."""
+
+    order = 2
+    head = ('Hashes are required in --require-hashes mode, but they are '
+            'missing from some requirements. Here is a list of those '
+            'requirements along with the hashes their downloaded archives '
+            'actually had. Add lines like these to your requirements files to '
+            'prevent tampering. (If you did not enable --require-hashes '
+            'manually, note that it turns on automatically when any package '
+            'has a hash.)')
+
+    def __init__(self, gotten_hash):
+        """
+        :param gotten_hash: The hash of the (possibly malicious) archive we
+            just downloaded
+        """
+        self.gotten_hash = gotten_hash
+
+    def body(self):
+        # Dodge circular import.
+        from pip._internal.utils.hashes import FAVORITE_HASH
+
+        package = None
+        if self.req:
+            # In the case of URL-based requirements, display the original URL
+            # seen in the requirements file rather than the package name,
+            # so the output can be directly copied into the requirements file.
+            package = (self.req.original_link if self.req.original_link
+                       # In case someone feeds something downright stupid
+                       # to InstallRequirement's constructor.
+                       else getattr(self.req, 'req', None))
+        # Emit a ready-to-paste "name --hash=algo:digest" requirements line.
+        return '    %s --hash=%s:%s' % (package or 'unknown package',
+                                        FAVORITE_HASH,
+                                        self.gotten_hash)
+
+
+class HashUnpinned(HashError):
+    """A requirement had a hash specified but was not pinned to a specific
+    version (hashes are only meaningful for exactly one artifact)."""
+
+    order = 3
+    head = ('In --require-hashes mode, all requirements must have their '
+            'versions pinned with ==. These do not:')
+
+
+class HashMismatch(HashError):
+    """
+    Distribution file hash values don't match.
+
+    :ivar package_name: The name of the package that triggered the hash
+        mismatch. Feel free to write to this after the exception is raised
+        to improve its error message.
+
+    """
+    order = 4
+    head = ('THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS '
+            'FILE. If you have updated the package versions, please update '
+            'the hashes. Otherwise, examine the package contents carefully; '
+            'someone may have tampered with them.')
+
+    def __init__(self, allowed, gots):
+        """
+        :param allowed: A dict of algorithm names pointing to lists of allowed
+            hex digests
+        :param gots: A dict of algorithm names pointing to hashes we
+            actually got from the files under suspicion
+        """
+        self.allowed = allowed
+        self.gots = gots
+
+    def body(self):
+        return '    %s:\n%s' % (self._requirement_name(),
+                                self._hash_comparison())
+
+    def _hash_comparison(self):
+        """
+        Return a comparison of actual and expected hash values.
+
+        Example::
+
+               Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
+                            or 123451234512345123451234512345123451234512345
+                    Got        bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef
+
+        """
+        def hash_then_or(hash_name):
+            # For now, all the decent hashes have 6-char names, so we can get
+            # away with hard-coding space literals.
+            # Yields the algorithm name first, then 'or' forever, so each
+            # subsequent expected digest lines up under the first.
+            return chain([hash_name], repeat('    or'))
+
+        lines = []
+        for hash_name, expecteds in iteritems(self.allowed):
+            prefix = hash_then_or(hash_name)
+            lines.extend(('        Expected %s %s' % (next(prefix), e))
+                         for e in expecteds)
+            lines.append('             Got        %s\n' %
+                         self.gots[hash_name].hexdigest())
+        return '\n'.join(lines)
+
+
+class UnsupportedPythonVersion(InstallationError):
+    """Unsupported python version according to Requires-Python package
+    metadata (the running interpreter fails the version specifier)."""
+
+
+class ConfigurationFileCouldNotBeLoaded(ConfigurationError):
+    """When there are errors while loading a configuration file
+    """
+
+    def __init__(self, reason="could not be loaded", fname=None, error=None):
+        super(ConfigurationFileCouldNotBeLoaded, self).__init__(error)
+        self.reason = reason
+        self.fname = fname
+        # error, when given, is a configparser.Error (which carries a
+        # .message attribute used below); fname and error are mutually
+        # exclusive in practice — see Configuration._construct_parser.
+        self.error = error
+
+    def __str__(self):
+        if self.fname is not None:
+            message_part = " in {}.".format(self.fname)
+        else:
+            # Relies on configparser.Error exposing `.message`.
+            assert self.error is not None
+            message_part = ".\n{}\n".format(self.error.message)
+        return "Configuration file {}{}".format(self.reason, message_part)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/index/__init__.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/index/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..7a17b7b3b6ad49157ee41f3da304fec3d32342d3
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/index/__init__.py
@@ -0,0 +1,2 @@
+"""Index interaction code
+"""
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/index/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/index/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2400caafa15e449015d318040799846d485f28f5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/index/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/index/__pycache__/collector.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/index/__pycache__/collector.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..20d686b5e3da39bcca79f0aead5fd9e83059020f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/index/__pycache__/collector.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a0573578f6451f0415e93ce82424e62080f05785
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/index/collector.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/index/collector.py
new file mode 100644
index 0000000000000000000000000000000000000000..8330793171a3946d2bdba22c81784711acfa5e75
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/index/collector.py
@@ -0,0 +1,544 @@
+"""
+The main purpose of this module is to expose LinkCollector.collect_links().
+"""
+
+import cgi
+import itertools
+import logging
+import mimetypes
+import os
+from collections import OrderedDict
+
+from pip._vendor import html5lib, requests
+from pip._vendor.distlib.compat import unescape
+from pip._vendor.requests.exceptions import HTTPError, RetryError, SSLError
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+from pip._vendor.six.moves.urllib import request as urllib_request
+
+from pip._internal.models.link import Link
+from pip._internal.utils.filetypes import ARCHIVE_EXTENSIONS
+from pip._internal.utils.misc import redact_auth_from_url
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.urls import path_to_url, url_to_path
+from pip._internal.vcs import is_url, vcs
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Callable, Iterable, List, MutableMapping, Optional, Sequence, Tuple,
+ Union,
+ )
+ import xml.etree.ElementTree
+
+ from pip._vendor.requests import Response
+
+ from pip._internal.models.search_scope import SearchScope
+ from pip._internal.network.session import PipSession
+
+ HTMLElement = xml.etree.ElementTree.Element
+ ResponseHeaders = MutableMapping[str, str]
+
+
+logger = logging.getLogger(__name__)
+
+
+def _match_vcs_scheme(url):
+ # type: (str) -> Optional[str]
+ """Look for VCS schemes in the URL.
+
+ Returns the matched VCS scheme, or None if there's no match.
+ """
+ for scheme in vcs.schemes:
+ if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
+ return scheme
+ return None
+
+
+def _is_url_like_archive(url):
+ # type: (str) -> bool
+ """Return whether the URL looks like an archive.
+ """
+ filename = Link(url).filename
+ for bad_ext in ARCHIVE_EXTENSIONS:
+ if filename.endswith(bad_ext):
+ return True
+ return False
+
+
+class _NotHTML(Exception):
+ def __init__(self, content_type, request_desc):
+ # type: (str, str) -> None
+ super(_NotHTML, self).__init__(content_type, request_desc)
+ self.content_type = content_type
+ self.request_desc = request_desc
+
+
+def _ensure_html_header(response):
+ # type: (Response) -> None
+ """Check the Content-Type header to ensure the response contains HTML.
+
+ Raises `_NotHTML` if the content type is not text/html.
+ """
+ content_type = response.headers.get("Content-Type", "")
+ if not content_type.lower().startswith("text/html"):
+ raise _NotHTML(content_type, response.request.method)
+
+
+class _NotHTTP(Exception):
+ pass
+
+
+def _ensure_html_response(url, session):
+ # type: (str, PipSession) -> None
+ """Send a HEAD request to the URL, and ensure the response contains HTML.
+
+ Raises `_NotHTTP` if the URL is not available for a HEAD request, or
+ `_NotHTML` if the content type is not text/html.
+ """
+ scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url)
+ if scheme not in {'http', 'https'}:
+ raise _NotHTTP()
+
+ resp = session.head(url, allow_redirects=True)
+ resp.raise_for_status()
+
+ _ensure_html_header(resp)
+
+
+def _get_html_response(url, session):
+ # type: (str, PipSession) -> Response
+ """Access an HTML page with GET, and return the response.
+
+ This consists of three parts:
+
+ 1. If the URL looks suspiciously like an archive, send a HEAD first to
+ check the Content-Type is HTML, to avoid downloading a large file.
+ Raise `_NotHTTP` if the content type cannot be determined, or
+ `_NotHTML` if it is not HTML.
+ 2. Actually perform the request. Raise HTTP exceptions on network failures.
+ 3. Check the Content-Type header to make sure we got HTML, and raise
+ `_NotHTML` otherwise.
+ """
+ if _is_url_like_archive(url):
+ _ensure_html_response(url, session=session)
+
+ logger.debug('Getting page %s', redact_auth_from_url(url))
+
+ resp = session.get(
+ url,
+ headers={
+ "Accept": "text/html",
+ # We don't want to blindly returned cached data for
+ # /simple/, because authors generally expecting that
+ # twine upload && pip install will function, but if
+ # they've done a pip install in the last ~10 minutes
+ # it won't. Thus by setting this to zero we will not
+ # blindly use any cached data, however the benefit of
+ # using max-age=0 instead of no-cache, is that we will
+ # still support conditional requests, so we will still
+ # minimize traffic sent in cases where the page hasn't
+ # changed at all, we will just always incur the round
+ # trip for the conditional GET now instead of only
+ # once per 10 minutes.
+ # For more information, please see pypa/pip#5670.
+ "Cache-Control": "max-age=0",
+ },
+ )
+ resp.raise_for_status()
+
+ # The check for archives above only works if the url ends with
+ # something that looks like an archive. However that is not a
+ # requirement of an url. Unless we issue a HEAD request on every
+ # url we cannot know ahead of time for sure if something is HTML
+ # or not. However we can check after we've downloaded it.
+ _ensure_html_header(resp)
+
+ return resp
+
+
+def _get_encoding_from_headers(headers):
+ # type: (ResponseHeaders) -> Optional[str]
+ """Determine if we have any encoding information in our headers.
+ """
+ if headers and "Content-Type" in headers:
+ content_type, params = cgi.parse_header(headers["Content-Type"])
+ if "charset" in params:
+ return params['charset']
+ return None
+
+
+def _determine_base_url(document, page_url):
+ # type: (HTMLElement, str) -> str
+ """Determine the HTML document's base URL.
+
+ This looks for a ``<base>`` tag in the HTML document. If present, its href
+ attribute denotes the base URL of anchor tags in the document. If there is
+ no such tag (or if it does not have a valid href attribute), the HTML
+ file's URL is used as the base URL.
+
+ :param document: An HTML document representation. The current
+ implementation expects the result of ``html5lib.parse()``.
+ :param page_url: The URL of the HTML document.
+ """
+ for base in document.findall(".//base"):
+ href = base.get("href")
+ if href is not None:
+ return href
+ return page_url
+
+
+def _clean_link(url):
+ # type: (str) -> str
+ """Makes sure a link is fully encoded. That is, if a ' ' shows up in
+ the link, it will be rewritten to %20 (while not over-quoting
+ % or other characters)."""
+ # Split the URL into parts according to the general structure
+ # `scheme://netloc/path;parameters?query#fragment`. Note that the
+ # `netloc` can be empty and the URI will then refer to a local
+ # filesystem path.
+ result = urllib_parse.urlparse(url)
+ # In both cases below we unquote prior to quoting to make sure
+ # nothing is double quoted.
+ if result.netloc == "":
+ # On Windows the path part might contain a drive letter which
+ # should not be quoted. On Linux where drive letters do not
+ # exist, the colon should be quoted. We rely on urllib.request
+ # to do the right thing here.
+ path = urllib_request.pathname2url(
+ urllib_request.url2pathname(result.path))
+ else:
+ # In addition to the `/` character we protect `@` so that
+ # revision strings in VCS URLs are properly parsed.
+ path = urllib_parse.quote(urllib_parse.unquote(result.path), safe="/@")
+ return urllib_parse.urlunparse(result._replace(path=path))
+
+
+def _create_link_from_element(
+ anchor, # type: HTMLElement
+ page_url, # type: str
+ base_url, # type: str
+):
+ # type: (...) -> Optional[Link]
+ """
+ Convert an anchor element in a simple repository page to a Link.
+ """
+ href = anchor.get("href")
+ if not href:
+ return None
+
+ url = _clean_link(urllib_parse.urljoin(base_url, href))
+ pyrequire = anchor.get('data-requires-python')
+ pyrequire = unescape(pyrequire) if pyrequire else None
+
+ yanked_reason = anchor.get('data-yanked')
+ if yanked_reason:
+ # This is a unicode string in Python 2 (and 3).
+ yanked_reason = unescape(yanked_reason)
+
+ link = Link(
+ url,
+ comes_from=page_url,
+ requires_python=pyrequire,
+ yanked_reason=yanked_reason,
+ )
+
+ return link
+
+
+def parse_links(page):
+ # type: (HTMLPage) -> Iterable[Link]
+ """
+ Parse an HTML document, and yield its anchor elements as Link objects.
+ """
+ document = html5lib.parse(
+ page.content,
+ transport_encoding=page.encoding,
+ namespaceHTMLElements=False,
+ )
+
+ url = page.url
+ base_url = _determine_base_url(document, url)
+ for anchor in document.findall(".//a"):
+ link = _create_link_from_element(
+ anchor,
+ page_url=url,
+ base_url=base_url,
+ )
+ if link is None:
+ continue
+ yield link
+
+
+class HTMLPage(object):
+ """Represents one page, along with its URL"""
+
+ def __init__(
+ self,
+ content, # type: bytes
+ encoding, # type: Optional[str]
+ url, # type: str
+ ):
+ # type: (...) -> None
+ """
+ :param encoding: the encoding to decode the given content.
+ :param url: the URL from which the HTML was downloaded.
+ """
+ self.content = content
+ self.encoding = encoding
+ self.url = url
+
+ def __str__(self):
+ # type: () -> str
+ return redact_auth_from_url(self.url)
+
+
+def _handle_get_page_fail(
+ link, # type: Link
+ reason, # type: Union[str, Exception]
+ meth=None # type: Optional[Callable[..., None]]
+):
+ # type: (...) -> None
+ if meth is None:
+ meth = logger.debug
+ meth("Could not fetch URL %s: %s - skipping", link, reason)
+
+
+def _make_html_page(response):
+ # type: (Response) -> HTMLPage
+ encoding = _get_encoding_from_headers(response.headers)
+ return HTMLPage(response.content, encoding=encoding, url=response.url)
+
+
+def _get_html_page(link, session=None):
+ # type: (Link, Optional[PipSession]) -> Optional[HTMLPage]
+ if session is None:
+ raise TypeError(
+ "_get_html_page() missing 1 required keyword argument: 'session'"
+ )
+
+ url = link.url.split('#', 1)[0]
+
+ # Check for VCS schemes that do not support lookup as web pages.
+ vcs_scheme = _match_vcs_scheme(url)
+ if vcs_scheme:
+ logger.debug('Cannot look at %s URL %s', vcs_scheme, link)
+ return None
+
+ # Tack index.html onto file:// URLs that point to directories
+ scheme, _, path, _, _, _ = urllib_parse.urlparse(url)
+ if (scheme == 'file' and os.path.isdir(urllib_request.url2pathname(path))):
+ # add trailing slash if not present so urljoin doesn't trim
+ # final segment
+ if not url.endswith('/'):
+ url += '/'
+ url = urllib_parse.urljoin(url, 'index.html')
+ logger.debug(' file: URL is directory, getting %s', url)
+
+ try:
+ resp = _get_html_response(url, session=session)
+ except _NotHTTP:
+ logger.debug(
+ 'Skipping page %s because it looks like an archive, and cannot '
+ 'be checked by HEAD.', link,
+ )
+ except _NotHTML as exc:
+ logger.debug(
+ 'Skipping page %s because the %s request got Content-Type: %s',
+ link, exc.request_desc, exc.content_type,
+ )
+ except HTTPError as exc:
+ _handle_get_page_fail(link, exc)
+ except RetryError as exc:
+ _handle_get_page_fail(link, exc)
+ except SSLError as exc:
+ reason = "There was a problem confirming the ssl certificate: "
+ reason += str(exc)
+ _handle_get_page_fail(link, reason, meth=logger.info)
+ except requests.ConnectionError as exc:
+ _handle_get_page_fail(link, "connection error: %s" % exc)
+ except requests.Timeout:
+ _handle_get_page_fail(link, "timed out")
+ else:
+ return _make_html_page(resp)
+ return None
+
+
+def _remove_duplicate_links(links):
+ # type: (Iterable[Link]) -> List[Link]
+ """
+ Return a list of links, with duplicates removed and ordering preserved.
+ """
+ # We preserve the ordering when removing duplicates because we can.
+ return list(OrderedDict.fromkeys(links))
+
+
+def group_locations(locations, expand_dir=False):
+ # type: (Sequence[str], bool) -> Tuple[List[str], List[str]]
+ """
+ Divide a list of locations into two groups: "files" (archives) and "urls."
+
+ :return: A pair of lists (files, urls).
+ """
+ files = []
+ urls = []
+
+ # puts the url for the given file path into the appropriate list
+ def sort_path(path):
+ # type: (str) -> None
+ url = path_to_url(path)
+ if mimetypes.guess_type(url, strict=False)[0] == 'text/html':
+ urls.append(url)
+ else:
+ files.append(url)
+
+ for url in locations:
+
+ is_local_path = os.path.exists(url)
+ is_file_url = url.startswith('file:')
+
+ if is_local_path or is_file_url:
+ if is_local_path:
+ path = url
+ else:
+ path = url_to_path(url)
+ if os.path.isdir(path):
+ if expand_dir:
+ path = os.path.realpath(path)
+ for item in os.listdir(path):
+ sort_path(os.path.join(path, item))
+ elif is_file_url:
+ urls.append(url)
+ else:
+ logger.warning(
+ "Path '{0}' is ignored: "
+ "it is a directory.".format(path),
+ )
+ elif os.path.isfile(path):
+ sort_path(path)
+ else:
+ logger.warning(
+ "Url '%s' is ignored: it is neither a file "
+ "nor a directory.", url,
+ )
+ elif is_url(url):
+ # Only add url with clear scheme
+ urls.append(url)
+ else:
+ logger.warning(
+ "Url '%s' is ignored. It is either a non-existing "
+ "path or lacks a specific scheme.", url,
+ )
+
+ return files, urls
+
+
+class CollectedLinks(object):
+
+ """
+ Encapsulates the return value of a call to LinkCollector.collect_links().
+
+ The return value includes both URLs to project pages containing package
+ links, as well as individual package Link objects collected from other
+ sources.
+
+ This info is stored separately as:
+
+ (1) links from the configured file locations,
+ (2) links from the configured find_links, and
+ (3) urls to HTML project pages, as described by the PEP 503 simple
+ repository API.
+ """
+
+ def __init__(
+ self,
+ files, # type: List[Link]
+ find_links, # type: List[Link]
+ project_urls, # type: List[Link]
+ ):
+ # type: (...) -> None
+ """
+ :param files: Links from file locations.
+ :param find_links: Links from find_links.
+ :param project_urls: URLs to HTML project pages, as described by
+ the PEP 503 simple repository API.
+ """
+ self.files = files
+ self.find_links = find_links
+ self.project_urls = project_urls
+
+
+class LinkCollector(object):
+
+ """
+ Responsible for collecting Link objects from all configured locations,
+ making network requests as needed.
+
+ The class's main method is its collect_links() method.
+ """
+
+ def __init__(
+ self,
+ session, # type: PipSession
+ search_scope, # type: SearchScope
+ ):
+ # type: (...) -> None
+ self.search_scope = search_scope
+ self.session = session
+
+ @property
+ def find_links(self):
+ # type: () -> List[str]
+ return self.search_scope.find_links
+
+ def fetch_page(self, location):
+ # type: (Link) -> Optional[HTMLPage]
+ """
+ Fetch an HTML page containing package links.
+ """
+ return _get_html_page(location, session=self.session)
+
+ def collect_links(self, project_name):
+ # type: (str) -> CollectedLinks
+ """Find all available links for the given project name.
+
+ :return: All the Link objects (unfiltered), as a CollectedLinks object.
+ """
+ search_scope = self.search_scope
+ index_locations = search_scope.get_index_urls_locations(project_name)
+ index_file_loc, index_url_loc = group_locations(index_locations)
+ fl_file_loc, fl_url_loc = group_locations(
+ self.find_links, expand_dir=True,
+ )
+
+ file_links = [
+ Link(url) for url in itertools.chain(index_file_loc, fl_file_loc)
+ ]
+
+ # We trust every directly linked archive in find_links
+ find_link_links = [Link(url, '-f') for url in self.find_links]
+
+ # We trust every url that the user has given us whether it was given
+ # via --index-url or --find-links.
+ # We want to filter out anything that does not have a secure origin.
+ url_locations = [
+ link for link in itertools.chain(
+ (Link(url) for url in index_url_loc),
+ (Link(url) for url in fl_url_loc),
+ )
+ if self.session.is_secure_origin(link)
+ ]
+
+ url_locations = _remove_duplicate_links(url_locations)
+ lines = [
+ '{} location(s) to search for versions of {}:'.format(
+ len(url_locations), project_name,
+ ),
+ ]
+ for link in url_locations:
+ lines.append('* {}'.format(link))
+ logger.debug('\n'.join(lines))
+
+ return CollectedLinks(
+ files=file_links,
+ find_links=find_link_links,
+ project_urls=url_locations,
+ )
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/index/package_finder.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/index/package_finder.py
new file mode 100644
index 0000000000000000000000000000000000000000..a74d78db5a6c1738d54e784840f97daddff18776
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/index/package_finder.py
@@ -0,0 +1,1013 @@
+"""Routines related to PyPI, indexes"""
+
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+from __future__ import absolute_import
+
+import logging
+import re
+
+from pip._vendor.packaging import specifiers
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.packaging.version import parse as parse_version
+
+from pip._internal.exceptions import (
+ BestVersionAlreadyInstalled,
+ DistributionNotFound,
+ InvalidWheelFilename,
+ UnsupportedWheel,
+)
+from pip._internal.index.collector import parse_links
+from pip._internal.models.candidate import InstallationCandidate
+from pip._internal.models.format_control import FormatControl
+from pip._internal.models.link import Link
+from pip._internal.models.selection_prefs import SelectionPreferences
+from pip._internal.models.target_python import TargetPython
+from pip._internal.models.wheel import Wheel
+from pip._internal.utils.filetypes import WHEEL_EXTENSION
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import build_netloc
+from pip._internal.utils.packaging import check_requires_python
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.unpacking import SUPPORTED_EXTENSIONS
+from pip._internal.utils.urls import url_to_path
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ FrozenSet, Iterable, List, Optional, Set, Text, Tuple, Union,
+ )
+
+ from pip._vendor.packaging.tags import Tag
+ from pip._vendor.packaging.version import _BaseVersion
+
+ from pip._internal.index.collector import LinkCollector
+ from pip._internal.models.search_scope import SearchScope
+ from pip._internal.req import InstallRequirement
+ from pip._internal.utils.hashes import Hashes
+
+ BuildTag = Union[Tuple[()], Tuple[int, str]]
+ CandidateSortingKey = (
+ Tuple[int, int, int, _BaseVersion, BuildTag, Optional[int]]
+ )
+
+
+__all__ = ['FormatControl', 'BestCandidateResult', 'PackageFinder']
+
+
+logger = logging.getLogger(__name__)
+
+
+def _check_link_requires_python(
+ link, # type: Link
+ version_info, # type: Tuple[int, int, int]
+ ignore_requires_python=False, # type: bool
+):
+ # type: (...) -> bool
+ """
+ Return whether the given Python version is compatible with a link's
+ "Requires-Python" value.
+
+ :param version_info: A 3-tuple of ints representing the Python
+ major-minor-micro version to check.
+ :param ignore_requires_python: Whether to ignore the "Requires-Python"
+ value if the given Python version isn't compatible.
+ """
+ try:
+ is_compatible = check_requires_python(
+ link.requires_python, version_info=version_info,
+ )
+ except specifiers.InvalidSpecifier:
+ logger.debug(
+ "Ignoring invalid Requires-Python (%r) for link: %s",
+ link.requires_python, link,
+ )
+ else:
+ if not is_compatible:
+ version = '.'.join(map(str, version_info))
+ if not ignore_requires_python:
+ logger.debug(
+ 'Link requires a different Python (%s not in: %r): %s',
+ version, link.requires_python, link,
+ )
+ return False
+
+ logger.debug(
+ 'Ignoring failed Requires-Python check (%s not in: %r) '
+ 'for link: %s',
+ version, link.requires_python, link,
+ )
+
+ return True
+
+
+class LinkEvaluator(object):
+
+ """
+ Responsible for evaluating links for a particular project.
+ """
+
+ _py_version_re = re.compile(r'-py([123]\.?[0-9]?)$')
+
+ # Don't include an allow_yanked default value to make sure each call
+ # site considers whether yanked releases are allowed. This also causes
+ # that decision to be made explicit in the calling code, which helps
+ # people when reading the code.
+ def __init__(
+ self,
+ project_name, # type: str
+ canonical_name, # type: str
+ formats, # type: FrozenSet[str]
+ target_python, # type: TargetPython
+ allow_yanked, # type: bool
+ ignore_requires_python=None, # type: Optional[bool]
+ ):
+ # type: (...) -> None
+ """
+ :param project_name: The user supplied package name.
+ :param canonical_name: The canonical package name.
+ :param formats: The formats allowed for this package. Should be a set
+ with 'binary' or 'source' or both in it.
+ :param target_python: The target Python interpreter to use when
+ evaluating link compatibility. This is used, for example, to
+ check wheel compatibility, as well as when checking the Python
+ version, e.g. the Python version embedded in a link filename
+ (or egg fragment) and against an HTML link's optional PEP 503
+ "data-requires-python" attribute.
+ :param allow_yanked: Whether files marked as yanked (in the sense
+ of PEP 592) are permitted to be candidates for install.
+ :param ignore_requires_python: Whether to ignore incompatible
+ PEP 503 "data-requires-python" values in HTML links. Defaults
+ to False.
+ """
+ if ignore_requires_python is None:
+ ignore_requires_python = False
+
+ self._allow_yanked = allow_yanked
+ self._canonical_name = canonical_name
+ self._ignore_requires_python = ignore_requires_python
+ self._formats = formats
+ self._target_python = target_python
+
+ self.project_name = project_name
+
+ def evaluate_link(self, link):
+ # type: (Link) -> Tuple[bool, Optional[Text]]
+ """
+ Determine whether a link is a candidate for installation.
+
+ :return: A tuple (is_candidate, result), where `result` is (1) a
+ version string if `is_candidate` is True, and (2) if
+ `is_candidate` is False, an optional string to log the reason
+ the link fails to qualify.
+ """
+ version = None
+ if link.is_yanked and not self._allow_yanked:
+ reason = link.yanked_reason or '<none given>'
+ # Mark this as a unicode string to prevent "UnicodeEncodeError:
+ # 'ascii' codec can't encode character" in Python 2 when
+ # the reason contains non-ascii characters.
+ return (False, u'yanked for reason: {}'.format(reason))
+
+ if link.egg_fragment:
+ egg_info = link.egg_fragment
+ ext = link.ext
+ else:
+ egg_info, ext = link.splitext()
+ if not ext:
+ return (False, 'not a file')
+ if ext not in SUPPORTED_EXTENSIONS:
+ return (False, 'unsupported archive format: %s' % ext)
+ if "binary" not in self._formats and ext == WHEEL_EXTENSION:
+ reason = 'No binaries permitted for %s' % self.project_name
+ return (False, reason)
+ if "macosx10" in link.path and ext == '.zip':
+ return (False, 'macosx10 one')
+ if ext == WHEEL_EXTENSION:
+ try:
+ wheel = Wheel(link.filename)
+ except InvalidWheelFilename:
+ return (False, 'invalid wheel filename')
+ if canonicalize_name(wheel.name) != self._canonical_name:
+ reason = 'wrong project name (not %s)' % self.project_name
+ return (False, reason)
+
+ supported_tags = self._target_python.get_tags()
+ if not wheel.supported(supported_tags):
+ # Include the wheel's tags in the reason string to
+ # simplify troubleshooting compatibility issues.
+ file_tags = wheel.get_formatted_file_tags()
+ reason = (
+ "none of the wheel's tags match: {}".format(
+ ', '.join(file_tags)
+ )
+ )
+ return (False, reason)
+
+ version = wheel.version
+
+ # This should be up by the self.ok_binary check, but see issue 2700.
+ if "source" not in self._formats and ext != WHEEL_EXTENSION:
+ return (False, 'No sources permitted for %s' % self.project_name)
+
+ if not version:
+ version = _extract_version_from_fragment(
+ egg_info, self._canonical_name,
+ )
+ if not version:
+ return (
+ False, 'Missing project version for %s' % self.project_name,
+ )
+
+ match = self._py_version_re.search(version)
+ if match:
+ version = version[:match.start()]
+ py_version = match.group(1)
+ if py_version != self._target_python.py_version:
+ return (False, 'Python version is incorrect')
+
+ supports_python = _check_link_requires_python(
+ link, version_info=self._target_python.py_version_info,
+ ignore_requires_python=self._ignore_requires_python,
+ )
+ if not supports_python:
+ # Return None for the reason text to suppress calling
+ # _log_skipped_link().
+ return (False, None)
+
+ logger.debug('Found link %s, version: %s', link, version)
+
+ return (True, version)
+
+
+def filter_unallowed_hashes(
+ candidates, # type: List[InstallationCandidate]
+ hashes, # type: Hashes
+ project_name, # type: str
+):
+ # type: (...) -> List[InstallationCandidate]
+ """
+ Filter out candidates whose hashes aren't allowed, and return a new
+ list of candidates.
+
+ If at least one candidate has an allowed hash, then all candidates with
+ either an allowed hash or no hash specified are returned. Otherwise,
+ the given candidates are returned.
+
+ Including the candidates with no hash specified when there is a match
+ allows a warning to be logged if there is a more preferred candidate
+ with no hash specified. Returning all candidates in the case of no
+ matches lets pip report the hash of the candidate that would otherwise
+ have been installed (e.g. permitting the user to more easily update
+ their requirements file with the desired hash).
+ """
+ if not hashes:
+ logger.debug(
+ 'Given no hashes to check %s links for project %r: '
+ 'discarding no candidates',
+ len(candidates),
+ project_name,
+ )
+ # Make sure we're not returning back the given value.
+ return list(candidates)
+
+ matches_or_no_digest = []
+ # Collect the non-matches for logging purposes.
+ non_matches = []
+ match_count = 0
+ for candidate in candidates:
+ link = candidate.link
+ if not link.has_hash:
+ pass
+ elif link.is_hash_allowed(hashes=hashes):
+ match_count += 1
+ else:
+ non_matches.append(candidate)
+ continue
+
+ matches_or_no_digest.append(candidate)
+
+ if match_count:
+ filtered = matches_or_no_digest
+ else:
+ # Make sure we're not returning back the given value.
+ filtered = list(candidates)
+
+ if len(filtered) == len(candidates):
+ discard_message = 'discarding no candidates'
+ else:
+ discard_message = 'discarding {} non-matches:\n {}'.format(
+ len(non_matches),
+ '\n '.join(str(candidate.link) for candidate in non_matches)
+ )
+
+ logger.debug(
+ 'Checked %s links for project %r against %s hashes '
+ '(%s matches, %s no digest): %s',
+ len(candidates),
+ project_name,
+ hashes.digest_count,
+ match_count,
+ len(matches_or_no_digest) - match_count,
+ discard_message
+ )
+
+ return filtered
+
+
+class CandidatePreferences(object):
+
+ """
+ Encapsulates some of the preferences for filtering and sorting
+ InstallationCandidate objects.
+ """
+
+ def __init__(
+ self,
+ prefer_binary=False, # type: bool
+ allow_all_prereleases=False, # type: bool
+ ):
+ # type: (...) -> None
+ """
+ :param allow_all_prereleases: Whether to allow all pre-releases.
+ """
+ self.allow_all_prereleases = allow_all_prereleases
+ self.prefer_binary = prefer_binary
+
+
+class BestCandidateResult(object):
+ """A collection of candidates, returned by `PackageFinder.find_best_candidate`.
+
+ This class is only intended to be instantiated by CandidateEvaluator's
+ `compute_best_candidate()` method.
+ """
+
+ def __init__(
+ self,
+ candidates, # type: List[InstallationCandidate]
+ applicable_candidates, # type: List[InstallationCandidate]
+ best_candidate, # type: Optional[InstallationCandidate]
+ ):
+ # type: (...) -> None
+ """
+ :param candidates: A sequence of all available candidates found.
+ :param applicable_candidates: The applicable candidates.
+ :param best_candidate: The most preferred candidate found, or None
+ if no applicable candidates were found.
+ """
+ assert set(applicable_candidates) <= set(candidates)
+
+ if best_candidate is None:
+ assert not applicable_candidates
+ else:
+ assert best_candidate in applicable_candidates
+
+ self._applicable_candidates = applicable_candidates
+ self._candidates = candidates
+
+ self.best_candidate = best_candidate
+
+ def iter_all(self):
+ # type: () -> Iterable[InstallationCandidate]
+ """Iterate through all candidates.
+ """
+ return iter(self._candidates)
+
+ def iter_applicable(self):
+ # type: () -> Iterable[InstallationCandidate]
+ """Iterate through the applicable candidates.
+ """
+ return iter(self._applicable_candidates)
+
+
+class CandidateEvaluator(object):
+
+ """
+ Responsible for filtering and sorting candidates for installation based
+ on what tags are valid.
+ """
+
+ @classmethod
+ def create(
+ cls,
+ project_name, # type: str
+ target_python=None, # type: Optional[TargetPython]
+ prefer_binary=False, # type: bool
+ allow_all_prereleases=False, # type: bool
+ specifier=None, # type: Optional[specifiers.BaseSpecifier]
+ hashes=None, # type: Optional[Hashes]
+ ):
+ # type: (...) -> CandidateEvaluator
+ """Create a CandidateEvaluator object.
+
+ :param target_python: The target Python interpreter to use when
+ checking compatibility. If None (the default), a TargetPython
+ object will be constructed from the running Python.
+ :param specifier: An optional object implementing `filter`
+ (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable
+ versions.
+ :param hashes: An optional collection of allowed hashes.
+ """
+ if target_python is None:
+ target_python = TargetPython()
+ if specifier is None:
+ specifier = specifiers.SpecifierSet()
+
+ supported_tags = target_python.get_tags()
+
+ return cls(
+ project_name=project_name,
+ supported_tags=supported_tags,
+ specifier=specifier,
+ prefer_binary=prefer_binary,
+ allow_all_prereleases=allow_all_prereleases,
+ hashes=hashes,
+ )
+
+ def __init__(
+ self,
+ project_name, # type: str
+ supported_tags, # type: List[Tag]
+ specifier, # type: specifiers.BaseSpecifier
+ prefer_binary=False, # type: bool
+ allow_all_prereleases=False, # type: bool
+ hashes=None, # type: Optional[Hashes]
+ ):
+ # type: (...) -> None
+ """
+ :param supported_tags: The PEP 425 tags supported by the target
+ Python in order of preference (most preferred first).
+ """
+ self._allow_all_prereleases = allow_all_prereleases
+ self._hashes = hashes
+ self._prefer_binary = prefer_binary
+ self._project_name = project_name
+ self._specifier = specifier
+ self._supported_tags = supported_tags
+
+ def get_applicable_candidates(
+ self,
+ candidates, # type: List[InstallationCandidate]
+ ):
+ # type: (...) -> List[InstallationCandidate]
+ """
+ Return the applicable candidates from a list of candidates.
+ """
+ # Using None infers from the specifier instead.
+ allow_prereleases = self._allow_all_prereleases or None
+ specifier = self._specifier
+ versions = {
+ str(v) for v in specifier.filter(
+ # We turn the version object into a str here because otherwise
+ # when we're debundled but setuptools isn't, Python will see
+ # packaging.version.Version and
+ # pkg_resources._vendor.packaging.version.Version as different
+ # types. This way we'll use a str as a common data interchange
+ # format. If we stop using the pkg_resources provided specifier
+ # and start using our own, we can drop the cast to str().
+ (str(c.version) for c in candidates),
+ prereleases=allow_prereleases,
+ )
+ }
+
+ # Again, converting version to str to deal with debundling.
+ applicable_candidates = [
+ c for c in candidates if str(c.version) in versions
+ ]
+
+ filtered_applicable_candidates = filter_unallowed_hashes(
+ candidates=applicable_candidates,
+ hashes=self._hashes,
+ project_name=self._project_name,
+ )
+
+ return sorted(filtered_applicable_candidates, key=self._sort_key)
+
+ def _sort_key(self, candidate):
+ # type: (InstallationCandidate) -> CandidateSortingKey
+ """
+ Function to pass as the `key` argument to a call to sorted() to sort
+ InstallationCandidates by preference.
+
+ Returns a tuple such that tuples sorting as greater using Python's
+ default comparison operator are more preferred.
+
+ The preference is as follows:
+
+ First and foremost, candidates with allowed (matching) hashes are
+ always preferred over candidates without matching hashes. This is
+ because e.g. if the only candidate with an allowed hash is yanked,
+ we still want to use that candidate.
+
+ Second, excepting hash considerations, candidates that have been
+ yanked (in the sense of PEP 592) are always less preferred than
+ candidates that haven't been yanked. Then:
+
+ If not finding wheels, they are sorted by version only.
+ If finding wheels, then the sort order is by version, then:
+ 1. existing installs
+ 2. wheels ordered via Wheel.support_index_min(self._supported_tags)
+ 3. source archives
+ If prefer_binary was set, then all wheels are sorted above sources.
+
+ Note: it was considered to embed this logic into the Link
+ comparison operators, but then different sdist links
+ with the same version, would have to be considered equal
+ """
+ valid_tags = self._supported_tags
+ support_num = len(valid_tags)
+ build_tag = () # type: BuildTag
+ binary_preference = 0
+ link = candidate.link
+ if link.is_wheel:
+ # can raise InvalidWheelFilename
+ wheel = Wheel(link.filename)
+ if not wheel.supported(valid_tags):
+ raise UnsupportedWheel(
+ "%s is not a supported wheel for this platform. It "
+ "can't be sorted." % wheel.filename
+ )
+ if self._prefer_binary:
+ binary_preference = 1
+ pri = -(wheel.support_index_min(valid_tags))
+ if wheel.build_tag is not None:
+ match = re.match(r'^(\d+)(.*)$', wheel.build_tag)
+ build_tag_groups = match.groups()
+ build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
+ else: # sdist
+ pri = -(support_num)
+ has_allowed_hash = int(link.is_hash_allowed(self._hashes))
+ yank_value = -1 * int(link.is_yanked) # -1 for yanked.
+ return (
+ has_allowed_hash, yank_value, binary_preference, candidate.version,
+ build_tag, pri,
+ )
+
+ def sort_best_candidate(
+ self,
+ candidates, # type: List[InstallationCandidate]
+ ):
+ # type: (...) -> Optional[InstallationCandidate]
+ """
+ Return the best candidate per the instance's sort order, or None if
+ no candidate is acceptable.
+ """
+ if not candidates:
+ return None
+
+ best_candidate = max(candidates, key=self._sort_key)
+
+ # Log a warning per PEP 592 if necessary before returning.
+ link = best_candidate.link
+ if link.is_yanked:
+ reason = link.yanked_reason or '<none given>'
+ msg = (
+ # Mark this as a unicode string to prevent
+ # "UnicodeEncodeError: 'ascii' codec can't encode character"
+ # in Python 2 when the reason contains non-ascii characters.
+ u'The candidate selected for download or install is a '
+ 'yanked version: {candidate}\n'
+ 'Reason for being yanked: {reason}'
+ ).format(candidate=best_candidate, reason=reason)
+ logger.warning(msg)
+
+ return best_candidate
+
+ def compute_best_candidate(
+ self,
+ candidates, # type: List[InstallationCandidate]
+ ):
+ # type: (...) -> BestCandidateResult
+ """
+ Compute and return a `BestCandidateResult` instance.
+ """
+ applicable_candidates = self.get_applicable_candidates(candidates)
+
+ best_candidate = self.sort_best_candidate(applicable_candidates)
+
+ return BestCandidateResult(
+ candidates,
+ applicable_candidates=applicable_candidates,
+ best_candidate=best_candidate,
+ )
+
+
+class PackageFinder(object):
+ """This finds packages.
+
+ This is meant to match easy_install's technique for looking for
+ packages, by reading pages and looking for appropriate links.
+ """
+
+ def __init__(
+ self,
+ link_collector, # type: LinkCollector
+ target_python, # type: TargetPython
+ allow_yanked, # type: bool
+ format_control=None, # type: Optional[FormatControl]
+ candidate_prefs=None, # type: CandidatePreferences
+ ignore_requires_python=None, # type: Optional[bool]
+ ):
+ # type: (...) -> None
+ """
+ This constructor is primarily meant to be used by the create() class
+ method and from tests.
+
+ :param format_control: A FormatControl object, used to control
+ the selection of source packages / binary packages when consulting
+ the index and links.
+ :param candidate_prefs: Options to use when creating a
+ CandidateEvaluator object.
+ """
+ if candidate_prefs is None:
+ candidate_prefs = CandidatePreferences()
+
+ format_control = format_control or FormatControl(set(), set())
+
+ self._allow_yanked = allow_yanked
+ self._candidate_prefs = candidate_prefs
+ self._ignore_requires_python = ignore_requires_python
+ self._link_collector = link_collector
+ self._target_python = target_python
+
+ self.format_control = format_control
+
+ # These are boring links that have already been logged somehow.
+ self._logged_links = set() # type: Set[Link]
+
+ # Don't include an allow_yanked default value to make sure each call
+ # site considers whether yanked releases are allowed. This also causes
+ # that decision to be made explicit in the calling code, which helps
+ # people when reading the code.
+ @classmethod
+ def create(
+ cls,
+ link_collector, # type: LinkCollector
+ selection_prefs, # type: SelectionPreferences
+ target_python=None, # type: Optional[TargetPython]
+ ):
+ # type: (...) -> PackageFinder
+ """Create a PackageFinder.
+
+ :param selection_prefs: The candidate selection preferences, as a
+ SelectionPreferences object.
+ :param target_python: The target Python interpreter to use when
+ checking compatibility. If None (the default), a TargetPython
+ object will be constructed from the running Python.
+ """
+ if target_python is None:
+ target_python = TargetPython()
+
+ candidate_prefs = CandidatePreferences(
+ prefer_binary=selection_prefs.prefer_binary,
+ allow_all_prereleases=selection_prefs.allow_all_prereleases,
+ )
+
+ return cls(
+ candidate_prefs=candidate_prefs,
+ link_collector=link_collector,
+ target_python=target_python,
+ allow_yanked=selection_prefs.allow_yanked,
+ format_control=selection_prefs.format_control,
+ ignore_requires_python=selection_prefs.ignore_requires_python,
+ )
+
+ @property
+ def search_scope(self):
+ # type: () -> SearchScope
+ return self._link_collector.search_scope
+
+ @search_scope.setter
+ def search_scope(self, search_scope):
+ # type: (SearchScope) -> None
+ self._link_collector.search_scope = search_scope
+
+ @property
+ def find_links(self):
+ # type: () -> List[str]
+ return self._link_collector.find_links
+
+ @property
+ def index_urls(self):
+ # type: () -> List[str]
+ return self.search_scope.index_urls
+
+ @property
+ def trusted_hosts(self):
+ # type: () -> Iterable[str]
+ for host_port in self._link_collector.session.pip_trusted_origins:
+ yield build_netloc(*host_port)
+
+ @property
+ def allow_all_prereleases(self):
+ # type: () -> bool
+ return self._candidate_prefs.allow_all_prereleases
+
+ def set_allow_all_prereleases(self):
+ # type: () -> None
+ self._candidate_prefs.allow_all_prereleases = True
+
+ def make_link_evaluator(self, project_name):
+ # type: (str) -> LinkEvaluator
+ canonical_name = canonicalize_name(project_name)
+ formats = self.format_control.get_allowed_formats(canonical_name)
+
+ return LinkEvaluator(
+ project_name=project_name,
+ canonical_name=canonical_name,
+ formats=formats,
+ target_python=self._target_python,
+ allow_yanked=self._allow_yanked,
+ ignore_requires_python=self._ignore_requires_python,
+ )
+
+ def _sort_links(self, links):
+ # type: (Iterable[Link]) -> List[Link]
+ """
+ Returns elements of links in order, non-egg links first, egg links
+ second, while eliminating duplicates
+ """
+ eggs, no_eggs = [], []
+ seen = set() # type: Set[Link]
+ for link in links:
+ if link not in seen:
+ seen.add(link)
+ if link.egg_fragment:
+ eggs.append(link)
+ else:
+ no_eggs.append(link)
+ return no_eggs + eggs
+
+ def _log_skipped_link(self, link, reason):
+ # type: (Link, Text) -> None
+ if link not in self._logged_links:
+ # Mark this as a unicode string to prevent "UnicodeEncodeError:
+ # 'ascii' codec can't encode character" in Python 2 when
+ # the reason contains non-ascii characters.
+ # Also, put the link at the end so the reason is more visible
+ # and because the link string is usually very long.
+ logger.debug(u'Skipping link: %s: %s', reason, link)
+ self._logged_links.add(link)
+
+ def get_install_candidate(self, link_evaluator, link):
+ # type: (LinkEvaluator, Link) -> Optional[InstallationCandidate]
+ """
+ If the link is a candidate for install, convert it to an
+ InstallationCandidate and return it. Otherwise, return None.
+ """
+ is_candidate, result = link_evaluator.evaluate_link(link)
+ if not is_candidate:
+ if result:
+ self._log_skipped_link(link, reason=result)
+ return None
+
+ return InstallationCandidate(
+ name=link_evaluator.project_name,
+ link=link,
+ # Convert the Text result to str since InstallationCandidate
+ # accepts str.
+ version=str(result),
+ )
+
+ def evaluate_links(self, link_evaluator, links):
+ # type: (LinkEvaluator, Iterable[Link]) -> List[InstallationCandidate]
+ """
+ Convert links that are candidates to InstallationCandidate objects.
+ """
+ candidates = []
+ for link in self._sort_links(links):
+ candidate = self.get_install_candidate(link_evaluator, link)
+ if candidate is not None:
+ candidates.append(candidate)
+
+ return candidates
+
+ def process_project_url(self, project_url, link_evaluator):
+ # type: (Link, LinkEvaluator) -> List[InstallationCandidate]
+ logger.debug(
+ 'Fetching project page and analyzing links: %s', project_url,
+ )
+ html_page = self._link_collector.fetch_page(project_url)
+ if html_page is None:
+ return []
+
+ page_links = list(parse_links(html_page))
+
+ with indent_log():
+ package_links = self.evaluate_links(
+ link_evaluator,
+ links=page_links,
+ )
+
+ return package_links
+
+ def find_all_candidates(self, project_name):
+ # type: (str) -> List[InstallationCandidate]
+ """Find all available InstallationCandidate for project_name
+
+ This checks index_urls and find_links.
+ All versions found are returned as an InstallationCandidate list.
+
+ See LinkEvaluator.evaluate_link() for details on which files
+ are accepted.
+ """
+ collected_links = self._link_collector.collect_links(project_name)
+
+ link_evaluator = self.make_link_evaluator(project_name)
+
+ find_links_versions = self.evaluate_links(
+ link_evaluator,
+ links=collected_links.find_links,
+ )
+
+ page_versions = []
+ for project_url in collected_links.project_urls:
+ package_links = self.process_project_url(
+ project_url, link_evaluator=link_evaluator,
+ )
+ page_versions.extend(package_links)
+
+ file_versions = self.evaluate_links(
+ link_evaluator,
+ links=collected_links.files,
+ )
+ if file_versions:
+ file_versions.sort(reverse=True)
+ logger.debug(
+ 'Local files found: %s',
+ ', '.join([
+ url_to_path(candidate.link.url)
+ for candidate in file_versions
+ ])
+ )
+
+ # This is an intentional priority ordering
+ return file_versions + find_links_versions + page_versions
+
+ def make_candidate_evaluator(
+ self,
+ project_name, # type: str
+ specifier=None, # type: Optional[specifiers.BaseSpecifier]
+ hashes=None, # type: Optional[Hashes]
+ ):
+ # type: (...) -> CandidateEvaluator
+ """Create a CandidateEvaluator object to use.
+ """
+ candidate_prefs = self._candidate_prefs
+ return CandidateEvaluator.create(
+ project_name=project_name,
+ target_python=self._target_python,
+ prefer_binary=candidate_prefs.prefer_binary,
+ allow_all_prereleases=candidate_prefs.allow_all_prereleases,
+ specifier=specifier,
+ hashes=hashes,
+ )
+
+ def find_best_candidate(
+ self,
+ project_name, # type: str
+ specifier=None, # type: Optional[specifiers.BaseSpecifier]
+ hashes=None, # type: Optional[Hashes]
+ ):
+ # type: (...) -> BestCandidateResult
+ """Find matches for the given project and specifier.
+
+ :param specifier: An optional object implementing `filter`
+ (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable
+ versions.
+
+ :return: A `BestCandidateResult` instance.
+ """
+ candidates = self.find_all_candidates(project_name)
+ candidate_evaluator = self.make_candidate_evaluator(
+ project_name=project_name,
+ specifier=specifier,
+ hashes=hashes,
+ )
+ return candidate_evaluator.compute_best_candidate(candidates)
+
+ def find_requirement(self, req, upgrade):
+ # type: (InstallRequirement, bool) -> Optional[Link]
+ """Try to find a Link matching req
+
+ Expects req, an InstallRequirement and upgrade, a boolean
+ Returns a Link if found,
+ Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise
+ """
+ hashes = req.hashes(trust_internet=False)
+ best_candidate_result = self.find_best_candidate(
+ req.name, specifier=req.specifier, hashes=hashes,
+ )
+ best_candidate = best_candidate_result.best_candidate
+
+ installed_version = None # type: Optional[_BaseVersion]
+ if req.satisfied_by is not None:
+ installed_version = parse_version(req.satisfied_by.version)
+
+ def _format_versions(cand_iter):
+ # type: (Iterable[InstallationCandidate]) -> str
+ # This repeated parse_version and str() conversion is needed to
+ # handle different vendoring sources from pip and pkg_resources.
+ # If we stop using the pkg_resources provided specifier and start
+ # using our own, we can drop the cast to str().
+ return ", ".join(sorted(
+ {str(c.version) for c in cand_iter},
+ key=parse_version,
+ )) or "none"
+
+ if installed_version is None and best_candidate is None:
+ logger.critical(
+ 'Could not find a version that satisfies the requirement %s '
+ '(from versions: %s)',
+ req,
+ _format_versions(best_candidate_result.iter_all()),
+ )
+
+ raise DistributionNotFound(
+ 'No matching distribution found for %s' % req
+ )
+
+ best_installed = False
+ if installed_version and (
+ best_candidate is None or
+ best_candidate.version <= installed_version):
+ best_installed = True
+
+ if not upgrade and installed_version is not None:
+ if best_installed:
+ logger.debug(
+ 'Existing installed version (%s) is most up-to-date and '
+ 'satisfies requirement',
+ installed_version,
+ )
+ else:
+ logger.debug(
+ 'Existing installed version (%s) satisfies requirement '
+ '(most up-to-date version is %s)',
+ installed_version,
+ best_candidate.version,
+ )
+ return None
+
+ if best_installed:
+ # We have an existing version, and its the best version
+ logger.debug(
+ 'Installed version (%s) is most up-to-date (past versions: '
+ '%s)',
+ installed_version,
+ _format_versions(best_candidate_result.iter_applicable()),
+ )
+ raise BestVersionAlreadyInstalled
+
+ logger.debug(
+ 'Using version %s (newest of versions: %s)',
+ best_candidate.version,
+ _format_versions(best_candidate_result.iter_applicable()),
+ )
+ return best_candidate.link
+
+
+def _find_name_version_sep(fragment, canonical_name):
+ # type: (str, str) -> int
+ """Find the separator's index based on the package's canonical name.
+
+ :param fragment: A <package>+<version> filename "fragment" (stem) or
+ egg fragment.
+ :param canonical_name: The package's canonical name.
+
+ This function is needed since the canonicalized name does not necessarily
+ have the same length as the egg info's name part. An example::
+
+ >>> fragment = 'foo__bar-1.0'
+ >>> canonical_name = 'foo-bar'
+ >>> _find_name_version_sep(fragment, canonical_name)
+ 8
+ """
+ # Project name and version must be separated by one single dash. Find all
+ # occurrences of dashes; if the string in front of it matches the canonical
+ # name, this is the one separating the name and version parts.
+ for i, c in enumerate(fragment):
+ if c != "-":
+ continue
+ if canonicalize_name(fragment[:i]) == canonical_name:
+ return i
+ raise ValueError("{} does not match {}".format(fragment, canonical_name))
+
+
+def _extract_version_from_fragment(fragment, canonical_name):
+ # type: (str, str) -> Optional[str]
+ """Parse the version string from a <package>+<version> filename
+ "fragment" (stem) or egg fragment.
+
+ :param fragment: The string to parse. E.g. foo-2.1
+ :param canonical_name: The canonicalized name of the package this
+ belongs to.
+ """
+ try:
+ version_start = _find_name_version_sep(fragment, canonical_name) + 1
+ except ValueError:
+ return None
+ version = fragment[version_start:]
+ if not version:
+ return None
+ return version
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/legacy_resolve.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/legacy_resolve.py
new file mode 100644
index 0000000000000000000000000000000000000000..ca269121b60c1b792fbc1a08000c4f2e4503e706
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/legacy_resolve.py
@@ -0,0 +1,430 @@
+"""Dependency Resolution
+
+The dependency resolution in pip is performed as follows:
+
+for top-level requirements:
+ a. only one spec allowed per project, regardless of conflicts or not.
+ otherwise a "double requirement" exception is raised
+ b. they override sub-dependency requirements.
+for sub-dependencies
+ a. "first found, wins" (where the order is breadth first)
+"""
+
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+# mypy: disallow-untyped-defs=False
+
+import logging
+import sys
+from collections import defaultdict
+from itertools import chain
+
+from pip._vendor.packaging import specifiers
+
+from pip._internal.exceptions import (
+ BestVersionAlreadyInstalled,
+ DistributionNotFound,
+ HashError,
+ HashErrors,
+ UnsupportedPythonVersion,
+)
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import dist_in_usersite, normalize_version_info
+from pip._internal.utils.packaging import (
+ check_requires_python,
+ get_requires_python,
+)
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Callable, DefaultDict, List, Optional, Set, Tuple
+ from pip._vendor import pkg_resources
+
+ from pip._internal.distributions import AbstractDistribution
+ from pip._internal.index.package_finder import PackageFinder
+ from pip._internal.operations.prepare import RequirementPreparer
+ from pip._internal.req.req_install import InstallRequirement
+ from pip._internal.req.req_set import RequirementSet
+
+ InstallRequirementProvider = Callable[
+ [str, InstallRequirement], InstallRequirement
+ ]
+ DiscoveredDependencies = DefaultDict[str, List[InstallRequirement]]
+
+logger = logging.getLogger(__name__)
+
+
+def _check_dist_requires_python(
+ dist, # type: pkg_resources.Distribution
+ version_info, # type: Tuple[int, int, int]
+ ignore_requires_python=False, # type: bool
+):
+ # type: (...) -> None
+ """
+ Check whether the given Python version is compatible with a distribution's
+ "Requires-Python" value.
+
+ :param version_info: A 3-tuple of ints representing the Python
+ major-minor-micro version to check.
+ :param ignore_requires_python: Whether to ignore the "Requires-Python"
+ value if the given Python version isn't compatible.
+
+ :raises UnsupportedPythonVersion: When the given Python version isn't
+ compatible.
+ """
+ requires_python = get_requires_python(dist)
+ try:
+ is_compatible = check_requires_python(
+ requires_python, version_info=version_info,
+ )
+ except specifiers.InvalidSpecifier as exc:
+ logger.warning(
+ "Package %r has an invalid Requires-Python: %s",
+ dist.project_name, exc,
+ )
+ return
+
+ if is_compatible:
+ return
+
+ version = '.'.join(map(str, version_info))
+ if ignore_requires_python:
+ logger.debug(
+ 'Ignoring failed Requires-Python check for package %r: '
+ '%s not in %r',
+ dist.project_name, version, requires_python,
+ )
+ return
+
+ raise UnsupportedPythonVersion(
+ 'Package {!r} requires a different Python: {} not in {!r}'.format(
+ dist.project_name, version, requires_python,
+ ))
+
+
+class Resolver(object):
+ """Resolves which packages need to be installed/uninstalled to perform \
+ the requested operation without breaking the requirements of any package.
+ """
+
+ _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"}
+
+ def __init__(
+ self,
+ preparer, # type: RequirementPreparer
+ finder, # type: PackageFinder
+ make_install_req, # type: InstallRequirementProvider
+ use_user_site, # type: bool
+ ignore_dependencies, # type: bool
+ ignore_installed, # type: bool
+ ignore_requires_python, # type: bool
+ force_reinstall, # type: bool
+ upgrade_strategy, # type: str
+ py_version_info=None, # type: Optional[Tuple[int, ...]]
+ ):
+ # type: (...) -> None
+ super(Resolver, self).__init__()
+ assert upgrade_strategy in self._allowed_strategies
+
+ if py_version_info is None:
+ py_version_info = sys.version_info[:3]
+ else:
+ py_version_info = normalize_version_info(py_version_info)
+
+ self._py_version_info = py_version_info
+
+ self.preparer = preparer
+ self.finder = finder
+
+ self.upgrade_strategy = upgrade_strategy
+ self.force_reinstall = force_reinstall
+ self.ignore_dependencies = ignore_dependencies
+ self.ignore_installed = ignore_installed
+ self.ignore_requires_python = ignore_requires_python
+ self.use_user_site = use_user_site
+ self._make_install_req = make_install_req
+
+ self._discovered_dependencies = \
+ defaultdict(list) # type: DiscoveredDependencies
+
+ def resolve(self, requirement_set):
+ # type: (RequirementSet) -> None
+ """Resolve what operations need to be done
+
+ As a side-effect of this method, the packages (and their dependencies)
+ are downloaded, unpacked and prepared for installation. This
+ preparation is done by ``pip.operations.prepare``.
+
+ Once PyPI has static dependency metadata available, it would be
+ possible to move the preparation to become a step separated from
+ dependency resolution.
+ """
+ # If any top-level requirement has a hash specified, enter
+ # hash-checking mode, which requires hashes from all.
+ root_reqs = (
+ requirement_set.unnamed_requirements +
+ list(requirement_set.requirements.values())
+ )
+
+ # Actually prepare the files, and collect any exceptions. Most hash
+ # exceptions cannot be checked ahead of time, because
+ # req.populate_link() needs to be called before we can make decisions
+ # based on link type.
+ discovered_reqs = [] # type: List[InstallRequirement]
+ hash_errors = HashErrors()
+ for req in chain(root_reqs, discovered_reqs):
+ try:
+ discovered_reqs.extend(self._resolve_one(requirement_set, req))
+ except HashError as exc:
+ exc.req = req
+ hash_errors.append(exc)
+
+ if hash_errors:
+ raise hash_errors
+
+ def _is_upgrade_allowed(self, req):
+ # type: (InstallRequirement) -> bool
+ if self.upgrade_strategy == "to-satisfy-only":
+ return False
+ elif self.upgrade_strategy == "eager":
+ return True
+ else:
+ assert self.upgrade_strategy == "only-if-needed"
+ return req.is_direct
+
+ def _set_req_to_reinstall(self, req):
+ # type: (InstallRequirement) -> None
+ """
+ Set a requirement to be installed.
+ """
+ # Don't uninstall the conflict if doing a user install and the
+ # conflict is not a user install.
+ if not self.use_user_site or dist_in_usersite(req.satisfied_by):
+ req.should_reinstall = True
+ req.satisfied_by = None
+
+ def _check_skip_installed(self, req_to_install):
+ # type: (InstallRequirement) -> Optional[str]
+ """Check if req_to_install should be skipped.
+
+ This will check if the req is installed, and whether we should upgrade
+ or reinstall it, taking into account all the relevant user options.
+
+ After calling this req_to_install will only have satisfied_by set to
+ None if the req_to_install is to be upgraded/reinstalled etc. Any
+ other value will be a dist recording the current thing installed that
+ satisfies the requirement.
+
+ Note that for vcs urls and the like we can't assess skipping in this
+ routine - we simply identify that we need to pull the thing down,
+ then later on it is pulled down and introspected to assess upgrade/
+ reinstalls etc.
+
+ :return: A text reason for why it was skipped, or None.
+ """
+ if self.ignore_installed:
+ return None
+
+ req_to_install.check_if_exists(self.use_user_site)
+ if not req_to_install.satisfied_by:
+ return None
+
+ if self.force_reinstall:
+ self._set_req_to_reinstall(req_to_install)
+ return None
+
+ if not self._is_upgrade_allowed(req_to_install):
+ if self.upgrade_strategy == "only-if-needed":
+ return 'already satisfied, skipping upgrade'
+ return 'already satisfied'
+
+ # Check for the possibility of an upgrade. For link-based
+ # requirements we have to pull the tree down and inspect to assess
+ # the version #, so it's handled way down.
+ if not req_to_install.link:
+ try:
+ self.finder.find_requirement(req_to_install, upgrade=True)
+ except BestVersionAlreadyInstalled:
+ # Then the best version is installed.
+ return 'already up-to-date'
+ except DistributionNotFound:
+ # No distribution found, so we squash the error. It will
+ # be raised later when we re-try later to do the install.
+ # Why don't we just raise here?
+ pass
+
+ self._set_req_to_reinstall(req_to_install)
+ return None
+
+ def _get_abstract_dist_for(self, req):
+ # type: (InstallRequirement) -> AbstractDistribution
+ """Takes a InstallRequirement and returns a single AbstractDist \
+ representing a prepared variant of the same.
+ """
+ if req.editable:
+ return self.preparer.prepare_editable_requirement(req)
+
+ # satisfied_by is only evaluated by calling _check_skip_installed,
+ # so it must be None here.
+ assert req.satisfied_by is None
+ skip_reason = self._check_skip_installed(req)
+
+ if req.satisfied_by:
+ return self.preparer.prepare_installed_requirement(
+ req, skip_reason
+ )
+
+ upgrade_allowed = self._is_upgrade_allowed(req)
+
+ # We eagerly populate the link, since that's our "legacy" behavior.
+ require_hashes = self.preparer.require_hashes
+ req.populate_link(self.finder, upgrade_allowed, require_hashes)
+ abstract_dist = self.preparer.prepare_linked_requirement(req)
+
+ # NOTE
+ # The following portion is for determining if a certain package is
+ # going to be re-installed/upgraded or not and reporting to the user.
+ # This should probably get cleaned up in a future refactor.
+
+ # req.req is only avail after unpack for URL
+ # pkgs repeat check_if_exists to uninstall-on-upgrade
+ # (#14)
+ if not self.ignore_installed:
+ req.check_if_exists(self.use_user_site)
+
+ if req.satisfied_by:
+ should_modify = (
+ self.upgrade_strategy != "to-satisfy-only" or
+ self.force_reinstall or
+ self.ignore_installed or
+ req.link.scheme == 'file'
+ )
+ if should_modify:
+ self._set_req_to_reinstall(req)
+ else:
+ logger.info(
+ 'Requirement already satisfied (use --upgrade to upgrade):'
+ ' %s', req,
+ )
+
+ return abstract_dist
+
+ def _resolve_one(
+ self,
+ requirement_set, # type: RequirementSet
+ req_to_install, # type: InstallRequirement
+ ):
+ # type: (...) -> List[InstallRequirement]
+ """Prepare a single requirements file.
+
+ :return: A list of additional InstallRequirements to also install.
+ """
+ # Tell user what we are doing for this requirement:
+ # obtain (editable), skipping, processing (local url), collecting
+ # (remote url or package name)
+ if req_to_install.constraint or req_to_install.prepared:
+ return []
+
+ req_to_install.prepared = True
+
+ # register tmp src for cleanup in case something goes wrong
+ requirement_set.reqs_to_cleanup.append(req_to_install)
+
+ abstract_dist = self._get_abstract_dist_for(req_to_install)
+
+ # Parse and return dependencies
+ dist = abstract_dist.get_pkg_resources_distribution()
+ # This will raise UnsupportedPythonVersion if the given Python
+ # version isn't compatible with the distribution's Requires-Python.
+ _check_dist_requires_python(
+ dist, version_info=self._py_version_info,
+ ignore_requires_python=self.ignore_requires_python,
+ )
+
+ more_reqs = [] # type: List[InstallRequirement]
+
+ def add_req(subreq, extras_requested):
+ sub_install_req = self._make_install_req(
+ str(subreq),
+ req_to_install,
+ )
+ parent_req_name = req_to_install.name
+ to_scan_again, add_to_parent = requirement_set.add_requirement(
+ sub_install_req,
+ parent_req_name=parent_req_name,
+ extras_requested=extras_requested,
+ )
+ if parent_req_name and add_to_parent:
+ self._discovered_dependencies[parent_req_name].append(
+ add_to_parent
+ )
+ more_reqs.extend(to_scan_again)
+
+ with indent_log():
+ # We add req_to_install before its dependencies, so that we
+ # can refer to it when adding dependencies.
+ if not requirement_set.has_requirement(req_to_install.name):
+ # 'unnamed' requirements will get added here
+ # 'unnamed' requirements can only come from being directly
+ # provided by the user.
+ assert req_to_install.is_direct
+ requirement_set.add_requirement(
+ req_to_install, parent_req_name=None,
+ )
+
+ if not self.ignore_dependencies:
+ if req_to_install.extras:
+ logger.debug(
+ "Installing extra requirements: %r",
+ ','.join(req_to_install.extras),
+ )
+ missing_requested = sorted(
+ set(req_to_install.extras) - set(dist.extras)
+ )
+ for missing in missing_requested:
+ logger.warning(
+ '%s does not provide the extra \'%s\'',
+ dist, missing
+ )
+
+ available_requested = sorted(
+ set(dist.extras) & set(req_to_install.extras)
+ )
+ for subreq in dist.requires(available_requested):
+ add_req(subreq, extras_requested=available_requested)
+
+ if not req_to_install.editable and not req_to_install.satisfied_by:
+ # XXX: --no-install leads this to report 'Successfully
+ # downloaded' for only non-editable reqs, even though we took
+ # action on them.
+ requirement_set.successfully_downloaded.append(req_to_install)
+
+ return more_reqs
+
+ def get_installation_order(self, req_set):
+ # type: (RequirementSet) -> List[InstallRequirement]
+ """Create the installation order.
+
+ The installation order is topological - requirements are installed
+ before the requiring thing. We break cycles at an arbitrary point,
+ and make no other guarantees.
+ """
+ # The current implementation, which we may change at any point
+ # installs the user specified things in the order given, except when
+ # dependencies must come earlier to achieve topological order.
+ order = []
+ ordered_reqs = set() # type: Set[InstallRequirement]
+
+ def schedule(req):
+ if req.satisfied_by or req in ordered_reqs:
+ return
+ if req.constraint:
+ return
+ ordered_reqs.add(req)
+ for dep in self._discovered_dependencies[req.name]:
+ schedule(dep)
+ order.append(req)
+
+ for install_req in req_set.requirements.values():
+ schedule(install_req)
+ return order
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/locations.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/locations.py
new file mode 100644
index 0000000000000000000000000000000000000000..0c115531911af77f5eab69775c7cdd8e43b47e1d
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/locations.py
@@ -0,0 +1,194 @@
+"""Locations where we look for configs, install stuff, etc"""
+
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+from __future__ import absolute_import
+
+import os
+import os.path
+import platform
+import site
+import sys
+import sysconfig
+from distutils import sysconfig as distutils_sysconfig
+from distutils.command.install import SCHEME_KEYS # type: ignore
+from distutils.command.install import install as distutils_install_command
+
+from pip._internal.models.scheme import Scheme
+from pip._internal.utils import appdirs
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING, cast
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+if MYPY_CHECK_RUNNING:
+ from typing import Dict, List, Optional, Union
+
+ from distutils.cmd import Command as DistutilsCommand
+
+
+# Application Directories
+USER_CACHE_DIR = appdirs.user_cache_dir("pip")
+
+
+def get_major_minor_version():
+ # type: () -> str
+ """
+ Return the major-minor version of the current Python as a string, e.g.
+ "3.7" or "3.10".
+ """
+ return '{}.{}'.format(*sys.version_info)
+
+
+def get_src_prefix():
+ # type: () -> str
+ if running_under_virtualenv():
+ src_prefix = os.path.join(sys.prefix, 'src')
+ else:
+ # FIXME: keep src in cwd for now (it is not a temporary folder)
+ try:
+ src_prefix = os.path.join(os.getcwd(), 'src')
+ except OSError:
+ # In case the current working directory has been renamed or deleted
+ sys.exit(
+ "The folder you are executing pip from can no longer be found."
+ )
+
+ # under macOS + virtualenv sys.prefix is not properly resolved
+ # it is something like /path/to/python/bin/..
+ return os.path.abspath(src_prefix)
+
+
+# FIXME doesn't account for venv linked to global site-packages
+
+site_packages = sysconfig.get_path("purelib") # type: Optional[str]
+
+# This is because of a bug in PyPy's sysconfig module, see
+# https://bitbucket.org/pypy/pypy/issues/2506/sysconfig-returns-incorrect-paths
+# for more information.
+if platform.python_implementation().lower() == "pypy":
+ site_packages = distutils_sysconfig.get_python_lib()
+try:
+ # Use getusersitepackages if this is present, as it ensures that the
+ # value is initialised properly.
+ user_site = site.getusersitepackages()
+except AttributeError:
+ user_site = site.USER_SITE
+
+if WINDOWS:
+ bin_py = os.path.join(sys.prefix, 'Scripts')
+ bin_user = os.path.join(user_site, 'Scripts')
+ # buildout uses 'bin' on Windows too?
+ if not os.path.exists(bin_py):
+ bin_py = os.path.join(sys.prefix, 'bin')
+ bin_user = os.path.join(user_site, 'bin')
+else:
+ bin_py = os.path.join(sys.prefix, 'bin')
+ bin_user = os.path.join(user_site, 'bin')
+
+ # Forcing to use /usr/local/bin for standard macOS framework installs
+ # Also log to ~/Library/Logs/ for use with the Console.app log viewer
+ if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
+ bin_py = '/usr/local/bin'
+
+
+def distutils_scheme(
+ dist_name, user=False, home=None, root=None, isolated=False, prefix=None
+):
+ # type:(str, bool, str, str, bool, str) -> Dict[str, str]
+ """
+ Return a distutils install scheme
+ """
+ from distutils.dist import Distribution
+
+ dist_args = {'name': dist_name} # type: Dict[str, Union[str, List[str]]]
+ if isolated:
+ dist_args["script_args"] = ["--no-user-cfg"]
+
+ d = Distribution(dist_args)
+ d.parse_config_files()
+ obj = None # type: Optional[DistutilsCommand]
+ obj = d.get_command_obj('install', create=True)
+ assert obj is not None
+ i = cast(distutils_install_command, obj)
+ # NOTE: setting user or home has the side-effect of creating the home dir
+ # or user base for installations during finalize_options()
+ # ideally, we'd prefer a scheme class that has no side-effects.
+ assert not (user and prefix), "user={} prefix={}".format(user, prefix)
+ assert not (home and prefix), "home={} prefix={}".format(home, prefix)
+ i.user = user or i.user
+ if user or home:
+ i.prefix = ""
+ i.prefix = prefix or i.prefix
+ i.home = home or i.home
+ i.root = root or i.root
+ i.finalize_options()
+
+ scheme = {}
+ for key in SCHEME_KEYS:
+ scheme[key] = getattr(i, 'install_' + key)
+
+ # install_lib specified in setup.cfg should install *everything*
+ # into there (i.e. it takes precedence over both purelib and
+ # platlib). Note, i.install_lib is *always* set after
+ # finalize_options(); we only want to override here if the user
+ # has explicitly requested it hence going back to the config
+ if 'install_lib' in d.get_option_dict('install'):
+ scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))
+
+ if running_under_virtualenv():
+ scheme['headers'] = os.path.join(
+ sys.prefix,
+ 'include',
+ 'site',
+ 'python{}'.format(get_major_minor_version()),
+ dist_name,
+ )
+
+ if root is not None:
+ path_no_drive = os.path.splitdrive(
+ os.path.abspath(scheme["headers"]))[1]
+ scheme["headers"] = os.path.join(
+ root,
+ path_no_drive[1:],
+ )
+
+ return scheme
+
+
+def get_scheme(
+ dist_name, # type: str
+ user=False, # type: bool
+ home=None, # type: Optional[str]
+ root=None, # type: Optional[str]
+ isolated=False, # type: bool
+ prefix=None, # type: Optional[str]
+):
+ # type: (...) -> Scheme
+ """
+ Get the "scheme" corresponding to the input parameters. The distutils
+ documentation provides the context for the available schemes:
+ https://docs.python.org/3/install/index.html#alternate-installation
+
+ :param dist_name: the name of the package to retrieve the scheme for, used
+ in the headers scheme path
+ :param user: indicates to use the "user" scheme
+ :param home: indicates to use the "home" scheme and provides the base
+ directory for the same
+ :param root: root under which other directories are re-based
+ :param isolated: equivalent to --no-user-cfg, i.e. do not consider
+ ~/.pydistutils.cfg (posix) or ~/pydistutils.cfg (non-posix) for
+ scheme paths
+ :param prefix: indicates to use the "prefix" scheme and provides the
+ base directory for the same
+ """
+ scheme = distutils_scheme(
+ dist_name, user, home, root, isolated, prefix
+ )
+ return Scheme(
+ platlib=scheme["platlib"],
+ purelib=scheme["purelib"],
+ headers=scheme["headers"],
+ scripts=scheme["scripts"],
+ data=scheme["data"],
+ )
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/main.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/main.py
new file mode 100644
index 0000000000000000000000000000000000000000..3208d5b8820eadf8a1ebe4851c984c6033c289bd
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/main.py
@@ -0,0 +1,16 @@
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional, List
+
+
+def main(args=None):
+ # type: (Optional[List[str]]) -> int
+ """This is preserved for old console scripts that may still be referencing
+ it.
+
+ For additional details, see https://github.com/pypa/pip/issues/7498.
+ """
+ from pip._internal.utils.entrypoints import _wrapper
+
+ return _wrapper(args)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/__init__.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..7855226e4b500142deef8fb247cd33a9a991d122
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/__init__.py
@@ -0,0 +1,2 @@
+"""A package that contains models that represent entities.
+"""
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..76c6d4fb813c7347f64b55fbce6f61ea3c962143
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/__pycache__/candidate.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/__pycache__/candidate.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8efca6b30b5e9b1c8d05e39c5c5639cf632ced35
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/__pycache__/candidate.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/__pycache__/format_control.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/__pycache__/format_control.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b6616bc996cef1fa6ced62bc262c15252ddd3348
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/__pycache__/format_control.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/__pycache__/index.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/__pycache__/index.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2378b1ba3c8e92efd0202ec11052ca9f35eb2311
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/__pycache__/index.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/__pycache__/link.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/__pycache__/link.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2a6df37c1da43327ddfc330a198817161a4b3c91
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/__pycache__/link.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/__pycache__/scheme.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/__pycache__/scheme.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..61a64344c93a7ee3c3b5370e2b13a5bb87dccf26
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/__pycache__/scheme.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..fb782b71c72807a0091a3d69f5148a80e4c57f8f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6c91930ce3c13bca4e37f80269f34203069f1ab7
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/__pycache__/target_python.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/__pycache__/target_python.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..829fb14f6236475892ce80384b4f877f9ff6613f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/__pycache__/target_python.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/__pycache__/wheel.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/__pycache__/wheel.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..89730572b5f2015ee046542e101a07f04676245b
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/__pycache__/wheel.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/candidate.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/candidate.py
new file mode 100644
index 0000000000000000000000000000000000000000..1dc1a576eea788c23f5722bbb8e10ae950ef38bd
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/candidate.py
@@ -0,0 +1,36 @@
+from pip._vendor.packaging.version import parse as parse_version
+
+from pip._internal.utils.models import KeyBasedCompareMixin
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from pip._vendor.packaging.version import _BaseVersion
+ from pip._internal.models.link import Link
+
+
+class InstallationCandidate(KeyBasedCompareMixin):
+ """Represents a potential "candidate" for installation.
+ """
+
+ def __init__(self, name, version, link):
+ # type: (str, str, Link) -> None
+ self.name = name
+ self.version = parse_version(version) # type: _BaseVersion
+ self.link = link
+
+ super(InstallationCandidate, self).__init__(
+ key=(self.name, self.version, self.link),
+ defining_class=InstallationCandidate
+ )
+
+ def __repr__(self):
+ # type: () -> str
+ return "<InstallationCandidate({!r}, {!r}, {!r})>".format(
+ self.name, self.version, self.link,
+ )
+
+ def __str__(self):
+ # type: () -> str
+ return '{!r} candidate (version {} at {})'.format(
+ self.name, self.version, self.link,
+ )
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/format_control.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/format_control.py
new file mode 100644
index 0000000000000000000000000000000000000000..2e13727ca006977f3fb2df30fd1a25bb1670cf3e
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/format_control.py
@@ -0,0 +1,84 @@
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.exceptions import CommandError
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional, Set, FrozenSet
+
+
+class FormatControl(object):
+ """Helper for managing formats from which a package can be installed.
+ """
+
+ def __init__(self, no_binary=None, only_binary=None):
+ # type: (Optional[Set[str]], Optional[Set[str]]) -> None
+ if no_binary is None:
+ no_binary = set()
+ if only_binary is None:
+ only_binary = set()
+
+ self.no_binary = no_binary
+ self.only_binary = only_binary
+
+ def __eq__(self, other):
+ # type: (object) -> bool
+ return self.__dict__ == other.__dict__
+
+ def __ne__(self, other):
+ # type: (object) -> bool
+ return not self.__eq__(other)
+
+ def __repr__(self):
+ # type: () -> str
+ return "{}({}, {})".format(
+ self.__class__.__name__,
+ self.no_binary,
+ self.only_binary
+ )
+
+ @staticmethod
+ def handle_mutual_excludes(value, target, other):
+ # type: (str, Optional[Set[str]], Optional[Set[str]]) -> None
+ if value.startswith('-'):
+ raise CommandError(
+ "--no-binary / --only-binary option requires 1 argument."
+ )
+ new = value.split(',')
+ while ':all:' in new:
+ other.clear()
+ target.clear()
+ target.add(':all:')
+ del new[:new.index(':all:') + 1]
+ # Without a none, we want to discard everything as :all: covers it
+ if ':none:' not in new:
+ return
+ for name in new:
+ if name == ':none:':
+ target.clear()
+ continue
+ name = canonicalize_name(name)
+ other.discard(name)
+ target.add(name)
+
+ def get_allowed_formats(self, canonical_name):
+ # type: (str) -> FrozenSet[str]
+ result = {"binary", "source"}
+ if canonical_name in self.only_binary:
+ result.discard('source')
+ elif canonical_name in self.no_binary:
+ result.discard('binary')
+ elif ':all:' in self.only_binary:
+ result.discard('source')
+ elif ':all:' in self.no_binary:
+ result.discard('binary')
+ return frozenset(result)
+
+ def disallow_binaries(self):
+ # type: () -> None
+ self.handle_mutual_excludes(
+ ':all:', self.no_binary, self.only_binary,
+ )
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/index.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/index.py
new file mode 100644
index 0000000000000000000000000000000000000000..ead1efbda761ebed373700ce9e69797838c2b9d9
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/index.py
@@ -0,0 +1,31 @@
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+
+
+class PackageIndex(object):
+ """Represents a Package Index and provides easier access to endpoints
+ """
+
+ def __init__(self, url, file_storage_domain):
+ # type: (str, str) -> None
+ super(PackageIndex, self).__init__()
+ self.url = url
+ self.netloc = urllib_parse.urlsplit(url).netloc
+ self.simple_url = self._url_for_path('simple')
+ self.pypi_url = self._url_for_path('pypi')
+
+ # This is part of a temporary hack used to block installs of PyPI
+ # packages which depend on external urls only necessary until PyPI can
+ # block such packages themselves
+ self.file_storage_domain = file_storage_domain
+
+ def _url_for_path(self, path):
+ # type: (str) -> str
+ return urllib_parse.urljoin(self.url, path)
+
+
+PyPI = PackageIndex(
+ 'https://pypi.org/', file_storage_domain='files.pythonhosted.org'
+)
+TestPyPI = PackageIndex(
+ 'https://test.pypi.org/', file_storage_domain='test-files.pythonhosted.org'
+)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/link.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/link.py
new file mode 100644
index 0000000000000000000000000000000000000000..34fbcbfe7e4dcc6873288db8455890ce77405405
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/link.py
@@ -0,0 +1,227 @@
+import os
+import posixpath
+import re
+
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+
+from pip._internal.utils.filetypes import WHEEL_EXTENSION
+from pip._internal.utils.misc import (
+ redact_auth_from_url,
+ split_auth_from_netloc,
+ splitext,
+)
+from pip._internal.utils.models import KeyBasedCompareMixin
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.urls import path_to_url, url_to_path
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional, Text, Tuple, Union
+ from pip._internal.index.collector import HTMLPage
+ from pip._internal.utils.hashes import Hashes
+
+
+class Link(KeyBasedCompareMixin):
+ """Represents a parsed link from a Package Index's simple URL
+ """
+
+ def __init__(
+ self,
+ url, # type: str
+ comes_from=None, # type: Optional[Union[str, HTMLPage]]
+ requires_python=None, # type: Optional[str]
+ yanked_reason=None, # type: Optional[Text]
+ ):
+ # type: (...) -> None
+ """
+ :param url: url of the resource pointed to (href of the link)
+ :param comes_from: instance of HTMLPage where the link was found,
+ or string.
+ :param requires_python: String containing the `Requires-Python`
+ metadata field, specified in PEP 345. This may be specified by
+ a data-requires-python attribute in the HTML link tag, as
+ described in PEP 503.
+ :param yanked_reason: the reason the file has been yanked, if the
+ file has been yanked, or None if the file hasn't been yanked.
+ This is the value of the "data-yanked" attribute, if present, in
+ a simple repository HTML link. If the file has been yanked but
+ no reason was provided, this should be the empty string. See
+ PEP 592 for more information and the specification.
+ """
+
+ # url can be a UNC windows share
+ if url.startswith('\\\\'):
+ url = path_to_url(url)
+
+ self._parsed_url = urllib_parse.urlsplit(url)
+ # Store the url as a private attribute to prevent accidentally
+ # trying to set a new value.
+ self._url = url
+
+ self.comes_from = comes_from
+ self.requires_python = requires_python if requires_python else None
+ self.yanked_reason = yanked_reason
+
+ super(Link, self).__init__(key=url, defining_class=Link)
+
+ def __str__(self):
+ # type: () -> str
+ if self.requires_python:
+ rp = ' (requires-python:%s)' % self.requires_python
+ else:
+ rp = ''
+ if self.comes_from:
+ return '%s (from %s)%s' % (redact_auth_from_url(self._url),
+ self.comes_from, rp)
+ else:
+ return redact_auth_from_url(str(self._url))
+
+ def __repr__(self):
+ # type: () -> str
+ return '<Link %s>' % self
+
+ @property
+ def url(self):
+ # type: () -> str
+ return self._url
+
+ @property
+ def filename(self):
+ # type: () -> str
+ path = self.path.rstrip('/')
+ name = posixpath.basename(path)
+ if not name:
+ # Make sure we don't leak auth information if the netloc
+ # includes a username and password.
+ netloc, user_pass = split_auth_from_netloc(self.netloc)
+ return netloc
+
+ name = urllib_parse.unquote(name)
+ assert name, ('URL %r produced no filename' % self._url)
+ return name
+
+ @property
+ def file_path(self):
+ # type: () -> str
+ return url_to_path(self.url)
+
+ @property
+ def scheme(self):
+ # type: () -> str
+ return self._parsed_url.scheme
+
+ @property
+ def netloc(self):
+ # type: () -> str
+ """
+ This can contain auth information.
+ """
+ return self._parsed_url.netloc
+
+ @property
+ def path(self):
+ # type: () -> str
+ return urllib_parse.unquote(self._parsed_url.path)
+
+ def splitext(self):
+ # type: () -> Tuple[str, str]
+ return splitext(posixpath.basename(self.path.rstrip('/')))
+
+ @property
+ def ext(self):
+ # type: () -> str
+ return self.splitext()[1]
+
+ @property
+ def url_without_fragment(self):
+ # type: () -> str
+ scheme, netloc, path, query, fragment = self._parsed_url
+ return urllib_parse.urlunsplit((scheme, netloc, path, query, None))
+
+ _egg_fragment_re = re.compile(r'[#&]egg=([^&]*)')
+
+ @property
+ def egg_fragment(self):
+ # type: () -> Optional[str]
+ match = self._egg_fragment_re.search(self._url)
+ if not match:
+ return None
+ return match.group(1)
+
+ _subdirectory_fragment_re = re.compile(r'[#&]subdirectory=([^&]*)')
+
+ @property
+ def subdirectory_fragment(self):
+ # type: () -> Optional[str]
+ match = self._subdirectory_fragment_re.search(self._url)
+ if not match:
+ return None
+ return match.group(1)
+
+ _hash_re = re.compile(
+ r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)'
+ )
+
+ @property
+ def hash(self):
+ # type: () -> Optional[str]
+ match = self._hash_re.search(self._url)
+ if match:
+ return match.group(2)
+ return None
+
+ @property
+ def hash_name(self):
+ # type: () -> Optional[str]
+ match = self._hash_re.search(self._url)
+ if match:
+ return match.group(1)
+ return None
+
+ @property
+ def show_url(self):
+ # type: () -> str
+ return posixpath.basename(self._url.split('#', 1)[0].split('?', 1)[0])
+
+ @property
+ def is_file(self):
+ # type: () -> bool
+ return self.scheme == 'file'
+
+ def is_existing_dir(self):
+ # type: () -> bool
+ return self.is_file and os.path.isdir(self.file_path)
+
+ @property
+ def is_wheel(self):
+ # type: () -> bool
+ return self.ext == WHEEL_EXTENSION
+
+ @property
+ def is_vcs(self):
+ # type: () -> bool
+ from pip._internal.vcs import vcs
+
+ return self.scheme in vcs.all_schemes
+
+ @property
+ def is_yanked(self):
+ # type: () -> bool
+ return self.yanked_reason is not None
+
+ @property
+ def has_hash(self):
+ # type: () -> bool
+ return self.hash_name is not None
+
+ def is_hash_allowed(self, hashes):
+ # type: (Optional[Hashes]) -> bool
+ """
+ Return True if the link has a hash and it is allowed.
+ """
+ if hashes is None or not self.has_hash:
+ return False
+ # Assert non-None so mypy knows self.hash_name and self.hash are str.
+ assert self.hash_name is not None
+ assert self.hash is not None
+
+ return hashes.is_hash_allowed(self.hash_name, hex_digest=self.hash)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/scheme.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/scheme.py
new file mode 100644
index 0000000000000000000000000000000000000000..af07b4078f997b5c6005c042ac178282c49fd5e7
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/scheme.py
@@ -0,0 +1,25 @@
+"""
+For types associated with installation schemes.
+
+For a general overview of available schemes and their context, see
+https://docs.python.org/3/install/index.html#alternate-installation.
+"""
+
+
+class Scheme(object):
+ """A Scheme holds paths which are used as the base directories for
+ artifacts associated with a Python package.
+ """
+ def __init__(
+ self,
+ platlib, # type: str
+ purelib, # type: str
+ headers, # type: str
+ scripts, # type: str
+ data, # type: str
+ ):
+ self.platlib = platlib
+ self.purelib = purelib
+ self.headers = headers
+ self.scripts = scripts
+ self.data = data
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/search_scope.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/search_scope.py
new file mode 100644
index 0000000000000000000000000000000000000000..138d1b6eedf8d5b58f25c821ac393408d1e73067
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/search_scope.py
@@ -0,0 +1,114 @@
+import itertools
+import logging
+import os
+import posixpath
+
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+
+from pip._internal.models.index import PyPI
+from pip._internal.utils.compat import has_tls
+from pip._internal.utils.misc import normalize_path, redact_auth_from_url
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import List
+
+
+logger = logging.getLogger(__name__)
+
+
+class SearchScope(object):
+
+ """
+ Encapsulates the locations that pip is configured to search.
+ """
+
+ @classmethod
+ def create(
+ cls,
+ find_links, # type: List[str]
+ index_urls, # type: List[str]
+ ):
+ # type: (...) -> SearchScope
+ """
+ Create a SearchScope object after normalizing the `find_links`.
+ """
+ # Build find_links. If an argument starts with ~, it may be
+ # a local file relative to a home directory. So try normalizing
+ # it and if it exists, use the normalized version.
+ # This is deliberately conservative - it might be fine just to
+ # blindly normalize anything starting with a ~...
+ built_find_links = [] # type: List[str]
+ for link in find_links:
+ if link.startswith('~'):
+ new_link = normalize_path(link)
+ if os.path.exists(new_link):
+ link = new_link
+ built_find_links.append(link)
+
+ # If we don't have TLS enabled, then WARN if anyplace we're looking
+ # relies on TLS.
+ if not has_tls():
+ for link in itertools.chain(index_urls, built_find_links):
+ parsed = urllib_parse.urlparse(link)
+ if parsed.scheme == 'https':
+ logger.warning(
+ 'pip is configured with locations that require '
+ 'TLS/SSL, however the ssl module in Python is not '
+ 'available.'
+ )
+ break
+
+ return cls(
+ find_links=built_find_links,
+ index_urls=index_urls,
+ )
+
+ def __init__(
+ self,
+ find_links, # type: List[str]
+ index_urls, # type: List[str]
+ ):
+ # type: (...) -> None
+ self.find_links = find_links
+ self.index_urls = index_urls
+
+ def get_formatted_locations(self):
+ # type: () -> str
+ lines = []
+ if self.index_urls and self.index_urls != [PyPI.simple_url]:
+ lines.append(
+ 'Looking in indexes: {}'.format(', '.join(
+ redact_auth_from_url(url) for url in self.index_urls))
+ )
+ if self.find_links:
+ lines.append(
+ 'Looking in links: {}'.format(', '.join(
+ redact_auth_from_url(url) for url in self.find_links))
+ )
+ return '\n'.join(lines)
+
+ def get_index_urls_locations(self, project_name):
+ # type: (str) -> List[str]
+ """Returns the locations found via self.index_urls
+
+ Checks the url_name on the main (first in the list) index and
+ use this url_name to produce all locations
+ """
+
+ def mkurl_pypi_url(url):
+ # type: (str) -> str
+ loc = posixpath.join(
+ url,
+ urllib_parse.quote(canonicalize_name(project_name)))
+ # For maximum compatibility with easy_install, ensure the path
+ # ends in a trailing slash. Although this isn't in the spec
+ # (and PyPI can handle it without the slash) some other index
+ # implementations might break if they relied on easy_install's
+ # behavior.
+ if not loc.endswith('/'):
+ loc = loc + '/'
+ return loc
+
+ return [mkurl_pypi_url(url) for url in self.index_urls]
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/selection_prefs.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/selection_prefs.py
new file mode 100644
index 0000000000000000000000000000000000000000..f58fdce9cdfcb9320c09f0652ff20a9dc52f3701
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/selection_prefs.py
@@ -0,0 +1,47 @@
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional
+ from pip._internal.models.format_control import FormatControl
+
+
+class SelectionPreferences(object):
+
+ """
+ Encapsulates the candidate selection preferences for downloading
+ and installing files.
+ """
+
+ # Don't include an allow_yanked default value to make sure each call
+ # site considers whether yanked releases are allowed. This also causes
+ # that decision to be made explicit in the calling code, which helps
+ # people when reading the code.
+ def __init__(
+ self,
+ allow_yanked, # type: bool
+ allow_all_prereleases=False, # type: bool
+ format_control=None, # type: Optional[FormatControl]
+ prefer_binary=False, # type: bool
+ ignore_requires_python=None, # type: Optional[bool]
+ ):
+ # type: (...) -> None
+ """Create a SelectionPreferences object.
+
+ :param allow_yanked: Whether files marked as yanked (in the sense
+ of PEP 592) are permitted to be candidates for install.
+ :param format_control: A FormatControl object or None. Used to control
+ the selection of source packages / binary packages when consulting
+ the index and links.
+ :param prefer_binary: Whether to prefer an old, but valid, binary
+ dist over a new source dist.
+ :param ignore_requires_python: Whether to ignore incompatible
+ "Requires-Python" values in links. Defaults to False.
+ """
+ if ignore_requires_python is None:
+ ignore_requires_python = False
+
+ self.allow_yanked = allow_yanked
+ self.allow_all_prereleases = allow_all_prereleases
+ self.format_control = format_control
+ self.prefer_binary = prefer_binary
+ self.ignore_requires_python = ignore_requires_python
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/target_python.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/target_python.py
new file mode 100644
index 0000000000000000000000000000000000000000..97ae85a0945b88e63db603fbeb4d49bdc339fa6a
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/target_python.py
@@ -0,0 +1,107 @@
+import sys
+
+from pip._internal.pep425tags import get_supported, version_info_to_nodot
+from pip._internal.utils.misc import normalize_version_info
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import List, Optional, Tuple
+
+ from pip._vendor.packaging.tags import Tag
+
+
+class TargetPython(object):
+
+ """
+ Encapsulates the properties of a Python interpreter one is targeting
+ for a package install, download, etc.
+ """
+
+ def __init__(
+ self,
+ platform=None, # type: Optional[str]
+ py_version_info=None, # type: Optional[Tuple[int, ...]]
+ abi=None, # type: Optional[str]
+ implementation=None, # type: Optional[str]
+ ):
+ # type: (...) -> None
+ """
+ :param platform: A string or None. If None, searches for packages
+ that are supported by the current system. Otherwise, will find
+ packages that can be built on the platform passed in. These
+ packages will only be downloaded for distribution: they will
+ not be built locally.
+ :param py_version_info: An optional tuple of ints representing the
+ Python version information to use (e.g. `sys.version_info[:3]`).
+ This can have length 1, 2, or 3 when provided.
+ :param abi: A string or None. This is passed to pep425tags.py's
+ get_supported() function as is.
+ :param implementation: A string or None. This is passed to
+ pep425tags.py's get_supported() function as is.
+ """
+ # Store the given py_version_info for when we call get_supported().
+ self._given_py_version_info = py_version_info
+
+ if py_version_info is None:
+ py_version_info = sys.version_info[:3]
+ else:
+ py_version_info = normalize_version_info(py_version_info)
+
+ py_version = '.'.join(map(str, py_version_info[:2]))
+
+ self.abi = abi
+ self.implementation = implementation
+ self.platform = platform
+ self.py_version = py_version
+ self.py_version_info = py_version_info
+
+ # This is used to cache the return value of get_tags().
+ self._valid_tags = None # type: Optional[List[Tag]]
+
+ def format_given(self):
+ # type: () -> str
+ """
+ Format the given, non-None attributes for display.
+ """
+ display_version = None
+ if self._given_py_version_info is not None:
+ display_version = '.'.join(
+ str(part) for part in self._given_py_version_info
+ )
+
+ key_values = [
+ ('platform', self.platform),
+ ('version_info', display_version),
+ ('abi', self.abi),
+ ('implementation', self.implementation),
+ ]
+ return ' '.join(
+ '{}={!r}'.format(key, value) for key, value in key_values
+ if value is not None
+ )
+
+ def get_tags(self):
+ # type: () -> List[Tag]
+ """
+ Return the supported PEP 425 tags to check wheel candidates against.
+
+ The tags are returned in order of preference (most preferred first).
+ """
+ if self._valid_tags is None:
+ # Pass versions=None if no py_version_info was given since
+ # versions=None uses special default logic.
+ py_version_info = self._given_py_version_info
+ if py_version_info is None:
+ version = None
+ else:
+ version = version_info_to_nodot(py_version_info)
+
+ tags = get_supported(
+ version=version,
+ platform=self.platform,
+ abi=self.abi,
+ impl=self.implementation,
+ )
+ self._valid_tags = tags
+
+ return self._valid_tags
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/wheel.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/wheel.py
new file mode 100644
index 0000000000000000000000000000000000000000..f1e3f44c598a708322afd85d6da98e2179196b6a
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/models/wheel.py
@@ -0,0 +1,78 @@
+"""Represents a wheel file and provides access to the various parts of the
+name that have meaning.
+"""
+import re
+
+from pip._vendor.packaging.tags import Tag
+
+from pip._internal.exceptions import InvalidWheelFilename
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import List
+
+
+class Wheel(object):
+ """A wheel file"""
+
+ wheel_file_re = re.compile(
+ r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.*?))
+ ((-(?P<build>\d[^-]*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
+ \.whl|\.dist-info)$""",
+ re.VERBOSE
+ )
+
+ def __init__(self, filename):
+ # type: (str) -> None
+ """
+ :raises InvalidWheelFilename: when the filename is invalid for a wheel
+ """
+ wheel_info = self.wheel_file_re.match(filename)
+ if not wheel_info:
+ raise InvalidWheelFilename(
+ "%s is not a valid wheel filename." % filename
+ )
+ self.filename = filename
+ self.name = wheel_info.group('name').replace('_', '-')
+ # we'll assume "_" means "-" due to wheel naming scheme
+ # (https://github.com/pypa/pip/issues/1150)
+ self.version = wheel_info.group('ver').replace('_', '-')
+ self.build_tag = wheel_info.group('build')
+ self.pyversions = wheel_info.group('pyver').split('.')
+ self.abis = wheel_info.group('abi').split('.')
+ self.plats = wheel_info.group('plat').split('.')
+
+ # All the tag combinations from this file
+ self.file_tags = {
+ Tag(x, y, z) for x in self.pyversions
+ for y in self.abis for z in self.plats
+ }
+
+ def get_formatted_file_tags(self):
+ # type: () -> List[str]
+ """Return the wheel's tags as a sorted list of strings."""
+ return sorted(str(tag) for tag in self.file_tags)
+
+ def support_index_min(self, tags):
+ # type: (List[Tag]) -> int
+ """Return the lowest index that one of the wheel's file_tag combinations
+ achieves in the given list of supported tags.
+
+ For example, if there are 8 supported tags and one of the file tags
+ is first in the list, then return 0.
+
+ :param tags: the PEP 425 tags to check the wheel against, in order
+ with most preferred first.
+
+ :raises ValueError: If none of the wheel's file tags match one of
+ the supported tags.
+ """
+ return min(tags.index(tag) for tag in self.file_tags if tag in tags)
+
+ def supported(self, tags):
+ # type: (List[Tag]) -> bool
+ """Return whether the wheel is compatible with one of the given tags.
+
+ :param tags: the PEP 425 tags to check the wheel against.
+ """
+ return not self.file_tags.isdisjoint(tags)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/__init__.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..b51bde91b2e5b4e557ed9b70fc113843cc3d49ae
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/__init__.py
@@ -0,0 +1,2 @@
+"""Contains purely network-related utilities.
+"""
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..721455659b776ee398e5dea58280b48e30eea9de
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/__pycache__/auth.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/__pycache__/auth.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e18f4ac92519b1845076b9f4a9fbf346c66b3d11
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/__pycache__/auth.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/__pycache__/cache.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/__pycache__/cache.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..49dd51424e101a1b5b007b0f5a5de16daa1a16fd
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/__pycache__/cache.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/__pycache__/download.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/__pycache__/download.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8f279edc0786a6201e2e2fb8b56243b4b26259a5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/__pycache__/download.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/__pycache__/session.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/__pycache__/session.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d7b033daccab3f5075a9f6208f2c01d01ec94505
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/__pycache__/session.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/__pycache__/utils.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/__pycache__/utils.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5e962836239b2f86908d7e8ce62df301bda31512
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/__pycache__/utils.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b2608bfd5d61e892606615ab7833b087d344dbb9
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/auth.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/auth.py
new file mode 100644
index 0000000000000000000000000000000000000000..1e1da54ca59d8d42b53b51f95b876b369f76b4a1
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/auth.py
@@ -0,0 +1,298 @@
+"""Network Authentication Helpers
+
+Contains interface (MultiDomainBasicAuth) and associated glue code for
+providing credentials in the context of network requests.
+"""
+
+# The following comment should be removed at some point in the future.
+# mypy: disallow-untyped-defs=False
+
+import logging
+
+from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
+from pip._vendor.requests.utils import get_netrc_auth
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+
+from pip._internal.utils.misc import (
+ ask,
+ ask_input,
+ ask_password,
+ remove_auth_from_url,
+ split_auth_netloc_from_url,
+)
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from optparse import Values
+ from typing import Dict, Optional, Tuple
+
+ from pip._internal.vcs.versioncontrol import AuthInfo
+
+ Credentials = Tuple[str, str, str]
+
+logger = logging.getLogger(__name__)
+
+try:
+ import keyring # noqa
+except ImportError:
+ keyring = None
+except Exception as exc:
+ logger.warning(
+ "Keyring is skipped due to an exception: %s", str(exc),
+ )
+ keyring = None
+
+
+def get_keyring_auth(url, username):
+ """Return the tuple auth for a given url from keyring."""
+ if not url or not keyring:
+ return None
+
+ try:
+ try:
+ get_credential = keyring.get_credential
+ except AttributeError:
+ pass
+ else:
+ logger.debug("Getting credentials from keyring for %s", url)
+ cred = get_credential(url, username)
+ if cred is not None:
+ return cred.username, cred.password
+ return None
+
+ if username:
+ logger.debug("Getting password from keyring for %s", url)
+ password = keyring.get_password(url, username)
+ if password:
+ return username, password
+
+ except Exception as exc:
+ logger.warning(
+ "Keyring is skipped due to an exception: %s", str(exc),
+ )
+
+
+class MultiDomainBasicAuth(AuthBase):
+
+ def __init__(self, prompting=True, index_urls=None):
+ # type: (bool, Optional[Values]) -> None
+ self.prompting = prompting
+ self.index_urls = index_urls
+ self.passwords = {} # type: Dict[str, AuthInfo]
+ # When the user is prompted to enter credentials and keyring is
+ # available, we will offer to save them. If the user accepts,
+ # this value is set to the credentials they entered. After the
+ # request authenticates, the caller should call
+ # ``save_credentials`` to save these.
+ self._credentials_to_save = None # type: Optional[Credentials]
+
+ def _get_index_url(self, url):
+ """Return the original index URL matching the requested URL.
+
+ Cached or dynamically generated credentials may work against
+ the original index URL rather than just the netloc.
+
+ The provided url should have had its username and password
+ removed already. If the original index url had credentials then
+ they will be included in the return value.
+
+ Returns None if no matching index was found, or if --no-index
+ was specified by the user.
+ """
+ if not url or not self.index_urls:
+ return None
+
+ for u in self.index_urls:
+ prefix = remove_auth_from_url(u).rstrip("/") + "/"
+ if url.startswith(prefix):
+ return u
+
+ def _get_new_credentials(self, original_url, allow_netrc=True,
+ allow_keyring=True):
+ """Find and return credentials for the specified URL."""
+ # Split the credentials and netloc from the url.
+ url, netloc, url_user_password = split_auth_netloc_from_url(
+ original_url,
+ )
+
+ # Start with the credentials embedded in the url
+ username, password = url_user_password
+ if username is not None and password is not None:
+ logger.debug("Found credentials in url for %s", netloc)
+ return url_user_password
+
+ # Find a matching index url for this request
+ index_url = self._get_index_url(url)
+ if index_url:
+ # Split the credentials from the url.
+ index_info = split_auth_netloc_from_url(index_url)
+ if index_info:
+ index_url, _, index_url_user_password = index_info
+ logger.debug("Found index url %s", index_url)
+
+ # If an index URL was found, try its embedded credentials
+ if index_url and index_url_user_password[0] is not None:
+ username, password = index_url_user_password
+ if username is not None and password is not None:
+ logger.debug("Found credentials in index url for %s", netloc)
+ return index_url_user_password
+
+ # Get creds from netrc if we still don't have them
+ if allow_netrc:
+ netrc_auth = get_netrc_auth(original_url)
+ if netrc_auth:
+ logger.debug("Found credentials in netrc for %s", netloc)
+ return netrc_auth
+
+ # If we don't have a password and keyring is available, use it.
+ if allow_keyring:
+ # The index url is more specific than the netloc, so try it first
+ kr_auth = (
+ get_keyring_auth(index_url, username) or
+ get_keyring_auth(netloc, username)
+ )
+ if kr_auth:
+ logger.debug("Found credentials in keyring for %s", netloc)
+ return kr_auth
+
+ return username, password
+
+ def _get_url_and_credentials(self, original_url):
+ """Return the credentials to use for the provided URL.
+
+ If allowed, netrc and keyring may be used to obtain the
+ correct credentials.
+
+ Returns (url_without_credentials, username, password). Note
+ that even if the original URL contains credentials, this
+ function may return a different username and password.
+ """
+ url, netloc, _ = split_auth_netloc_from_url(original_url)
+
+ # Use any stored credentials that we have for this netloc
+ username, password = self.passwords.get(netloc, (None, None))
+
+ if username is None and password is None:
+ # No stored credentials. Acquire new credentials without prompting
+ # the user. (e.g. from netrc, keyring, or the URL itself)
+ username, password = self._get_new_credentials(original_url)
+
+ if username is not None or password is not None:
+ # Convert the username and password if they're None, so that
+ # this netloc will show up as "cached" in the conditional above.
+ # Further, HTTPBasicAuth doesn't accept None, so it makes sense to
+ # cache the value that is going to be used.
+ username = username or ""
+ password = password or ""
+
+ # Store any acquired credentials.
+ self.passwords[netloc] = (username, password)
+
+ assert (
+ # Credentials were found
+ (username is not None and password is not None) or
+ # Credentials were not found
+ (username is None and password is None)
+ ), "Could not load credentials from url: {}".format(original_url)
+
+ return url, username, password
+
+ def __call__(self, req):
+ # Get credentials for this request
+ url, username, password = self._get_url_and_credentials(req.url)
+
+ # Set the url of the request to the url without any credentials
+ req.url = url
+
+ if username is not None and password is not None:
+ # Send the basic auth with this request
+ req = HTTPBasicAuth(username, password)(req)
+
+ # Attach a hook to handle 401 responses
+ req.register_hook("response", self.handle_401)
+
+ return req
+
+ # Factored out to allow for easy patching in tests
+ def _prompt_for_password(self, netloc):
+ username = ask_input("User for %s: " % netloc)
+ if not username:
+ return None, None
+ auth = get_keyring_auth(netloc, username)
+ if auth:
+ return auth[0], auth[1], False
+ password = ask_password("Password: ")
+ return username, password, True
+
+ # Factored out to allow for easy patching in tests
+ def _should_save_password_to_keyring(self):
+ if not keyring:
+ return False
+ return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y"
+
+ def handle_401(self, resp, **kwargs):
+ # We only care about 401 responses, anything else we want to just
+ # pass through the actual response
+ if resp.status_code != 401:
+ return resp
+
+ # We are not able to prompt the user so simply return the response
+ if not self.prompting:
+ return resp
+
+ parsed = urllib_parse.urlparse(resp.url)
+
+ # Prompt the user for a new username and password
+ username, password, save = self._prompt_for_password(parsed.netloc)
+
+ # Store the new username and password to use for future requests
+ self._credentials_to_save = None
+ if username is not None and password is not None:
+ self.passwords[parsed.netloc] = (username, password)
+
+ # Prompt to save the password to keyring
+ if save and self._should_save_password_to_keyring():
+ self._credentials_to_save = (parsed.netloc, username, password)
+
+ # Consume content and release the original connection to allow our new
+ # request to reuse the same one.
+ resp.content
+ resp.raw.release_conn()
+
+ # Add our new username and password to the request
+ req = HTTPBasicAuth(username or "", password or "")(resp.request)
+ req.register_hook("response", self.warn_on_401)
+
+ # On successful request, save the credentials that were used to
+ # keyring. (Note that if the user responded "no" above, this member
+ # is not set and nothing will be saved.)
+ if self._credentials_to_save:
+ req.register_hook("response", self.save_credentials)
+
+ # Send our new request
+ new_resp = resp.connection.send(req, **kwargs)
+ new_resp.history.append(resp)
+
+ return new_resp
+
+ def warn_on_401(self, resp, **kwargs):
+ """Response callback to warn about incorrect credentials."""
+ if resp.status_code == 401:
+ logger.warning(
+ '401 Error, Credentials not correct for %s', resp.request.url,
+ )
+
+ def save_credentials(self, resp, **kwargs):
+ """Response callback to save credentials on success."""
+ assert keyring is not None, "should never reach here without keyring"
+ if not keyring:
+ return
+
+ creds = self._credentials_to_save
+ self._credentials_to_save = None
+ if creds and resp.status_code < 400:
+ try:
+ logger.info('Saving credentials to keyring')
+ keyring.set_password(*creds)
+ except Exception:
+ logger.exception('Failed to save credentials')
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/cache.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/cache.py
new file mode 100644
index 0000000000000000000000000000000000000000..c9386e173600d58dacda2061f49d747de386a50a
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/cache.py
@@ -0,0 +1,81 @@
+"""HTTP cache implementation.
+"""
+
+# The following comment should be removed at some point in the future.
+# mypy: disallow-untyped-defs=False
+
+import os
+from contextlib import contextmanager
+
+from pip._vendor.cachecontrol.cache import BaseCache
+from pip._vendor.cachecontrol.caches import FileCache
+from pip._vendor.requests.models import Response
+
+from pip._internal.utils.filesystem import adjacent_tmp_file, replace
+from pip._internal.utils.misc import ensure_dir
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional
+
+
+def is_from_cache(response):
+ # type: (Response) -> bool
+ return getattr(response, "from_cache", False)
+
+
+@contextmanager
+def suppressed_cache_errors():
+ """If we can't access the cache then we can just skip caching and process
+ requests as if caching wasn't enabled.
+ """
+ try:
+ yield
+ except (OSError, IOError):
+ pass
+
+
+class SafeFileCache(BaseCache):
+ """
+ A file based cache which is safe to use even when the target directory may
+ not be accessible or writable.
+ """
+
+ def __init__(self, directory):
+ # type: (str) -> None
+ assert directory is not None, "Cache directory must not be None."
+ super(SafeFileCache, self).__init__()
+ self.directory = directory
+
+ def _get_cache_path(self, name):
+ # type: (str) -> str
+ # From cachecontrol.caches.file_cache.FileCache._fn, brought into our
+ # class for backwards-compatibility and to avoid using a non-public
+ # method.
+ hashed = FileCache.encode(name)
+ parts = list(hashed[:5]) + [hashed]
+ return os.path.join(self.directory, *parts)
+
+ def get(self, key):
+ # type: (str) -> Optional[bytes]
+ path = self._get_cache_path(key)
+ with suppressed_cache_errors():
+ with open(path, 'rb') as f:
+ return f.read()
+
+ def set(self, key, value):
+ # type: (str, bytes) -> None
+ path = self._get_cache_path(key)
+ with suppressed_cache_errors():
+ ensure_dir(os.path.dirname(path))
+
+ with adjacent_tmp_file(path) as f:
+ f.write(value)
+
+ replace(f.name, path)
+
+ def delete(self, key):
+ # type: (str) -> None
+ path = self._get_cache_path(key)
+ with suppressed_cache_errors():
+ os.remove(path)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/download.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/download.py
new file mode 100644
index 0000000000000000000000000000000000000000..c90c4bf42cfe25c7c417c3776b7d5844417b9186
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/download.py
@@ -0,0 +1,200 @@
+"""Download files with progress indicators.
+"""
+import cgi
+import logging
+import mimetypes
+import os
+
+from pip._vendor import requests
+from pip._vendor.requests.models import CONTENT_CHUNK_SIZE
+
+from pip._internal.models.index import PyPI
+from pip._internal.network.cache import is_from_cache
+from pip._internal.network.utils import response_chunks
+from pip._internal.utils.misc import (
+ format_size,
+ redact_auth_from_url,
+ splitext,
+)
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.ui import DownloadProgressProvider
+
+if MYPY_CHECK_RUNNING:
+ from typing import Iterable, Optional
+
+ from pip._vendor.requests.models import Response
+
+ from pip._internal.models.link import Link
+ from pip._internal.network.session import PipSession
+
+logger = logging.getLogger(__name__)
+
+
+def _get_http_response_size(resp):
+ # type: (Response) -> Optional[int]
+ try:
+ return int(resp.headers['content-length'])
+ except (ValueError, KeyError, TypeError):
+ return None
+
+
+def _prepare_download(
+ resp, # type: Response
+ link, # type: Link
+ progress_bar # type: str
+):
+ # type: (...) -> Iterable[bytes]
+ total_length = _get_http_response_size(resp)
+
+ if link.netloc == PyPI.file_storage_domain:
+ url = link.show_url
+ else:
+ url = link.url_without_fragment
+
+ logged_url = redact_auth_from_url(url)
+
+ if total_length:
+ logged_url = '{} ({})'.format(logged_url, format_size(total_length))
+
+ if is_from_cache(resp):
+ logger.info("Using cached %s", logged_url)
+ else:
+ logger.info("Downloading %s", logged_url)
+
+ if logger.getEffectiveLevel() > logging.INFO:
+ show_progress = False
+ elif is_from_cache(resp):
+ show_progress = False
+ elif not total_length:
+ show_progress = True
+ elif total_length > (40 * 1000):
+ show_progress = True
+ else:
+ show_progress = False
+
+ chunks = response_chunks(resp, CONTENT_CHUNK_SIZE)
+
+ if not show_progress:
+ return chunks
+
+ return DownloadProgressProvider(
+ progress_bar, max=total_length
+ )(chunks)
+
+
+def sanitize_content_filename(filename):
+ # type: (str) -> str
+ """
+ Sanitize the "filename" value from a Content-Disposition header.
+ """
+ return os.path.basename(filename)
+
+
+def parse_content_disposition(content_disposition, default_filename):
+ # type: (str, str) -> str
+ """
+ Parse the "filename" value from a Content-Disposition header, and
+ return the default filename if the result is empty.
+ """
+ _type, params = cgi.parse_header(content_disposition)
+ filename = params.get('filename')
+ if filename:
+ # We need to sanitize the filename to prevent directory traversal
+ # in case the filename contains ".." path parts.
+ filename = sanitize_content_filename(filename)
+ return filename or default_filename
+
+
+def _get_http_response_filename(resp, link):
+ # type: (Response, Link) -> str
+ """Get an ideal filename from the given HTTP response, falling back to
+ the link filename if not provided.
+ """
+ filename = link.filename # fallback
+ # Have a look at the Content-Disposition header for a better guess
+ content_disposition = resp.headers.get('content-disposition')
+ if content_disposition:
+ filename = parse_content_disposition(content_disposition, filename)
+ ext = splitext(filename)[1] # type: Optional[str]
+ if not ext:
+ ext = mimetypes.guess_extension(
+ resp.headers.get('content-type', '')
+ )
+ if ext:
+ filename += ext
+ if not ext and link.url != resp.url:
+ ext = os.path.splitext(resp.url)[1]
+ if ext:
+ filename += ext
+ return filename
+
+
+def _http_get_download(session, link):
+ # type: (PipSession, Link) -> Response
+ target_url = link.url.split('#', 1)[0]
+ resp = session.get(
+ target_url,
+ # We use Accept-Encoding: identity here because requests
+ # defaults to accepting compressed responses. This breaks in
+ # a variety of ways depending on how the server is configured.
+ # - Some servers will notice that the file isn't a compressible
+ # file and will leave the file alone and with an empty
+ # Content-Encoding
+ # - Some servers will notice that the file is already
+ # compressed and will leave the file alone and will add a
+ # Content-Encoding: gzip header
+ # - Some servers won't notice anything at all and will take
+ # a file that's already been compressed and compress it again
+ # and set the Content-Encoding: gzip header
+ # By setting this to request only the identity encoding We're
+ # hoping to eliminate the third case. Hopefully there does not
+ # exist a server which when given a file will notice it is
+ # already compressed and that you're not asking for a
+ # compressed file and will then decompress it before sending
+ # because if that's the case I don't think it'll ever be
+ # possible to make this work.
+ headers={"Accept-Encoding": "identity"},
+ stream=True,
+ )
+ resp.raise_for_status()
+ return resp
+
+
+class Download(object):
+ def __init__(
+ self,
+ response, # type: Response
+ filename, # type: str
+ chunks, # type: Iterable[bytes]
+ ):
+ # type: (...) -> None
+ self.response = response
+ self.filename = filename
+ self.chunks = chunks
+
+
+class Downloader(object):
+ def __init__(
+ self,
+ session, # type: PipSession
+ progress_bar, # type: str
+ ):
+ # type: (...) -> None
+ self._session = session
+ self._progress_bar = progress_bar
+
+ def __call__(self, link):
+ # type: (Link) -> Download
+ try:
+ resp = _http_get_download(self._session, link)
+ except requests.HTTPError as e:
+ logger.critical(
+ "HTTP error %s while getting %s", e.response.status_code, link
+ )
+ raise
+
+ return Download(
+ resp,
+ _get_http_response_filename(resp, link),
+ _prepare_download(resp, link, self._progress_bar),
+ )
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/session.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/session.py
new file mode 100644
index 0000000000000000000000000000000000000000..f5eb15ef2f6245ee303b8f6297eb8c460945afca
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/session.py
@@ -0,0 +1,405 @@
+"""PipSession and supporting code, containing all pip-specific
+network request configuration and behavior.
+"""
+
+# The following comment should be removed at some point in the future.
+# mypy: disallow-untyped-defs=False
+
+import email.utils
+import json
+import logging
+import mimetypes
+import os
+import platform
+import sys
+import warnings
+
+from pip._vendor import requests, six, urllib3
+from pip._vendor.cachecontrol import CacheControlAdapter
+from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
+from pip._vendor.requests.models import Response
+from pip._vendor.requests.structures import CaseInsensitiveDict
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+from pip._vendor.urllib3.exceptions import InsecureRequestWarning
+
+from pip import __version__
+from pip._internal.network.auth import MultiDomainBasicAuth
+from pip._internal.network.cache import SafeFileCache
+# Import ssl from compat so the initial import occurs in only one place.
+from pip._internal.utils.compat import has_tls, ipaddress
+from pip._internal.utils.glibc import libc_ver
+from pip._internal.utils.misc import (
+ build_url_from_netloc,
+ get_installed_version,
+ parse_netloc,
+)
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.urls import url_to_path
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Iterator, List, Optional, Tuple, Union,
+ )
+
+ from pip._internal.models.link import Link
+
+ SecureOrigin = Tuple[str, str, Optional[Union[int, str]]]
+
+
+logger = logging.getLogger(__name__)
+
+
+# Ignore warning raised when using --trusted-host.
+warnings.filterwarnings("ignore", category=InsecureRequestWarning)
+
+
+SECURE_ORIGINS = [
+ # protocol, hostname, port
+ # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC)
+ ("https", "*", "*"),
+ ("*", "localhost", "*"),
+ ("*", "127.0.0.0/8", "*"),
+ ("*", "::1/128", "*"),
+ ("file", "*", None),
+ # ssh is always secure.
+ ("ssh", "*", "*"),
+] # type: List[SecureOrigin]
+
+
+# These are environment variables present when running under various
+# CI systems. For each variable, some CI systems that use the variable
+# are indicated. The collection was chosen so that for each of a number
+# of popular systems, at least one of the environment variables is used.
+# This list is used to provide some indication of and lower bound for
+# CI traffic to PyPI. Thus, it is okay if the list is not comprehensive.
+# For more background, see: https://github.com/pypa/pip/issues/5499
+CI_ENVIRONMENT_VARIABLES = (
+ # Azure Pipelines
+ 'BUILD_BUILDID',
+ # Jenkins
+ 'BUILD_ID',
+ # AppVeyor, CircleCI, Codeship, Gitlab CI, Shippable, Travis CI
+ 'CI',
+ # Explicit environment variable.
+ 'PIP_IS_CI',
+)
+
+
+def looks_like_ci():
+ # type: () -> bool
+ """
+ Return whether it looks like pip is running under CI.
+ """
+ # We don't use the method of checking for a tty (e.g. using isatty())
+ # because some CI systems mimic a tty (e.g. Travis CI). Thus that
+ # method doesn't provide definitive information in either direction.
+ return any(name in os.environ for name in CI_ENVIRONMENT_VARIABLES)
+
+
+def user_agent():
+ """
+ Return a string representing the user agent.
+ """
+ data = {
+ "installer": {"name": "pip", "version": __version__},
+ "python": platform.python_version(),
+ "implementation": {
+ "name": platform.python_implementation(),
+ },
+ }
+
+ if data["implementation"]["name"] == 'CPython':
+ data["implementation"]["version"] = platform.python_version()
+ elif data["implementation"]["name"] == 'PyPy':
+ if sys.pypy_version_info.releaselevel == 'final':
+ pypy_version_info = sys.pypy_version_info[:3]
+ else:
+ pypy_version_info = sys.pypy_version_info
+ data["implementation"]["version"] = ".".join(
+ [str(x) for x in pypy_version_info]
+ )
+ elif data["implementation"]["name"] == 'Jython':
+ # Complete Guess
+ data["implementation"]["version"] = platform.python_version()
+ elif data["implementation"]["name"] == 'IronPython':
+ # Complete Guess
+ data["implementation"]["version"] = platform.python_version()
+
+ if sys.platform.startswith("linux"):
+ from pip._vendor import distro
+ distro_infos = dict(filter(
+ lambda x: x[1],
+ zip(["name", "version", "id"], distro.linux_distribution()),
+ ))
+ libc = dict(filter(
+ lambda x: x[1],
+ zip(["lib", "version"], libc_ver()),
+ ))
+ if libc:
+ distro_infos["libc"] = libc
+ if distro_infos:
+ data["distro"] = distro_infos
+
+ if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
+ data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]}
+
+ if platform.system():
+ data.setdefault("system", {})["name"] = platform.system()
+
+ if platform.release():
+ data.setdefault("system", {})["release"] = platform.release()
+
+ if platform.machine():
+ data["cpu"] = platform.machine()
+
+ if has_tls():
+ import _ssl as ssl
+ data["openssl_version"] = ssl.OPENSSL_VERSION
+
+ setuptools_version = get_installed_version("setuptools")
+ if setuptools_version is not None:
+ data["setuptools_version"] = setuptools_version
+
+ # Use None rather than False so as not to give the impression that
+ # pip knows it is not being run under CI. Rather, it is a null or
+ # inconclusive result. Also, we include some value rather than no
+ # value to make it easier to know that the check has been run.
+ data["ci"] = True if looks_like_ci() else None
+
+ user_data = os.environ.get("PIP_USER_AGENT_USER_DATA")
+ if user_data is not None:
+ data["user_data"] = user_data
+
+ return "{data[installer][name]}/{data[installer][version]} {json}".format(
+ data=data,
+ json=json.dumps(data, separators=(",", ":"), sort_keys=True),
+ )
+
+
+class LocalFSAdapter(BaseAdapter):
+
+ def send(self, request, stream=None, timeout=None, verify=None, cert=None,
+ proxies=None):
+ pathname = url_to_path(request.url)
+
+ resp = Response()
+ resp.status_code = 200
+ resp.url = request.url
+
+ try:
+ stats = os.stat(pathname)
+ except OSError as exc:
+ resp.status_code = 404
+ resp.raw = exc
+ else:
+ modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
+ content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
+ resp.headers = CaseInsensitiveDict({
+ "Content-Type": content_type,
+ "Content-Length": stats.st_size,
+ "Last-Modified": modified,
+ })
+
+ resp.raw = open(pathname, "rb")
+ resp.close = resp.raw.close
+
+ return resp
+
+ def close(self):
+ pass
+
+
+class InsecureHTTPAdapter(HTTPAdapter):
+
+ def cert_verify(self, conn, url, verify, cert):
+ super(InsecureHTTPAdapter, self).cert_verify(
+ conn=conn, url=url, verify=False, cert=cert
+ )
+
+
+class PipSession(requests.Session):
+
+ timeout = None # type: Optional[int]
+
+ def __init__(self, *args, **kwargs):
+ """
+ :param trusted_hosts: Domains not to emit warnings for when not using
+ HTTPS.
+ """
+ retries = kwargs.pop("retries", 0)
+ cache = kwargs.pop("cache", None)
+ trusted_hosts = kwargs.pop("trusted_hosts", []) # type: List[str]
+ index_urls = kwargs.pop("index_urls", None)
+
+ super(PipSession, self).__init__(*args, **kwargs)
+
+ # Namespace the attribute with "pip_" just in case to prevent
+ # possible conflicts with the base class.
+ self.pip_trusted_origins = [] # type: List[Tuple[str, Optional[int]]]
+
+ # Attach our User Agent to the request
+ self.headers["User-Agent"] = user_agent()
+
+ # Attach our Authentication handler to the session
+ self.auth = MultiDomainBasicAuth(index_urls=index_urls)
+
+ # Create our urllib3.Retry instance which will allow us to customize
+ # how we handle retries.
+ retries = urllib3.Retry(
+ # Set the total number of retries that a particular request can
+ # have.
+ total=retries,
+
+ # A 503 error from PyPI typically means that the Fastly -> Origin
+ # connection got interrupted in some way. A 503 error in general
+ # is typically considered a transient error so we'll go ahead and
+ # retry it.
+ # A 500 may indicate transient error in Amazon S3
+ # A 520 or 527 - may indicate transient error in CloudFlare
+ status_forcelist=[500, 503, 520, 527],
+
+ # Add a small amount of back off between failed requests in
+ # order to prevent hammering the service.
+ backoff_factor=0.25,
+ )
+
+ # We want to _only_ cache responses on securely fetched origins. We do
+ # this because we can't validate the response of an insecurely fetched
+ # origin, and we don't want someone to be able to poison the cache and
+ # require manual eviction from the cache to fix it.
+ if cache:
+ secure_adapter = CacheControlAdapter(
+ cache=SafeFileCache(cache),
+ max_retries=retries,
+ )
+ else:
+ secure_adapter = HTTPAdapter(max_retries=retries)
+
+ # Our Insecure HTTPAdapter disables HTTPS validation. It does not
+ # support caching (see above) so we'll use it for all http:// URLs as
+ # well as any https:// host that we've marked as ignoring TLS errors
+ # for.
+ insecure_adapter = InsecureHTTPAdapter(max_retries=retries)
+ # Save this for later use in add_insecure_host().
+ self._insecure_adapter = insecure_adapter
+
+ self.mount("https://", secure_adapter)
+ self.mount("http://", insecure_adapter)
+
+ # Enable file:// urls
+ self.mount("file://", LocalFSAdapter())
+
+ for host in trusted_hosts:
+ self.add_trusted_host(host, suppress_logging=True)
+
+ def add_trusted_host(self, host, source=None, suppress_logging=False):
+ # type: (str, Optional[str], bool) -> None
+ """
+ :param host: It is okay to provide a host that has previously been
+ added.
+ :param source: An optional source string, for logging where the host
+ string came from.
+ """
+ if not suppress_logging:
+ msg = 'adding trusted host: {!r}'.format(host)
+ if source is not None:
+ msg += ' (from {})'.format(source)
+ logger.info(msg)
+
+ host_port = parse_netloc(host)
+ if host_port not in self.pip_trusted_origins:
+ self.pip_trusted_origins.append(host_port)
+
+ self.mount(build_url_from_netloc(host) + '/', self._insecure_adapter)
+ if not host_port[1]:
+ # Mount wildcard ports for the same host.
+ self.mount(
+ build_url_from_netloc(host) + ':',
+ self._insecure_adapter
+ )
+
+ def iter_secure_origins(self):
+ # type: () -> Iterator[SecureOrigin]
+ for secure_origin in SECURE_ORIGINS:
+ yield secure_origin
+ for host, port in self.pip_trusted_origins:
+ yield ('*', host, '*' if port is None else port)
+
+ def is_secure_origin(self, location):
+ # type: (Link) -> bool
+ # Determine if this url used a secure transport mechanism
+ parsed = urllib_parse.urlparse(str(location))
+ origin_protocol, origin_host, origin_port = (
+ parsed.scheme, parsed.hostname, parsed.port,
+ )
+
+ # The protocol to use to see if the protocol matches.
+ # Don't count the repository type as part of the protocol: in
+ # cases such as "git+ssh", only use "ssh". (I.e., Only verify against
+ # the last scheme.)
+ origin_protocol = origin_protocol.rsplit('+', 1)[-1]
+
+ # Determine if our origin is a secure origin by looking through our
+ # hardcoded list of secure origins, as well as any additional ones
+ # configured on this PackageFinder instance.
+ for secure_origin in self.iter_secure_origins():
+ secure_protocol, secure_host, secure_port = secure_origin
+ if origin_protocol != secure_protocol and secure_protocol != "*":
+ continue
+
+ try:
+ addr = ipaddress.ip_address(
+ None
+ if origin_host is None
+ else six.ensure_text(origin_host)
+ )
+ network = ipaddress.ip_network(
+ six.ensure_text(secure_host)
+ )
+ except ValueError:
+ # We don't have both a valid address or a valid network, so
+ # we'll check this origin against hostnames.
+ if (
+ origin_host and
+ origin_host.lower() != secure_host.lower() and
+ secure_host != "*"
+ ):
+ continue
+ else:
+ # We have a valid address and network, so see if the address
+ # is contained within the network.
+ if addr not in network:
+ continue
+
+ # Check to see if the port matches.
+ if (
+ origin_port != secure_port and
+ secure_port != "*" and
+ secure_port is not None
+ ):
+ continue
+
+ # If we've gotten here, then this origin matches the current
+ # secure origin and we should return True
+ return True
+
+ # If we've gotten to this point, then the origin isn't secure and we
+ # will not accept it as a valid location to search. We will however
+ # log a warning that we are ignoring it.
+ logger.warning(
+ "The repository located at %s is not a trusted or secure host and "
+ "is being ignored. If this repository is available via HTTPS we "
+ "recommend you use HTTPS instead, otherwise you may silence "
+ "this warning and allow it anyway with '--trusted-host %s'.",
+ origin_host,
+ origin_host,
+ )
+
+ return False
+
+ def request(self, method, url, *args, **kwargs):
+ # Allow setting a default timeout on a session
+ kwargs.setdefault("timeout", self.timeout)
+
+ # Dispatch the actual request
+ return super(PipSession, self).request(method, url, *args, **kwargs)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/utils.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..a19050b0f7082809f277bc74e516a9af8e537136
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/utils.py
@@ -0,0 +1,48 @@
+from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
+
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Iterator
+
+
+def response_chunks(response, chunk_size=CONTENT_CHUNK_SIZE):
+ # type: (Response, int) -> Iterator[bytes]
+ """Given a requests Response, provide the data chunks.
+ """
+ try:
+ # Special case for urllib3.
+ for chunk in response.raw.stream(
+ chunk_size,
+ # We use decode_content=False here because we don't
+ # want urllib3 to mess with the raw bytes we get
+ # from the server. If we decompress inside of
+ # urllib3 then we cannot verify the checksum
+ # because the checksum will be of the compressed
+ # file. This breakage will only occur if the
+ # server adds a Content-Encoding header, which
+ # depends on how the server was configured:
+ # - Some servers will notice that the file isn't a
+ # compressible file and will leave the file alone
+ # and with an empty Content-Encoding
+ # - Some servers will notice that the file is
+ # already compressed and will leave the file
+ # alone and will add a Content-Encoding: gzip
+ # header
+ # - Some servers won't notice anything at all and
+ # will take a file that's already been compressed
+ # and compress it again and set the
+ # Content-Encoding: gzip header
+ #
+ # By setting this not to decode automatically we
+ # hope to eliminate problems with the second case.
+ decode_content=False,
+ ):
+ yield chunk
+ except AttributeError:
+ # Standard file-like object.
+ while True:
+ chunk = response.raw.read(chunk_size)
+ if not chunk:
+ break
+ yield chunk
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/xmlrpc.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/xmlrpc.py
new file mode 100644
index 0000000000000000000000000000000000000000..121edd93056f57c7717e6e48e2d7432cfc18ada4
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/network/xmlrpc.py
@@ -0,0 +1,44 @@
+"""xmlrpclib.Transport implementation
+"""
+
+# The following comment should be removed at some point in the future.
+# mypy: disallow-untyped-defs=False
+
+import logging
+
+from pip._vendor import requests
+# NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is
+# why we ignore the type on this import
+from pip._vendor.six.moves import xmlrpc_client # type: ignore
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+
+logger = logging.getLogger(__name__)
+
+
+class PipXmlrpcTransport(xmlrpc_client.Transport):
+ """Provide a `xmlrpclib.Transport` implementation via a `PipSession`
+ object.
+ """
+
+ def __init__(self, index_url, session, use_datetime=False):
+ xmlrpc_client.Transport.__init__(self, use_datetime)
+ index_parts = urllib_parse.urlparse(index_url)
+ self._scheme = index_parts.scheme
+ self._session = session
+
+ def request(self, host, handler, request_body, verbose=False):
+ parts = (self._scheme, host, handler, None, None, None)
+ url = urllib_parse.urlunparse(parts)
+ try:
+ headers = {'Content-Type': 'text/xml'}
+ response = self._session.post(url, data=request_body,
+ headers=headers, stream=True)
+ response.raise_for_status()
+ self.verbose = verbose
+ return self.parse_response(response.raw)
+ except requests.HTTPError as exc:
+ logger.critical(
+ "HTTP error %s while getting %s",
+ exc.response.status_code, url,
+ )
+ raise
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/__init__.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..cee02b3dcf32c55b483c2eafbc570195c530f09d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/check.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/check.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2e5e1ba6237afc1301041f8470376b331b328fd9
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/check.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..332ceda028ae64384c4b5b790c6926196e180785
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ba4d6a47ae267063490b52050a55795796d1ae95
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/build/__init__.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/build/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0b111440ec8655d3c15bd0b3cbedd943cae6ac98
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c6746b69ce5e8e1ebaa5e1feb267c0a41985bfe1
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7717ae66a0acf583a3e6d1e3825f4570a07c3485
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9b6825a16816c5f83876a1f4d5db2cad7fb014e2
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..65eab02b03f156443ac8be7be02cd18b1d9b0698
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/build/metadata.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/build/metadata.py
new file mode 100644
index 0000000000000000000000000000000000000000..b13fbdef93357da3d1b3b0303b49a28990736256
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/build/metadata.py
@@ -0,0 +1,40 @@
+"""Metadata generation logic for source distributions.
+"""
+
+import logging
+import os
+
+from pip._internal.utils.subprocess import runner_with_spinner_message
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from pip._internal.build_env import BuildEnvironment
+ from pip._vendor.pep517.wrappers import Pep517HookCaller
+
+logger = logging.getLogger(__name__)
+
+
+def generate_metadata(build_env, backend):
+ # type: (BuildEnvironment, Pep517HookCaller) -> str
+ """Generate metadata using mechanisms described in PEP 517.
+
+ Returns the generated metadata directory.
+ """
+ metadata_tmpdir = TempDirectory(
+ kind="modern-metadata", globally_managed=True
+ )
+
+ metadata_dir = metadata_tmpdir.path
+
+ with build_env:
+ # Note that Pep517HookCaller implements a fallback for
+ # prepare_metadata_for_build_wheel, so we don't have to
+ # consider the possibility that this hook doesn't exist.
+ runner = runner_with_spinner_message("Preparing wheel metadata")
+ with backend.subprocess_runner(runner):
+ distinfo_dir = backend.prepare_metadata_for_build_wheel(
+ metadata_dir
+ )
+
+ return os.path.join(metadata_dir, distinfo_dir)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/build/metadata_legacy.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/build/metadata_legacy.py
new file mode 100644
index 0000000000000000000000000000000000000000..b6813f89ba7dd5ea88c59dc618ddb18701ae2194
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/build/metadata_legacy.py
@@ -0,0 +1,122 @@
+"""Metadata generation logic for legacy source distributions.
+"""
+
+import logging
+import os
+
+from pip._internal.exceptions import InstallationError
+from pip._internal.utils.misc import ensure_dir
+from pip._internal.utils.setuptools_build import make_setuptools_egg_info_args
+from pip._internal.utils.subprocess import call_subprocess
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.vcs import vcs
+
+if MYPY_CHECK_RUNNING:
+ from typing import List, Optional
+
+ from pip._internal.build_env import BuildEnvironment
+
+logger = logging.getLogger(__name__)
+
+
+def _find_egg_info(source_directory, is_editable):
+ # type: (str, bool) -> str
+ """Find an .egg-info in `source_directory`, based on `is_editable`.
+ """
+
+ def looks_like_virtual_env(path):
+ # type: (str) -> bool
+ return (
+ os.path.lexists(os.path.join(path, 'bin', 'python')) or
+ os.path.exists(os.path.join(path, 'Scripts', 'Python.exe'))
+ )
+
+ def locate_editable_egg_info(base):
+ # type: (str) -> List[str]
+ candidates = [] # type: List[str]
+ for root, dirs, files in os.walk(base):
+ for dir_ in vcs.dirnames:
+ if dir_ in dirs:
+ dirs.remove(dir_)
+ # Iterate over a copy of ``dirs``, since mutating
+ # a list while iterating over it can cause trouble.
+ # (See https://github.com/pypa/pip/pull/462.)
+ for dir_ in list(dirs):
+ if looks_like_virtual_env(os.path.join(root, dir_)):
+ dirs.remove(dir_)
+ # Also don't search through tests
+ elif dir_ == 'test' or dir_ == 'tests':
+ dirs.remove(dir_)
+ candidates.extend(os.path.join(root, dir_) for dir_ in dirs)
+ return [f for f in candidates if f.endswith('.egg-info')]
+
+ def depth_of_directory(dir_):
+ # type: (str) -> int
+ return (
+ dir_.count(os.path.sep) +
+ (os.path.altsep and dir_.count(os.path.altsep) or 0)
+ )
+
+ base = source_directory
+ if is_editable:
+ filenames = locate_editable_egg_info(base)
+ else:
+ base = os.path.join(base, 'pip-egg-info')
+ filenames = os.listdir(base)
+
+ if not filenames:
+ raise InstallationError(
+ "Files/directories not found in {}".format(base)
+ )
+
+ # If we have more than one match, we pick the toplevel one. This
+ # can easily be the case if there is a dist folder which contains
+ # an extracted tarball for testing purposes.
+ if len(filenames) > 1:
+ filenames.sort(key=depth_of_directory)
+
+ return os.path.join(base, filenames[0])
+
+
+def generate_metadata(
+ build_env, # type: BuildEnvironment
+ setup_py_path, # type: str
+ source_dir, # type: str
+ editable, # type: bool
+ isolated, # type: bool
+ details, # type: str
+):
+ # type: (...) -> str
+ """Generate metadata using setup.py-based defacto mechanisms.
+
+ Returns the generated metadata directory.
+ """
+ logger.debug(
+ 'Running setup.py (path:%s) egg_info for package %s',
+ setup_py_path, details,
+ )
+
+ egg_info_dir = None # type: Optional[str]
+ # For non-editable installs, don't put the .egg-info files at the root,
+ # to avoid confusion due to the source code being considered an installed
+ # egg.
+ if not editable:
+ egg_info_dir = os.path.join(source_dir, 'pip-egg-info')
+ # setuptools complains if the target directory does not exist.
+ ensure_dir(egg_info_dir)
+
+ args = make_setuptools_egg_info_args(
+ setup_py_path,
+ egg_info_dir=egg_info_dir,
+ no_user_config=isolated,
+ )
+
+ with build_env:
+ call_subprocess(
+ args,
+ cwd=source_dir,
+ command_desc='python setup.py egg_info',
+ )
+
+ # Return the .egg-info directory.
+ return _find_egg_info(source_dir, editable)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/build/wheel.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/build/wheel.py
new file mode 100644
index 0000000000000000000000000000000000000000..1266ce05c6f4fddeec7f40a00ad4d2d85f531552
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/build/wheel.py
@@ -0,0 +1,46 @@
+import logging
+import os
+
+from pip._internal.utils.subprocess import runner_with_spinner_message
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import List, Optional
+ from pip._vendor.pep517.wrappers import Pep517HookCaller
+
+logger = logging.getLogger(__name__)
+
+
+def build_wheel_pep517(
+ name, # type: str
+ backend, # type: Pep517HookCaller
+ metadata_directory, # type: str
+ build_options, # type: List[str]
+ tempd, # type: str
+):
+ # type: (...) -> Optional[str]
+ """Build one InstallRequirement using the PEP 517 build process.
+
+ Returns path to wheel if successfully built. Otherwise, returns None.
+ """
+ assert metadata_directory is not None
+ if build_options:
+ # PEP 517 does not support --build-options
+ logger.error('Cannot build wheel for %s using PEP 517 when '
+ '--build-option is present' % (name,))
+ return None
+ try:
+ logger.debug('Destination directory: %s', tempd)
+
+ runner = runner_with_spinner_message(
+ 'Building wheel for {} (PEP 517)'.format(name)
+ )
+ with backend.subprocess_runner(runner):
+ wheel_name = backend.build_wheel(
+ tempd,
+ metadata_directory=metadata_directory,
+ )
+ except Exception:
+ logger.error('Failed building wheel for %s', name)
+ return None
+ return os.path.join(tempd, wheel_name)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/build/wheel_legacy.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/build/wheel_legacy.py
new file mode 100644
index 0000000000000000000000000000000000000000..3ebd9fe444bddc4bafae14af8dda297ddb98ce40
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/build/wheel_legacy.py
@@ -0,0 +1,115 @@
+import logging
+import os.path
+
+from pip._internal.utils.setuptools_build import (
+ make_setuptools_bdist_wheel_args,
+)
+from pip._internal.utils.subprocess import (
+ LOG_DIVIDER,
+ call_subprocess,
+ format_command_args,
+)
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.ui import open_spinner
+
+if MYPY_CHECK_RUNNING:
+ from typing import List, Optional, Text
+
+logger = logging.getLogger(__name__)
+
+
+def format_command_result(
+ command_args, # type: List[str]
+ command_output, # type: Text
+):
+ # type: (...) -> str
+ """Format command information for logging."""
+ command_desc = format_command_args(command_args)
+ text = 'Command arguments: {}\n'.format(command_desc)
+
+ if not command_output:
+ text += 'Command output: None'
+ elif logger.getEffectiveLevel() > logging.DEBUG:
+ text += 'Command output: [use --verbose to show]'
+ else:
+ if not command_output.endswith('\n'):
+ command_output += '\n'
+ text += 'Command output:\n{}{}'.format(command_output, LOG_DIVIDER)
+
+ return text
+
+
+def get_legacy_build_wheel_path(
+ names, # type: List[str]
+ temp_dir, # type: str
+ name, # type: str
+ command_args, # type: List[str]
+ command_output, # type: Text
+):
+ # type: (...) -> Optional[str]
+ """Return the path to the wheel in the temporary build directory."""
+ # Sort for determinism.
+ names = sorted(names)
+ if not names:
+ msg = (
+ 'Legacy build of wheel for {!r} created no files.\n'
+ ).format(name)
+ msg += format_command_result(command_args, command_output)
+ logger.warning(msg)
+ return None
+
+ if len(names) > 1:
+ msg = (
+ 'Legacy build of wheel for {!r} created more than one file.\n'
+ 'Filenames (choosing first): {}\n'
+ ).format(name, names)
+ msg += format_command_result(command_args, command_output)
+ logger.warning(msg)
+
+ return os.path.join(temp_dir, names[0])
+
+
+def build_wheel_legacy(
+ name, # type: str
+ setup_py_path, # type: str
+ source_dir, # type: str
+ global_options, # type: List[str]
+ build_options, # type: List[str]
+ tempd, # type: str
+):
+ # type: (...) -> Optional[str]
+ """Build one unpacked package using the "legacy" build process.
+
+ Returns path to wheel if successfully built. Otherwise, returns None.
+ """
+ wheel_args = make_setuptools_bdist_wheel_args(
+ setup_py_path,
+ global_options=global_options,
+ build_options=build_options,
+ destination_dir=tempd,
+ )
+
+ spin_message = 'Building wheel for %s (setup.py)' % (name,)
+ with open_spinner(spin_message) as spinner:
+ logger.debug('Destination directory: %s', tempd)
+
+ try:
+ output = call_subprocess(
+ wheel_args,
+ cwd=source_dir,
+ spinner=spinner,
+ )
+ except Exception:
+ spinner.finish("error")
+ logger.error('Failed building wheel for %s', name)
+ return None
+
+ names = os.listdir(tempd)
+ wheel_path = get_legacy_build_wheel_path(
+ names=names,
+ temp_dir=tempd,
+ name=name,
+ command_args=wheel_args,
+ command_output=output,
+ )
+ return wheel_path
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/check.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/check.py
new file mode 100644
index 0000000000000000000000000000000000000000..b85a12306a4f9008ae072b5f2c88df5b9d1d3db3
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/check.py
@@ -0,0 +1,163 @@
+"""Validation of dependencies of packages
+"""
+
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+# mypy: disallow-untyped-defs=False
+
+import logging
+from collections import namedtuple
+
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.pkg_resources import RequirementParseError
+
+from pip._internal.distributions import (
+ make_distribution_for_install_requirement,
+)
+from pip._internal.utils.misc import get_installed_distributions
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+logger = logging.getLogger(__name__)
+
+if MYPY_CHECK_RUNNING:
+ from pip._internal.req.req_install import InstallRequirement
+ from typing import (
+ Any, Callable, Dict, Optional, Set, Tuple, List
+ )
+
+ # Shorthands
+ PackageSet = Dict[str, 'PackageDetails']
+ Missing = Tuple[str, Any]
+ Conflicting = Tuple[str, str, Any]
+
+ MissingDict = Dict[str, List[Missing]]
+ ConflictingDict = Dict[str, List[Conflicting]]
+ CheckResult = Tuple[MissingDict, ConflictingDict]
+
+PackageDetails = namedtuple('PackageDetails', ['version', 'requires'])
+
+
+def create_package_set_from_installed(**kwargs):
+ # type: (**Any) -> Tuple[PackageSet, bool]
+ """Converts a list of distributions into a PackageSet.
+ """
+ # Default to using all packages installed on the system
+ if kwargs == {}:
+ kwargs = {"local_only": False, "skip": ()}
+
+ package_set = {}
+ problems = False
+ for dist in get_installed_distributions(**kwargs):
+ name = canonicalize_name(dist.project_name)
+ try:
+ package_set[name] = PackageDetails(dist.version, dist.requires())
+ except RequirementParseError as e:
+ # Don't crash on broken metadata
+ logger.warning("Error parsing requirements for %s: %s", name, e)
+ problems = True
+ return package_set, problems
+
+
+def check_package_set(package_set, should_ignore=None):
+ # type: (PackageSet, Optional[Callable[[str], bool]]) -> CheckResult
+ """Check if a package set is consistent
+
+ If should_ignore is passed, it should be a callable that takes a
+ package name and returns a boolean.
+ """
+ if should_ignore is None:
+ def should_ignore(name):
+ return False
+
+ missing = {}
+ conflicting = {}
+
+ for package_name in package_set:
+ # Info about dependencies of package_name
+ missing_deps = set() # type: Set[Missing]
+ conflicting_deps = set() # type: Set[Conflicting]
+
+ if should_ignore(package_name):
+ continue
+
+ for req in package_set[package_name].requires:
+ name = canonicalize_name(req.project_name) # type: str
+
+ # Check if it's missing
+ if name not in package_set:
+ missed = True
+ if req.marker is not None:
+ missed = req.marker.evaluate()
+ if missed:
+ missing_deps.add((name, req))
+ continue
+
+ # Check if there's a conflict
+ version = package_set[name].version # type: str
+ if not req.specifier.contains(version, prereleases=True):
+ conflicting_deps.add((name, version, req))
+
+ if missing_deps:
+ missing[package_name] = sorted(missing_deps, key=str)
+ if conflicting_deps:
+ conflicting[package_name] = sorted(conflicting_deps, key=str)
+
+ return missing, conflicting
+
+
+def check_install_conflicts(to_install):
+ # type: (List[InstallRequirement]) -> Tuple[PackageSet, CheckResult]
+ """For checking if the dependency graph would be consistent after \
+ installing given requirements
+ """
+ # Start from the current state
+ package_set, _ = create_package_set_from_installed()
+ # Install packages
+ would_be_installed = _simulate_installation_of(to_install, package_set)
+
+ # Only warn about directly-dependent packages; create a whitelist of them
+ whitelist = _create_whitelist(would_be_installed, package_set)
+
+ return (
+ package_set,
+ check_package_set(
+ package_set, should_ignore=lambda name: name not in whitelist
+ )
+ )
+
+
+def _simulate_installation_of(to_install, package_set):
+ # type: (List[InstallRequirement], PackageSet) -> Set[str]
+ """Computes the version of packages after installing to_install.
+ """
+
+ # Keep track of packages that were installed
+ installed = set()
+
+ # Modify it as installing requirement_set would (assuming no errors)
+ for inst_req in to_install:
+ abstract_dist = make_distribution_for_install_requirement(inst_req)
+ dist = abstract_dist.get_pkg_resources_distribution()
+
+ name = canonicalize_name(dist.key)
+ package_set[name] = PackageDetails(dist.version, dist.requires())
+
+ installed.add(name)
+
+ return installed
+
+
+def _create_whitelist(would_be_installed, package_set):
+ # type: (Set[str], PackageSet) -> Set[str]
+ packages_affected = set(would_be_installed)
+
+ for package_name in package_set:
+ if package_name in packages_affected:
+ continue
+
+ for req in package_set[package_name].requires:
+ if canonicalize_name(req.name) in packages_affected:
+ packages_affected.add(package_name)
+ break
+
+ return packages_affected
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/freeze.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/freeze.py
new file mode 100644
index 0000000000000000000000000000000000000000..36a5c339a2ab22debec595af17a520a803f2a783
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/freeze.py
@@ -0,0 +1,265 @@
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+# mypy: disallow-untyped-defs=False
+
+from __future__ import absolute_import
+
+import collections
+import logging
+import os
+import re
+
+from pip._vendor import six
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.pkg_resources import RequirementParseError
+
+from pip._internal.exceptions import BadCommand, InstallationError
+from pip._internal.req.constructors import (
+ install_req_from_editable,
+ install_req_from_line,
+)
+from pip._internal.req.req_file import COMMENT_RE
+from pip._internal.utils.misc import (
+ dist_is_editable,
+ get_installed_distributions,
+)
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Iterator, Optional, List, Container, Set, Dict, Tuple, Iterable, Union
+ )
+ from pip._internal.cache import WheelCache
+ from pip._vendor.pkg_resources import (
+ Distribution, Requirement
+ )
+
+ RequirementInfo = Tuple[Optional[Union[str, Requirement]], bool, List[str]]
+
+
+logger = logging.getLogger(__name__)
+
+
+def freeze(
+ requirement=None, # type: Optional[List[str]]
+ find_links=None, # type: Optional[List[str]]
+ local_only=None, # type: Optional[bool]
+ user_only=None, # type: Optional[bool]
+ paths=None, # type: Optional[List[str]]
+ skip_regex=None, # type: Optional[str]
+ isolated=False, # type: bool
+ wheel_cache=None, # type: Optional[WheelCache]
+ exclude_editable=False, # type: bool
+ skip=() # type: Container[str]
+):
+ # type: (...) -> Iterator[str]
+ find_links = find_links or []
+ skip_match = None
+
+ if skip_regex:
+ skip_match = re.compile(skip_regex).search
+
+ for link in find_links:
+ yield '-f %s' % link
+ installations = {} # type: Dict[str, FrozenRequirement]
+ for dist in get_installed_distributions(local_only=local_only,
+ skip=(),
+ user_only=user_only,
+ paths=paths):
+ try:
+ req = FrozenRequirement.from_dist(dist)
+ except RequirementParseError as exc:
+ # We include dist rather than dist.project_name because the
+ # dist string includes more information, like the version and
+ # location. We also include the exception message to aid
+ # troubleshooting.
+ logger.warning(
+ 'Could not generate requirement for distribution %r: %s',
+ dist, exc
+ )
+ continue
+ if exclude_editable and req.editable:
+ continue
+ installations[req.canonical_name] = req
+
+ if requirement:
+ # the options that don't get turned into an InstallRequirement
+ # should only be emitted once, even if the same option is in multiple
+ # requirements files, so we need to keep track of what has been emitted
+ # so that we don't emit it again if it's seen again
+ emitted_options = set() # type: Set[str]
+ # keep track of which files a requirement is in so that we can
+ # give an accurate warning if a requirement appears multiple times.
+ req_files = collections.defaultdict(list) # type: Dict[str, List[str]]
+ for req_file_path in requirement:
+ with open(req_file_path) as req_file:
+ for line in req_file:
+ if (not line.strip() or
+ line.strip().startswith('#') or
+ (skip_match and skip_match(line)) or
+ line.startswith((
+ '-r', '--requirement',
+ '-Z', '--always-unzip',
+ '-f', '--find-links',
+ '-i', '--index-url',
+ '--pre',
+ '--trusted-host',
+ '--process-dependency-links',
+ '--extra-index-url'))):
+ line = line.rstrip()
+ if line not in emitted_options:
+ emitted_options.add(line)
+ yield line
+ continue
+
+ if line.startswith('-e') or line.startswith('--editable'):
+ if line.startswith('-e'):
+ line = line[2:].strip()
+ else:
+ line = line[len('--editable'):].strip().lstrip('=')
+ line_req = install_req_from_editable(
+ line,
+ isolated=isolated,
+ wheel_cache=wheel_cache,
+ )
+ else:
+ line_req = install_req_from_line(
+ COMMENT_RE.sub('', line).strip(),
+ isolated=isolated,
+ wheel_cache=wheel_cache,
+ )
+
+ if not line_req.name:
+ logger.info(
+ "Skipping line in requirement file [%s] because "
+ "it's not clear what it would install: %s",
+ req_file_path, line.strip(),
+ )
+ logger.info(
+ " (add #egg=PackageName to the URL to avoid"
+ " this warning)"
+ )
+ else:
+ line_req_canonical_name = canonicalize_name(
+ line_req.name)
+ if line_req_canonical_name not in installations:
+ # either it's not installed, or it is installed
+ # but has been processed already
+ if not req_files[line_req.name]:
+ logger.warning(
+ "Requirement file [%s] contains %s, but "
+ "package %r is not installed",
+ req_file_path,
+ COMMENT_RE.sub('', line).strip(),
+ line_req.name
+ )
+ else:
+ req_files[line_req.name].append(req_file_path)
+ else:
+ yield str(installations[
+ line_req_canonical_name]).rstrip()
+ del installations[line_req_canonical_name]
+ req_files[line_req.name].append(req_file_path)
+
+ # Warn about requirements that were included multiple times (in a
+ # single requirements file or in different requirements files).
+ for name, files in six.iteritems(req_files):
+ if len(files) > 1:
+ logger.warning("Requirement %s included multiple times [%s]",
+ name, ', '.join(sorted(set(files))))
+
+ yield(
+ '## The following requirements were added by '
+ 'pip freeze:'
+ )
+ for installation in sorted(
+ installations.values(), key=lambda x: x.name.lower()):
+ if installation.canonical_name not in skip:
+ yield str(installation).rstrip()
+
+
+def get_requirement_info(dist):
+ # type: (Distribution) -> RequirementInfo
+ """
+ Compute and return values (req, editable, comments) for use in
+ FrozenRequirement.from_dist().
+ """
+ if not dist_is_editable(dist):
+ return (None, False, [])
+
+ location = os.path.normcase(os.path.abspath(dist.location))
+
+ from pip._internal.vcs import vcs, RemoteNotFoundError
+ vcs_backend = vcs.get_backend_for_dir(location)
+
+ if vcs_backend is None:
+ req = dist.as_requirement()
+ logger.debug(
+ 'No VCS found for editable requirement "%s" in: %r', req,
+ location,
+ )
+ comments = [
+ '# Editable install with no version control ({})'.format(req)
+ ]
+ return (location, True, comments)
+
+ try:
+ req = vcs_backend.get_src_requirement(location, dist.project_name)
+ except RemoteNotFoundError:
+ req = dist.as_requirement()
+ comments = [
+ '# Editable {} install with no remote ({})'.format(
+ type(vcs_backend).__name__, req,
+ )
+ ]
+ return (location, True, comments)
+
+ except BadCommand:
+ logger.warning(
+ 'cannot determine version of editable source in %s '
+ '(%s command not found in path)',
+ location,
+ vcs_backend.name,
+ )
+ return (None, True, [])
+
+ except InstallationError as exc:
+ logger.warning(
+ "Error when trying to get requirement for VCS system %s, "
+ "falling back to uneditable format", exc
+ )
+ else:
+ if req is not None:
+ return (req, True, [])
+
+ logger.warning(
+ 'Could not determine repository location of %s', location
+ )
+ comments = ['## !! Could not determine repository location']
+
+ return (None, False, comments)
+
+
+class FrozenRequirement(object):
+ def __init__(self, name, req, editable, comments=()):
+ # type: (str, Union[str, Requirement], bool, Iterable[str]) -> None
+ self.name = name
+ self.canonical_name = canonicalize_name(name)
+ self.req = req
+ self.editable = editable
+ self.comments = comments
+
+ @classmethod
+ def from_dist(cls, dist):
+ # type: (Distribution) -> FrozenRequirement
+ req, editable, comments = get_requirement_info(dist)
+ if req is None:
+ req = dist.as_requirement()
+
+ return cls(dist.project_name, req, editable, comments=comments)
+
+ def __str__(self):
+ req = self.req
+ if self.editable:
+ req = '-e %s' % req
+ return '\n'.join(list(self.comments) + [str(req)]) + '\n'
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/install/__init__.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/install/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..24d6a5dd31fe33b03f90ed0f9ee465253686900c
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/install/__init__.py
@@ -0,0 +1,2 @@
+"""For modules related to installing packages.
+"""
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..26bf8bc50c2f8c3dcf799e03cf257814d9ef2f28
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c130d4f6a49bbfe07a4f8bcc78540f582b94479a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/install/__pycache__/legacy.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/install/__pycache__/legacy.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..620c0d9f1b096b9b8e96e600a1b0ad8b2f7dea7b
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/install/__pycache__/legacy.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..51a3a1f7e422f775a81f7faf7b6a96819f441b6e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/install/editable_legacy.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/install/editable_legacy.py
new file mode 100644
index 0000000000000000000000000000000000000000..a668a61dc60f50963186b5a358e1e581bb6bbf09
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/install/editable_legacy.py
@@ -0,0 +1,52 @@
+"""Legacy editable installation process, i.e. `setup.py develop`.
+"""
+import logging
+
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.setuptools_build import make_setuptools_develop_args
+from pip._internal.utils.subprocess import call_subprocess
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import List, Optional, Sequence
+
+ from pip._internal.build_env import BuildEnvironment
+
+
+logger = logging.getLogger(__name__)
+
+
+def install_editable(
+ install_options, # type: List[str]
+ global_options, # type: Sequence[str]
+ prefix, # type: Optional[str]
+ home, # type: Optional[str]
+ use_user_site, # type: bool
+ name, # type: str
+ setup_py_path, # type: str
+ isolated, # type: bool
+ build_env, # type: BuildEnvironment
+ unpacked_source_directory, # type: str
+):
+ # type: (...) -> None
+ """Install a package in editable mode. Most arguments are pass-through
+ to setuptools.
+ """
+ logger.info('Running setup.py develop for %s', name)
+
+ args = make_setuptools_develop_args(
+ setup_py_path,
+ global_options=global_options,
+ install_options=install_options,
+ no_user_config=isolated,
+ prefix=prefix,
+ home=home,
+ use_user_site=use_user_site,
+ )
+
+ with indent_log():
+ with build_env:
+ call_subprocess(
+ args,
+ cwd=unpacked_source_directory,
+ )
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/install/legacy.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/install/legacy.py
new file mode 100644
index 0000000000000000000000000000000000000000..2d4adc4f62c81f0dcb2cd48c340102234052fac7
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/install/legacy.py
@@ -0,0 +1,129 @@
+"""Legacy installation process, i.e. `setup.py install`.
+"""
+
+import logging
+import os
+from distutils.util import change_root
+
+from pip._internal.utils.deprecation import deprecated
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import ensure_dir
+from pip._internal.utils.setuptools_build import make_setuptools_install_args
+from pip._internal.utils.subprocess import runner_with_spinner_message
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import List, Optional, Sequence
+
+ from pip._internal.models.scheme import Scheme
+ from pip._internal.req.req_install import InstallRequirement
+
+
+logger = logging.getLogger(__name__)
+
+
+def install(
+ install_req, # type: InstallRequirement
+ install_options, # type: List[str]
+ global_options, # type: Sequence[str]
+ root, # type: Optional[str]
+ home, # type: Optional[str]
+ prefix, # type: Optional[str]
+ use_user_site, # type: bool
+ pycompile, # type: bool
+ scheme, # type: Scheme
+):
+ # type: (...) -> None
+ # Extend the list of global and install options passed on to
+ # the setup.py call with the ones from the requirements file.
+ # Options specified in requirements file override those
+ # specified on the command line, since the last option given
+ # to setup.py is the one that is used.
+ global_options = list(global_options) + \
+ install_req.options.get('global_options', [])
+ install_options = list(install_options) + \
+ install_req.options.get('install_options', [])
+
+ header_dir = scheme.headers
+
+ with TempDirectory(kind="record") as temp_dir:
+ record_filename = os.path.join(temp_dir.path, 'install-record.txt')
+ install_args = make_setuptools_install_args(
+ install_req.setup_py_path,
+ global_options=global_options,
+ install_options=install_options,
+ record_filename=record_filename,
+ root=root,
+ prefix=prefix,
+ header_dir=header_dir,
+ home=home,
+ use_user_site=use_user_site,
+ no_user_config=install_req.isolated,
+ pycompile=pycompile,
+ )
+
+ runner = runner_with_spinner_message(
+ "Running setup.py install for {}".format(install_req.name)
+ )
+ with indent_log(), install_req.build_env:
+ runner(
+ cmd=install_args,
+ cwd=install_req.unpacked_source_directory,
+ )
+
+ if not os.path.exists(record_filename):
+ logger.debug('Record file %s not found', record_filename)
+ return
+ install_req.install_succeeded = True
+
+ # We intentionally do not use any encoding to read the file because
+ # setuptools writes the file using distutils.file_util.write_file,
+ # which does not specify an encoding.
+ with open(record_filename) as f:
+ record_lines = f.read().splitlines()
+
+ def prepend_root(path):
+ # type: (str) -> str
+ if root is None or not os.path.isabs(path):
+ return path
+ else:
+ return change_root(root, path)
+
+ for line in record_lines:
+ directory = os.path.dirname(line)
+ if directory.endswith('.egg-info'):
+ egg_info_dir = prepend_root(directory)
+ break
+ else:
+ deprecated(
+ reason=(
+ "{} did not indicate that it installed an "
+ ".egg-info directory. Only setup.py projects "
+ "generating .egg-info directories are supported."
+ ).format(install_req),
+ replacement=(
+ "for maintainers: updating the setup.py of {0}. "
+ "For users: contact the maintainers of {0} to let "
+ "them know to update their setup.py.".format(
+ install_req.name
+ )
+ ),
+ gone_in="20.2",
+ issue=6998,
+ )
+ # FIXME: put the record somewhere
+ return
+ new_lines = []
+ for line in record_lines:
+ filename = line.strip()
+ if os.path.isdir(filename):
+ filename += os.path.sep
+ new_lines.append(
+ os.path.relpath(prepend_root(filename), egg_info_dir)
+ )
+ new_lines.sort()
+ ensure_dir(egg_info_dir)
+ inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt')
+ with open(inst_files_path, 'w') as f:
+ f.write('\n'.join(new_lines) + '\n')
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/install/wheel.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/install/wheel.py
new file mode 100644
index 0000000000000000000000000000000000000000..aac975c3ac8ebfed2f3d54e229a2d8d28d878865
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/install/wheel.py
@@ -0,0 +1,615 @@
+"""Support for installing and building the "wheel" binary package format.
+"""
+
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+from __future__ import absolute_import
+
+import collections
+import compileall
+import csv
+import logging
+import os.path
+import re
+import shutil
+import stat
+import sys
+import warnings
+from base64 import urlsafe_b64encode
+from zipfile import ZipFile
+
+from pip._vendor import pkg_resources
+from pip._vendor.distlib.scripts import ScriptMaker
+from pip._vendor.distlib.util import get_export_entry
+from pip._vendor.six import StringIO
+
+from pip._internal.exceptions import InstallationError
+from pip._internal.locations import get_major_minor_version
+from pip._internal.utils.misc import captured_stdout, ensure_dir, hash_file
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.unpacking import unpack_file
+from pip._internal.utils.wheel import parse_wheel
+
+if MYPY_CHECK_RUNNING:
+ from email.message import Message
+ from typing import (
+ Dict, List, Optional, Sequence, Tuple, IO, Text, Any,
+ Iterable, Callable, Set,
+ )
+
+ from pip._internal.models.scheme import Scheme
+
+ InstalledCSVRow = Tuple[str, ...]
+
+
+logger = logging.getLogger(__name__)
+
+
+def normpath(src, p):
+ # type: (str, str) -> str
+ return os.path.relpath(src, p).replace(os.path.sep, '/')
+
+
+def rehash(path, blocksize=1 << 20):
+ # type: (str, int) -> Tuple[str, str]
+ """Return (encoded_digest, length) for path using hashlib.sha256()"""
+ h, length = hash_file(path, blocksize)
+ digest = 'sha256=' + urlsafe_b64encode(
+ h.digest()
+ ).decode('latin1').rstrip('=')
+ # unicode/str python2 issues
+ return (digest, str(length)) # type: ignore
+
+
+def open_for_csv(name, mode):
+ # type: (str, Text) -> IO[Any]
+ if sys.version_info[0] < 3:
+ nl = {} # type: Dict[str, Any]
+ bin = 'b'
+ else:
+ nl = {'newline': ''} # type: Dict[str, Any]
+ bin = ''
+ return open(name, mode + bin, **nl)
+
+
+def fix_script(path):
+ # type: (str) -> Optional[bool]
+ """Replace #!python with #!/path/to/python
+ Return True if file was changed.
+ """
+ # XXX RECORD hashes will need to be updated
+ if os.path.isfile(path):
+ with open(path, 'rb') as script:
+ firstline = script.readline()
+ if not firstline.startswith(b'#!python'):
+ return False
+ exename = sys.executable.encode(sys.getfilesystemencoding())
+ firstline = b'#!' + exename + os.linesep.encode("ascii")
+ rest = script.read()
+ with open(path, 'wb') as script:
+ script.write(firstline)
+ script.write(rest)
+ return True
+ return None
+
+
+def wheel_root_is_purelib(metadata):
+ # type: (Message) -> bool
+ return metadata.get("Root-Is-Purelib", "").lower() == "true"
+
+
+def get_entrypoints(filename):
+ # type: (str) -> Tuple[Dict[str, str], Dict[str, str]]
+ if not os.path.exists(filename):
+ return {}, {}
+
+ # This is done because you can pass a string to entry_points wrappers which
+ # means that they may or may not be valid INI files. The attempt here is to
+ # strip leading and trailing whitespace in order to make them valid INI
+ # files.
+ with open(filename) as fp:
+ data = StringIO()
+ for line in fp:
+ data.write(line.strip())
+ data.write("\n")
+ data.seek(0)
+
+ # get the entry points and then the script names
+ entry_points = pkg_resources.EntryPoint.parse_map(data)
+ console = entry_points.get('console_scripts', {})
+ gui = entry_points.get('gui_scripts', {})
+
+ def _split_ep(s):
+ # type: (pkg_resources.EntryPoint) -> Tuple[str, str]
+ """get the string representation of EntryPoint,
+ remove space and split on '='
+ """
+ split_parts = str(s).replace(" ", "").split("=")
+ return split_parts[0], split_parts[1]
+
+ # convert the EntryPoint objects into strings with module:function
+ console = dict(_split_ep(v) for v in console.values())
+ gui = dict(_split_ep(v) for v in gui.values())
+ return console, gui
+
+
+def message_about_scripts_not_on_PATH(scripts):
+ # type: (Sequence[str]) -> Optional[str]
+ """Determine if any scripts are not on PATH and format a warning.
+ Returns a warning message if one or more scripts are not on PATH,
+ otherwise None.
+ """
+ if not scripts:
+ return None
+
+ # Group scripts by the path they were installed in
+ grouped_by_dir = collections.defaultdict(set) # type: Dict[str, Set[str]]
+ for destfile in scripts:
+ parent_dir = os.path.dirname(destfile)
+ script_name = os.path.basename(destfile)
+ grouped_by_dir[parent_dir].add(script_name)
+
+ # We don't want to warn for directories that are on PATH.
+ not_warn_dirs = [
+ os.path.normcase(i).rstrip(os.sep) for i in
+ os.environ.get("PATH", "").split(os.pathsep)
+ ]
+ # If an executable sits with sys.executable, we don't warn for it.
+ # This covers the case of venv invocations without activating the venv.
+ not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable)))
+ warn_for = {
+ parent_dir: scripts for parent_dir, scripts in grouped_by_dir.items()
+ if os.path.normcase(parent_dir) not in not_warn_dirs
+ } # type: Dict[str, Set[str]]
+ if not warn_for:
+ return None
+
+ # Format a message
+ msg_lines = []
+ for parent_dir, dir_scripts in warn_for.items():
+ sorted_scripts = sorted(dir_scripts) # type: List[str]
+ if len(sorted_scripts) == 1:
+ start_text = "script {} is".format(sorted_scripts[0])
+ else:
+ start_text = "scripts {} are".format(
+ ", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1]
+ )
+
+ msg_lines.append(
+ "The {} installed in '{}' which is not on PATH."
+ .format(start_text, parent_dir)
+ )
+
+ last_line_fmt = (
+ "Consider adding {} to PATH or, if you prefer "
+ "to suppress this warning, use --no-warn-script-location."
+ )
+ if len(msg_lines) == 1:
+ msg_lines.append(last_line_fmt.format("this directory"))
+ else:
+ msg_lines.append(last_line_fmt.format("these directories"))
+
+ # Add a note if any directory starts with ~
+ warn_for_tilde = any(
+ i[0] == "~" for i in os.environ.get("PATH", "").split(os.pathsep) if i
+ )
+ if warn_for_tilde:
+ tilde_warning_msg = (
+ "NOTE: The current PATH contains path(s) starting with `~`, "
+ "which may not be expanded by all applications."
+ )
+ msg_lines.append(tilde_warning_msg)
+
+ # Returns the formatted multiline message
+ return "\n".join(msg_lines)
+
+
+def sorted_outrows(outrows):
+ # type: (Iterable[InstalledCSVRow]) -> List[InstalledCSVRow]
+ """Return the given rows of a RECORD file in sorted order.
+
+ Each row is a 3-tuple (path, hash, size) and corresponds to a record of
+ a RECORD file (see PEP 376 and PEP 427 for details). For the rows
+ passed to this function, the size can be an integer as an int or string,
+ or the empty string.
+ """
+ # Normally, there should only be one row per path, in which case the
+ # second and third elements don't come into play when sorting.
+ # However, in cases in the wild where a path might happen to occur twice,
+ # we don't want the sort operation to trigger an error (but still want
+ # determinism). Since the third element can be an int or string, we
+ # coerce each element to a string to avoid a TypeError in this case.
+ # For additional background, see--
+ # https://github.com/pypa/pip/issues/5868
+ return sorted(outrows, key=lambda row: tuple(str(x) for x in row))
+
+
+def get_csv_rows_for_installed(
+ old_csv_rows, # type: Iterable[List[str]]
+ installed, # type: Dict[str, str]
+ changed, # type: Set[str]
+ generated, # type: List[str]
+ lib_dir, # type: str
+):
+ # type: (...) -> List[InstalledCSVRow]
+ """
+ :param installed: A map from archive RECORD path to installation RECORD
+ path.
+ """
+ installed_rows = [] # type: List[InstalledCSVRow]
+ for row in old_csv_rows:
+ if len(row) > 3:
+ logger.warning(
+ 'RECORD line has more than three elements: {}'.format(row)
+ )
+ # Make a copy because we are mutating the row.
+ row = list(row)
+ old_path = row[0]
+ new_path = installed.pop(old_path, old_path)
+ row[0] = new_path
+ if new_path in changed:
+ digest, length = rehash(new_path)
+ row[1] = digest
+ row[2] = length
+ installed_rows.append(tuple(row))
+ for f in generated:
+ digest, length = rehash(f)
+ installed_rows.append((normpath(f, lib_dir), digest, str(length)))
+ for f in installed:
+ installed_rows.append((installed[f], '', ''))
+ return installed_rows
+
+
+class MissingCallableSuffix(Exception):
+ pass
+
+
+def _raise_for_invalid_entrypoint(specification):
+ # type: (str) -> None
+ entry = get_export_entry(specification)
+ if entry is not None and entry.suffix is None:
+ raise MissingCallableSuffix(str(entry))
+
+
+class PipScriptMaker(ScriptMaker):
+ def make(self, specification, options=None):
+ # type: (str, Dict[str, Any]) -> List[str]
+ _raise_for_invalid_entrypoint(specification)
+ return super(PipScriptMaker, self).make(specification, options)
+
+
+def install_unpacked_wheel(
+ name, # type: str
+ wheeldir, # type: str
+ wheel_zip, # type: ZipFile
+ scheme, # type: Scheme
+ req_description, # type: str
+ pycompile=True, # type: bool
+ warn_script_location=True # type: bool
+):
+ # type: (...) -> None
+ """Install a wheel.
+
+ :param name: Name of the project to install
+ :param wheeldir: Base directory of the unpacked wheel
+ :param wheel_zip: open ZipFile for wheel being installed
+ :param scheme: Distutils scheme dictating the install directories
+ :param req_description: String used in place of the requirement, for
+ logging
+ :param pycompile: Whether to byte-compile installed Python files
+ :param warn_script_location: Whether to check that scripts are installed
+ into a directory on PATH
+ :raises UnsupportedWheel:
+ * when the directory holds an unpacked wheel with incompatible
+ Wheel-Version
+ * when the .dist-info dir does not match the wheel
+ """
+ # TODO: Investigate and break this up.
+ # TODO: Look into moving this into a dedicated class for representing an
+ # installation.
+
+ source = wheeldir.rstrip(os.path.sep) + os.path.sep
+
+ info_dir, metadata = parse_wheel(wheel_zip, name)
+
+ if wheel_root_is_purelib(metadata):
+ lib_dir = scheme.purelib
+ else:
+ lib_dir = scheme.platlib
+
+ subdirs = os.listdir(source)
+ data_dirs = [s for s in subdirs if s.endswith('.data')]
+
+ # Record details of the files moved
+ # installed = files copied from the wheel to the destination
+ # changed = files changed while installing (scripts #! line typically)
+ # generated = files newly generated during the install (script wrappers)
+ installed = {} # type: Dict[str, str]
+ changed = set()
+ generated = [] # type: List[str]
+
+ # Compile all of the pyc files that we're going to be installing
+ if pycompile:
+ with captured_stdout() as stdout:
+ with warnings.catch_warnings():
+ warnings.filterwarnings('ignore')
+ compileall.compile_dir(source, force=True, quiet=True)
+ logger.debug(stdout.getvalue())
+
+ def record_installed(srcfile, destfile, modified=False):
+ # type: (str, str, bool) -> None
+ """Map archive RECORD paths to installation RECORD paths."""
+ oldpath = normpath(srcfile, wheeldir)
+ newpath = normpath(destfile, lib_dir)
+ installed[oldpath] = newpath
+ if modified:
+ changed.add(destfile)
+
+ def clobber(
+ source, # type: str
+ dest, # type: str
+ is_base, # type: bool
+ fixer=None, # type: Optional[Callable[[str], Any]]
+ filter=None # type: Optional[Callable[[str], bool]]
+ ):
+ # type: (...) -> None
+ ensure_dir(dest) # common for the 'include' path
+
+ for dir, subdirs, files in os.walk(source):
+ basedir = dir[len(source):].lstrip(os.path.sep)
+ destdir = os.path.join(dest, basedir)
+ if is_base and basedir == '':
+ subdirs[:] = [s for s in subdirs if not s.endswith('.data')]
+ for f in files:
+ # Skip unwanted files
+ if filter and filter(f):
+ continue
+ srcfile = os.path.join(dir, f)
+ destfile = os.path.join(dest, basedir, f)
+ # directory creation is lazy and after the file filtering above
+ # to ensure we don't install empty dirs; empty dirs can't be
+ # uninstalled.
+ ensure_dir(destdir)
+
+ # copyfile (called below) truncates the destination if it
+ # exists and then writes the new contents. This is fine in most
+ # cases, but can cause a segfault if pip has loaded a shared
+ # object (e.g. from pyopenssl through its vendored urllib3)
+ # Since the shared object is mmap'd an attempt to call a
+ # symbol in it will then cause a segfault. Unlinking the file
+ # allows writing of new contents while allowing the process to
+ # continue to use the old copy.
+ if os.path.exists(destfile):
+ os.unlink(destfile)
+
+ # We use copyfile (not move, copy, or copy2) to be extra sure
+ # that we are not moving directories over (copyfile fails for
+ # directories) as well as to ensure that we are not copying
+ # over any metadata because we want more control over what
+ # metadata we actually copy over.
+ shutil.copyfile(srcfile, destfile)
+
+ # Copy over the metadata for the file, currently this only
+ # includes the atime and mtime.
+ st = os.stat(srcfile)
+ if hasattr(os, "utime"):
+ os.utime(destfile, (st.st_atime, st.st_mtime))
+
+ # If our file is executable, then make our destination file
+ # executable.
+ if os.access(srcfile, os.X_OK):
+ st = os.stat(srcfile)
+ permissions = (
+ st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
+ )
+ os.chmod(destfile, permissions)
+
+ changed = False
+ if fixer:
+ changed = fixer(destfile)
+ record_installed(srcfile, destfile, changed)
+
+ clobber(source, lib_dir, True)
+
+ dest_info_dir = os.path.join(lib_dir, info_dir)
+
+ # Get the defined entry points
+ ep_file = os.path.join(dest_info_dir, 'entry_points.txt')
+ console, gui = get_entrypoints(ep_file)
+
+ def is_entrypoint_wrapper(name):
+ # type: (str) -> bool
+ # EP, EP.exe and EP-script.py are scripts generated for
+ # entry point EP by setuptools
+ if name.lower().endswith('.exe'):
+ matchname = name[:-4]
+ elif name.lower().endswith('-script.py'):
+ matchname = name[:-10]
+ elif name.lower().endswith(".pya"):
+ matchname = name[:-4]
+ else:
+ matchname = name
+ # Ignore setuptools-generated scripts
+ return (matchname in console or matchname in gui)
+
+ for datadir in data_dirs:
+ fixer = None
+ filter = None
+ for subdir in os.listdir(os.path.join(wheeldir, datadir)):
+ fixer = None
+ if subdir == 'scripts':
+ fixer = fix_script
+ filter = is_entrypoint_wrapper
+ source = os.path.join(wheeldir, datadir, subdir)
+ dest = getattr(scheme, subdir)
+ clobber(source, dest, False, fixer=fixer, filter=filter)
+
+ maker = PipScriptMaker(None, scheme.scripts)
+
+ # Ensure old scripts are overwritten.
+ # See https://github.com/pypa/pip/issues/1800
+ maker.clobber = True
+
+ # Ensure we don't generate any variants for scripts because this is almost
+ # never what somebody wants.
+ # See https://bitbucket.org/pypa/distlib/issue/35/
+ maker.variants = {''}
+
+ # This is required because otherwise distlib creates scripts that are not
+ # executable.
+ # See https://bitbucket.org/pypa/distlib/issue/32/
+ maker.set_mode = True
+
+ scripts_to_generate = []
+
+ # Special case pip and setuptools to generate versioned wrappers
+ #
+ # The issue is that some projects (specifically, pip and setuptools) use
+ # code in setup.py to create "versioned" entry points - pip2.7 on Python
+ # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
+ # the wheel metadata at build time, and so if the wheel is installed with
+ # a *different* version of Python the entry points will be wrong. The
+ # correct fix for this is to enhance the metadata to be able to describe
+ # such versioned entry points, but that won't happen till Metadata 2.0 is
+ # available.
+ # In the meantime, projects using versioned entry points will either have
+ # incorrect versioned entry points, or they will not be able to distribute
+ # "universal" wheels (i.e., they will need a wheel per Python version).
+ #
+ # Because setuptools and pip are bundled with _ensurepip and virtualenv,
+ # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
+ # override the versioned entry points in the wheel and generate the
+ # correct ones. This code is purely a short-term measure until Metadata 2.0
+ # is available.
+ #
+ # To add the level of hack in this section of code, in order to support
+ # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment
+ # variable which will control which version scripts get installed.
+ #
+ # ENSUREPIP_OPTIONS=altinstall
+ # - Only pipX.Y and easy_install-X.Y will be generated and installed
+ # ENSUREPIP_OPTIONS=install
+ # - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
+ # that this option is technically if ENSUREPIP_OPTIONS is set and is
+ # not altinstall
+ # DEFAULT
+ # - The default behavior is to install pip, pipX, pipX.Y, easy_install
+ # and easy_install-X.Y.
+ pip_script = console.pop('pip', None)
+ if pip_script:
+ if "ENSUREPIP_OPTIONS" not in os.environ:
+ scripts_to_generate.append('pip = ' + pip_script)
+
+ if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
+ scripts_to_generate.append(
+ 'pip%s = %s' % (sys.version_info[0], pip_script)
+ )
+
+ scripts_to_generate.append(
+ 'pip%s = %s' % (get_major_minor_version(), pip_script)
+ )
+ # Delete any other versioned pip entry points
+ pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
+ for k in pip_ep:
+ del console[k]
+ easy_install_script = console.pop('easy_install', None)
+ if easy_install_script:
+ if "ENSUREPIP_OPTIONS" not in os.environ:
+ scripts_to_generate.append(
+ 'easy_install = ' + easy_install_script
+ )
+
+ scripts_to_generate.append(
+ 'easy_install-%s = %s' % (
+ get_major_minor_version(), easy_install_script
+ )
+ )
+ # Delete any other versioned easy_install entry points
+ easy_install_ep = [
+ k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
+ ]
+ for k in easy_install_ep:
+ del console[k]
+
+ # Generate the console and GUI entry points specified in the wheel
+ scripts_to_generate.extend(
+ '%s = %s' % kv for kv in console.items()
+ )
+
+ gui_scripts_to_generate = [
+ '%s = %s' % kv for kv in gui.items()
+ ]
+
+ generated_console_scripts = [] # type: List[str]
+
+ try:
+ generated_console_scripts = maker.make_multiple(scripts_to_generate)
+ generated.extend(generated_console_scripts)
+
+ generated.extend(
+ maker.make_multiple(gui_scripts_to_generate, {'gui': True})
+ )
+ except MissingCallableSuffix as e:
+ entry = e.args[0]
+ raise InstallationError(
+ "Invalid script entry point: {} for req: {} - A callable "
+ "suffix is required. Cf https://packaging.python.org/"
+ "specifications/entry-points/#use-for-scripts for more "
+ "information.".format(entry, req_description)
+ )
+
+ if warn_script_location:
+ msg = message_about_scripts_not_on_PATH(generated_console_scripts)
+ if msg is not None:
+ logger.warning(msg)
+
+ # Record pip as the installer
+ installer = os.path.join(dest_info_dir, 'INSTALLER')
+ temp_installer = os.path.join(dest_info_dir, 'INSTALLER.pip')
+ with open(temp_installer, 'wb') as installer_file:
+ installer_file.write(b'pip\n')
+ shutil.move(temp_installer, installer)
+ generated.append(installer)
+
+ # Record details of all files installed
+ record = os.path.join(dest_info_dir, 'RECORD')
+ temp_record = os.path.join(dest_info_dir, 'RECORD.pip')
+ with open_for_csv(record, 'r') as record_in:
+ with open_for_csv(temp_record, 'w+') as record_out:
+ reader = csv.reader(record_in)
+ outrows = get_csv_rows_for_installed(
+ reader, installed=installed, changed=changed,
+ generated=generated, lib_dir=lib_dir,
+ )
+ writer = csv.writer(record_out)
+ # Sort to simplify testing.
+ for row in sorted_outrows(outrows):
+ writer.writerow(row)
+ shutil.move(temp_record, record)
+
+
+def install_wheel(
+ name, # type: str
+ wheel_path, # type: str
+ scheme, # type: Scheme
+ req_description, # type: str
+ pycompile=True, # type: bool
+ warn_script_location=True, # type: bool
+ _temp_dir_for_testing=None, # type: Optional[str]
+):
+ # type: (...) -> None
+ with TempDirectory(
+ path=_temp_dir_for_testing, kind="unpacked-wheel"
+ ) as unpacked_dir, ZipFile(wheel_path, allowZip64=True) as z:
+ unpack_file(wheel_path, unpacked_dir.path)
+ install_unpacked_wheel(
+ name=name,
+ wheeldir=unpacked_dir.path,
+ wheel_zip=z,
+ scheme=scheme,
+ req_description=req_description,
+ pycompile=pycompile,
+ warn_script_location=warn_script_location,
+ )
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/prepare.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/prepare.py
new file mode 100644
index 0000000000000000000000000000000000000000..0b61f20524d976cd2bdc2fbc0a7e32bf13729d41
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/operations/prepare.py
@@ -0,0 +1,591 @@
+"""Prepares a distribution for installation
+"""
+
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+import logging
+import mimetypes
+import os
+import shutil
+import sys
+
+from pip._vendor import requests
+from pip._vendor.six import PY2
+
+from pip._internal.distributions import (
+ make_distribution_for_install_requirement,
+)
+from pip._internal.distributions.installed import InstalledDistribution
+from pip._internal.exceptions import (
+ DirectoryUrlHashUnsupported,
+ HashMismatch,
+ HashUnpinned,
+ InstallationError,
+ PreviousBuildDirError,
+ VcsHashUnsupported,
+)
+from pip._internal.utils.filesystem import copy2_fixed
+from pip._internal.utils.hashes import MissingHashes
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.marker_files import write_delete_marker_file
+from pip._internal.utils.misc import (
+ ask_path_exists,
+ backup_dir,
+ display_path,
+ hide_url,
+ path_to_display,
+ rmtree,
+)
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.unpacking import unpack_file
+from pip._internal.vcs import vcs
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Callable, List, Optional, Tuple,
+ )
+
+ from mypy_extensions import TypedDict
+
+ from pip._internal.distributions import AbstractDistribution
+ from pip._internal.index.package_finder import PackageFinder
+ from pip._internal.models.link import Link
+ from pip._internal.network.download import Downloader
+ from pip._internal.req.req_install import InstallRequirement
+ from pip._internal.req.req_tracker import RequirementTracker
+ from pip._internal.utils.hashes import Hashes
+
+ if PY2:
+ CopytreeKwargs = TypedDict(
+ 'CopytreeKwargs',
+ {
+ 'ignore': Callable[[str, List[str]], List[str]],
+ 'symlinks': bool,
+ },
+ total=False,
+ )
+ else:
+ CopytreeKwargs = TypedDict(
+ 'CopytreeKwargs',
+ {
+ 'copy_function': Callable[[str, str], None],
+ 'ignore': Callable[[str, List[str]], List[str]],
+ 'ignore_dangling_symlinks': bool,
+ 'symlinks': bool,
+ },
+ total=False,
+ )
+
+logger = logging.getLogger(__name__)
+
+
+def _get_prepared_distribution(
+ req, # type: InstallRequirement
+ req_tracker, # type: RequirementTracker
+ finder, # type: PackageFinder
+ build_isolation # type: bool
+):
+ # type: (...) -> AbstractDistribution
+ """Prepare a distribution for installation.
+ """
+ abstract_dist = make_distribution_for_install_requirement(req)
+ with req_tracker.track(req):
+ abstract_dist.prepare_distribution_metadata(finder, build_isolation)
+ return abstract_dist
+
+
+def unpack_vcs_link(link, location):
+ # type: (Link, str) -> None
+ vcs_backend = vcs.get_backend_for_scheme(link.scheme)
+ assert vcs_backend is not None
+ vcs_backend.unpack(location, url=hide_url(link.url))
+
+
+def _copy_file(filename, location, link):
+ # type: (str, str, Link) -> None
+ copy = True
+ download_location = os.path.join(location, link.filename)
+ if os.path.exists(download_location):
+ response = ask_path_exists(
+ 'The file {} exists. (i)gnore, (w)ipe, (b)ackup, (a)abort'.format(
+ display_path(download_location)
+ ),
+ ('i', 'w', 'b', 'a'),
+ )
+ if response == 'i':
+ copy = False
+ elif response == 'w':
+ logger.warning('Deleting %s', display_path(download_location))
+ os.remove(download_location)
+ elif response == 'b':
+ dest_file = backup_dir(download_location)
+ logger.warning(
+ 'Backing up %s to %s',
+ display_path(download_location),
+ display_path(dest_file),
+ )
+ shutil.move(download_location, dest_file)
+ elif response == 'a':
+ sys.exit(-1)
+ if copy:
+ shutil.copy(filename, download_location)
+ logger.info('Saved %s', display_path(download_location))
+
+
+def unpack_http_url(
+ link, # type: Link
+ location, # type: str
+ downloader, # type: Downloader
+ download_dir=None, # type: Optional[str]
+ hashes=None, # type: Optional[Hashes]
+):
+ # type: (...) -> str
+ temp_dir = TempDirectory(kind="unpack", globally_managed=True)
+ # If a download dir is specified, is the file already downloaded there?
+ already_downloaded_path = None
+ if download_dir:
+ already_downloaded_path = _check_download_dir(
+ link, download_dir, hashes
+ )
+
+ if already_downloaded_path:
+ from_path = already_downloaded_path
+ content_type = mimetypes.guess_type(from_path)[0]
+ else:
+ # let's download to a tmp dir
+ from_path, content_type = _download_http_url(
+ link, downloader, temp_dir.path, hashes
+ )
+
+ # unpack the archive to the build dir location. even when only
+ # downloading archives, they have to be unpacked to parse dependencies
+ unpack_file(from_path, location, content_type)
+
+ return from_path
+
+
+def _copy2_ignoring_special_files(src, dest):
+ # type: (str, str) -> None
+ """Copying special files is not supported, but as a convenience to users
+ we skip errors copying them. This supports tools that may create e.g.
+ socket files in the project source directory.
+ """
+ try:
+ copy2_fixed(src, dest)
+ except shutil.SpecialFileError as e:
+ # SpecialFileError may be raised due to either the source or
+ # destination. If the destination was the cause then we would actually
+ # care, but since the destination directory is deleted prior to
+ # copy we ignore all of them assuming it is caused by the source.
+ logger.warning(
+ "Ignoring special file error '%s' encountered copying %s to %s.",
+ str(e),
+ path_to_display(src),
+ path_to_display(dest),
+ )
+
+
+def _copy_source_tree(source, target):
+ # type: (str, str) -> None
+ def ignore(d, names):
+ # type: (str, List[str]) -> List[str]
+ # Pulling in those directories can potentially be very slow,
+ # exclude the following directories if they appear in the top
+ # level dir (and only it).
+ # See discussion at https://github.com/pypa/pip/pull/6770
+ return ['.tox', '.nox'] if d == source else []
+
+ kwargs = dict(ignore=ignore, symlinks=True) # type: CopytreeKwargs
+
+ if not PY2:
+ # Python 2 does not support copy_function, so we only ignore
+ # errors on special file copy in Python 3.
+ kwargs['copy_function'] = _copy2_ignoring_special_files
+
+ shutil.copytree(source, target, **kwargs)
+
+
+def unpack_file_url(
+ link, # type: Link
+ location, # type: str
+ download_dir=None, # type: Optional[str]
+ hashes=None # type: Optional[Hashes]
+):
+ # type: (...) -> Optional[str]
+ """Unpack link into location.
+ """
+ link_path = link.file_path
+ # If it's a url to a local directory
+ if link.is_existing_dir():
+ if os.path.isdir(location):
+ rmtree(location)
+ _copy_source_tree(link_path, location)
+ return None
+
+ # If a download dir is specified, is the file already there and valid?
+ already_downloaded_path = None
+ if download_dir:
+ already_downloaded_path = _check_download_dir(
+ link, download_dir, hashes
+ )
+
+ if already_downloaded_path:
+ from_path = already_downloaded_path
+ else:
+ from_path = link_path
+
+ # If --require-hashes is off, `hashes` is either empty, the
+ # link's embedded hash, or MissingHashes; it is required to
+ # match. If --require-hashes is on, we are satisfied by any
+ # hash in `hashes` matching: a URL-based or an option-based
+ # one; no internet-sourced hash will be in `hashes`.
+ if hashes:
+ hashes.check_against_path(from_path)
+
+ content_type = mimetypes.guess_type(from_path)[0]
+
+ # unpack the archive to the build dir location. even when only downloading
+ # archives, they have to be unpacked to parse dependencies
+ unpack_file(from_path, location, content_type)
+
+ return from_path
+
+
+def unpack_url(
+ link, # type: Link
+ location, # type: str
+ downloader, # type: Downloader
+ download_dir=None, # type: Optional[str]
+ hashes=None, # type: Optional[Hashes]
+):
+ # type: (...) -> Optional[str]
+ """Unpack link into location, downloading if required.
+
+ :param hashes: A Hashes object, one of whose embedded hashes must match,
+ or HashMismatch will be raised. If the Hashes is empty, no matches are
+ required, and unhashable types of requirements (like VCS ones, which
+ would ordinarily raise HashUnsupported) are allowed.
+ """
+ # non-editable vcs urls
+ if link.is_vcs:
+ unpack_vcs_link(link, location)
+ return None
+
+ # file urls
+ elif link.is_file:
+ return unpack_file_url(link, location, download_dir, hashes=hashes)
+
+ # http urls
+ else:
+ return unpack_http_url(
+ link,
+ location,
+ downloader,
+ download_dir,
+ hashes=hashes,
+ )
+
+
+def _download_http_url(
+ link, # type: Link
+ downloader, # type: Downloader
+ temp_dir, # type: str
+ hashes, # type: Optional[Hashes]
+):
+ # type: (...) -> Tuple[str, str]
+ """Download link url into temp_dir using provided session"""
+ download = downloader(link)
+
+ file_path = os.path.join(temp_dir, download.filename)
+ with open(file_path, 'wb') as content_file:
+ for chunk in download.chunks:
+ content_file.write(chunk)
+
+ if hashes:
+ hashes.check_against_path(file_path)
+
+ return file_path, download.response.headers.get('content-type', '')
+
+
+def _check_download_dir(link, download_dir, hashes):
+ # type: (Link, str, Optional[Hashes]) -> Optional[str]
+ """ Check download_dir for previously downloaded file with correct hash
+ If a correct file is found return its path else None
+ """
+ download_path = os.path.join(download_dir, link.filename)
+
+ if not os.path.exists(download_path):
+ return None
+
+ # If already downloaded, does its hash match?
+ logger.info('File was already downloaded %s', download_path)
+ if hashes:
+ try:
+ hashes.check_against_path(download_path)
+ except HashMismatch:
+ logger.warning(
+ 'Previously-downloaded file %s has bad hash. '
+ 'Re-downloading.',
+ download_path
+ )
+ os.unlink(download_path)
+ return None
+ return download_path
+
+
+class RequirementPreparer(object):
+ """Prepares a Requirement
+ """
+
+ def __init__(
+ self,
+ build_dir, # type: str
+ download_dir, # type: Optional[str]
+ src_dir, # type: str
+ wheel_download_dir, # type: Optional[str]
+ build_isolation, # type: bool
+ req_tracker, # type: RequirementTracker
+ downloader, # type: Downloader
+ finder, # type: PackageFinder
+ require_hashes, # type: bool
+ use_user_site, # type: bool
+ ):
+ # type: (...) -> None
+ super(RequirementPreparer, self).__init__()
+
+ self.src_dir = src_dir
+ self.build_dir = build_dir
+ self.req_tracker = req_tracker
+ self.downloader = downloader
+ self.finder = finder
+
+ # Where still-packed archives should be written to. If None, they are
+ # not saved, and are deleted immediately after unpacking.
+ self.download_dir = download_dir
+
+ # Where still-packed .whl files should be written to. If None, they are
+ # written to the download_dir parameter. Separate to download_dir to
+ # permit only keeping wheel archives for pip wheel.
+ self.wheel_download_dir = wheel_download_dir
+
+ # NOTE
+ # download_dir and wheel_download_dir overlap semantically and may
+ # be combined if we're willing to have non-wheel archives present in
+ # the wheelhouse output by 'pip wheel'.
+
+ # Is build isolation allowed?
+ self.build_isolation = build_isolation
+
+ # Should hash-checking be required?
+ self.require_hashes = require_hashes
+
+ # Should install in user site-packages?
+ self.use_user_site = use_user_site
+
+ @property
+ def _download_should_save(self):
+ # type: () -> bool
+ if not self.download_dir:
+ return False
+
+ if os.path.exists(self.download_dir):
+ return True
+
+ logger.critical('Could not find download directory')
+ raise InstallationError(
+ "Could not find or access download directory '{}'"
+ .format(self.download_dir))
+
+ def prepare_linked_requirement(
+ self,
+ req, # type: InstallRequirement
+ ):
+ # type: (...) -> AbstractDistribution
+ """Prepare a requirement that would be obtained from req.link
+ """
+ assert req.link
+ link = req.link
+
+ # TODO: Breakup into smaller functions
+ if link.scheme == 'file':
+ path = link.file_path
+ logger.info('Processing %s', display_path(path))
+ else:
+ logger.info('Collecting %s', req.req or req)
+
+ with indent_log():
+ # @@ if filesystem packages are not marked
+ # editable in a req, a non deterministic error
+ # occurs when the script attempts to unpack the
+ # build directory
+ # Since source_dir is only set for editable requirements.
+ assert req.source_dir is None
+ req.ensure_has_source_dir(self.build_dir)
+ # If a checkout exists, it's unwise to keep going. version
+ # inconsistencies are logged later, but do not fail the
+ # installation.
+ # FIXME: this won't upgrade when there's an existing
+ # package unpacked in `req.source_dir`
+ if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
+ raise PreviousBuildDirError(
+ "pip can't proceed with requirements '{}' due to a"
+ " pre-existing build directory ({}). This is "
+ "likely due to a previous installation that failed"
+ ". pip is being responsible and not assuming it "
+ "can delete this. Please delete it and try again."
+ .format(req, req.source_dir)
+ )
+
+ # Now that we have the real link, we can tell what kind of
+ # requirements we have and raise some more informative errors
+ # than otherwise. (For example, we can raise VcsHashUnsupported
+ # for a VCS URL rather than HashMissing.)
+ if self.require_hashes:
+ # We could check these first 2 conditions inside
+ # unpack_url and save repetition of conditions, but then
+ # we would report less-useful error messages for
+ # unhashable requirements, complaining that there's no
+ # hash provided.
+ if link.is_vcs:
+ raise VcsHashUnsupported()
+ elif link.is_existing_dir():
+ raise DirectoryUrlHashUnsupported()
+ if not req.original_link and not req.is_pinned:
+ # Unpinned packages are asking for trouble when a new
+ # version is uploaded. This isn't a security check, but
+ # it saves users a surprising hash mismatch in the
+ # future.
+ #
+ # file:/// URLs aren't pinnable, so don't complain
+ # about them not being pinned.
+ raise HashUnpinned()
+
+ hashes = req.hashes(trust_internet=not self.require_hashes)
+ if self.require_hashes and not hashes:
+ # Known-good hashes are missing for this requirement, so
+ # shim it with a facade object that will provoke hash
+ # computation and then raise a HashMissing exception
+ # showing the user what the hash should be.
+ hashes = MissingHashes()
+
+ download_dir = self.download_dir
+ if link.is_wheel and self.wheel_download_dir:
+ # when doing 'pip wheel` we download wheels to a
+ # dedicated dir.
+ download_dir = self.wheel_download_dir
+
+ try:
+ local_path = unpack_url(
+ link, req.source_dir, self.downloader, download_dir,
+ hashes=hashes,
+ )
+ except requests.HTTPError as exc:
+ logger.critical(
+ 'Could not install requirement %s because of error %s',
+ req,
+ exc,
+ )
+ raise InstallationError(
+ 'Could not install requirement {} because of HTTP '
+ 'error {} for URL {}'.format(req, exc, link)
+ )
+
+ # For use in later processing, preserve the file path on the
+ # requirement.
+ if local_path:
+ req.local_file_path = local_path
+
+ if link.is_wheel:
+ if download_dir:
+ # When downloading, we only unpack wheels to get
+ # metadata.
+ autodelete_unpacked = True
+ else:
+ # When installing a wheel, we use the unpacked
+ # wheel.
+ autodelete_unpacked = False
+ else:
+ # We always delete unpacked sdists after pip runs.
+ autodelete_unpacked = True
+ if autodelete_unpacked:
+ write_delete_marker_file(req.source_dir)
+
+ abstract_dist = _get_prepared_distribution(
+ req, self.req_tracker, self.finder, self.build_isolation,
+ )
+
+ if download_dir:
+ if link.is_existing_dir():
+ logger.info('Link is a directory, ignoring download_dir')
+ elif local_path and not os.path.exists(
+ os.path.join(download_dir, link.filename)
+ ):
+ _copy_file(local_path, download_dir, link)
+
+ if self._download_should_save:
+ # Make a .zip of the source_dir we already created.
+ if link.is_vcs:
+ req.archive(self.download_dir)
+ return abstract_dist
+
+ def prepare_editable_requirement(
+ self,
+ req, # type: InstallRequirement
+ ):
+ # type: (...) -> AbstractDistribution
+ """Prepare an editable requirement
+ """
+ assert req.editable, "cannot prepare a non-editable req as editable"
+
+ logger.info('Obtaining %s', req)
+
+ with indent_log():
+ if self.require_hashes:
+ raise InstallationError(
+ 'The editable requirement {} cannot be installed when '
+ 'requiring hashes, because there is no single file to '
+ 'hash.'.format(req)
+ )
+ req.ensure_has_source_dir(self.src_dir)
+ req.update_editable(not self._download_should_save)
+
+ abstract_dist = _get_prepared_distribution(
+ req, self.req_tracker, self.finder, self.build_isolation,
+ )
+
+ if self._download_should_save:
+ req.archive(self.download_dir)
+ req.check_if_exists(self.use_user_site)
+
+ return abstract_dist
+
+ def prepare_installed_requirement(
+ self,
+ req, # type: InstallRequirement
+ skip_reason # type: str
+ ):
+ # type: (...) -> AbstractDistribution
+ """Prepare an already-installed requirement
+ """
+ assert req.satisfied_by, "req should have been satisfied but isn't"
+ assert skip_reason is not None, (
+ "did not get skip reason skipped but req.satisfied_by "
+ "is set to {}".format(req.satisfied_by)
+ )
+ logger.info(
+ 'Requirement %s: %s (%s)',
+ skip_reason, req, req.satisfied_by.version
+ )
+ with indent_log():
+ if self.require_hashes:
+ logger.debug(
+ 'Since it is already installed, we are trusting this '
+ 'package without checking its hash. To ensure a '
+ 'completely repeatable environment, install into an '
+ 'empty virtualenv.'
+ )
+ abstract_dist = InstalledDistribution(req)
+
+ return abstract_dist
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/pep425tags.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/pep425tags.py
new file mode 100644
index 0000000000000000000000000000000000000000..a2386ee75b893d3e52ac54cd8f35c785b47d5519
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/pep425tags.py
@@ -0,0 +1,167 @@
+"""Generate and work with PEP 425 Compatibility Tags."""
+from __future__ import absolute_import
+
+import logging
+import re
+
+from pip._vendor.packaging.tags import (
+ Tag,
+ compatible_tags,
+ cpython_tags,
+ generic_tags,
+ interpreter_name,
+ interpreter_version,
+ mac_platforms,
+)
+
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import List, Optional, Tuple
+
+ from pip._vendor.packaging.tags import PythonVersion
+
+logger = logging.getLogger(__name__)
+
+_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)')
+
+
+def version_info_to_nodot(version_info):
+ # type: (Tuple[int, ...]) -> str
+ # Only use up to the first two numbers.
+ return ''.join(map(str, version_info[:2]))
+
+
+def _mac_platforms(arch):
+ # type: (str) -> List[str]
+ match = _osx_arch_pat.match(arch)
+ if match:
+ name, major, minor, actual_arch = match.groups()
+ mac_version = (int(major), int(minor))
+ arches = [
+ # Since we have always only checked that the platform starts
+ # with "macosx", for backwards-compatibility we extract the
+ # actual prefix provided by the user in case they provided
+ # something like "macosxcustom_". It may be good to remove
+ # this as undocumented or deprecate it in the future.
+ '{}_{}'.format(name, arch[len('macosx_'):])
+ for arch in mac_platforms(mac_version, actual_arch)
+ ]
+ else:
+ # arch pattern didn't match (?!)
+ arches = [arch]
+ return arches
+
+
+def _custom_manylinux_platforms(arch):
+ # type: (str) -> List[str]
+ arches = [arch]
+ arch_prefix, arch_sep, arch_suffix = arch.partition('_')
+ if arch_prefix == 'manylinux2014':
+ # manylinux1/manylinux2010 wheels run on most manylinux2014 systems
+ # with the exception of wheels depending on ncurses. PEP 599 states
+ # manylinux1/manylinux2010 wheels should be considered
+ # manylinux2014 wheels:
+ # https://www.python.org/dev/peps/pep-0599/#backwards-compatibility-with-manylinux2010-wheels
+ if arch_suffix in {'i686', 'x86_64'}:
+ arches.append('manylinux2010' + arch_sep + arch_suffix)
+ arches.append('manylinux1' + arch_sep + arch_suffix)
+ elif arch_prefix == 'manylinux2010':
+ # manylinux1 wheels run on most manylinux2010 systems with the
+ # exception of wheels depending on ncurses. PEP 571 states
+ # manylinux1 wheels should be considered manylinux2010 wheels:
+ # https://www.python.org/dev/peps/pep-0571/#backwards-compatibility-with-manylinux1-wheels
+ arches.append('manylinux1' + arch_sep + arch_suffix)
+ return arches
+
+
+def _get_custom_platforms(arch):
+ # type: (str) -> List[str]
+ arch_prefix, arch_sep, arch_suffix = arch.partition('_')
+ if arch.startswith('macosx'):
+ arches = _mac_platforms(arch)
+ elif arch_prefix in ['manylinux2014', 'manylinux2010']:
+ arches = _custom_manylinux_platforms(arch)
+ else:
+ arches = [arch]
+ return arches
+
+
+def _get_python_version(version):
+ # type: (str) -> PythonVersion
+ if len(version) > 1:
+ return int(version[0]), int(version[1:])
+ else:
+ return (int(version[0]),)
+
+
+def _get_custom_interpreter(implementation=None, version=None):
+ # type: (Optional[str], Optional[str]) -> str
+ if implementation is None:
+ implementation = interpreter_name()
+ if version is None:
+ version = interpreter_version()
+ return "{}{}".format(implementation, version)
+
+
+def get_supported(
+ version=None, # type: Optional[str]
+ platform=None, # type: Optional[str]
+ impl=None, # type: Optional[str]
+ abi=None # type: Optional[str]
+):
+ # type: (...) -> List[Tag]
+ """Return a list of supported tags for each version specified in
+ `versions`.
+
+ :param version: a string version, of the form "33" or "32",
+ or None. The version will be assumed to support our ABI.
+ :param platform: specify the exact platform you want valid
+ tags for, or None. If None, use the local system platform.
+ :param impl: specify the exact implementation you want valid
+ tags for, or None. If None, use the local interpreter impl.
+ :param abi: specify the exact abi you want valid
+ tags for, or None. If None, use the local interpreter abi.
+ """
+ supported = [] # type: List[Tag]
+
+ python_version = None # type: Optional[PythonVersion]
+ if version is not None:
+ python_version = _get_python_version(version)
+
+ interpreter = _get_custom_interpreter(impl, version)
+
+ abis = None # type: Optional[List[str]]
+ if abi is not None:
+ abis = [abi]
+
+ platforms = None # type: Optional[List[str]]
+ if platform is not None:
+ platforms = _get_custom_platforms(platform)
+
+ is_cpython = (impl or interpreter_name()) == "cp"
+ if is_cpython:
+ supported.extend(
+ cpython_tags(
+ python_version=python_version,
+ abis=abis,
+ platforms=platforms,
+ )
+ )
+ else:
+ supported.extend(
+ generic_tags(
+ interpreter=interpreter,
+ abis=abis,
+ platforms=platforms,
+ )
+ )
+ supported.extend(
+ compatible_tags(
+ python_version=python_version,
+ interpreter=interpreter,
+ platforms=platforms,
+ )
+ )
+
+ return supported
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/pyproject.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/pyproject.py
new file mode 100644
index 0000000000000000000000000000000000000000..6b4faf7a7527cecf3fdd1cb32f8193d358f3c8fe
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/pyproject.py
@@ -0,0 +1,196 @@
+from __future__ import absolute_import
+
+import io
+import os
+import sys
+from collections import namedtuple
+
+from pip._vendor import six, toml
+from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
+
+from pip._internal.exceptions import InstallationError
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Any, Optional, List
+
+
+def _is_list_of_str(obj):
+ # type: (Any) -> bool
+ return (
+ isinstance(obj, list) and
+ all(isinstance(item, six.string_types) for item in obj)
+ )
+
+
+def make_pyproject_path(unpacked_source_directory):
+ # type: (str) -> str
+ path = os.path.join(unpacked_source_directory, 'pyproject.toml')
+
+ # Python2 __file__ should not be unicode
+ if six.PY2 and isinstance(path, six.text_type):
+ path = path.encode(sys.getfilesystemencoding())
+
+ return path
+
+
+BuildSystemDetails = namedtuple('BuildSystemDetails', [
+ 'requires', 'backend', 'check', 'backend_path'
+])
+
+
+def load_pyproject_toml(
+ use_pep517, # type: Optional[bool]
+ pyproject_toml, # type: str
+ setup_py, # type: str
+ req_name # type: str
+):
+ # type: (...) -> Optional[BuildSystemDetails]
+ """Load the pyproject.toml file.
+
+ Parameters:
+ use_pep517 - Has the user requested PEP 517 processing? None
+ means the user hasn't explicitly specified.
+ pyproject_toml - Location of the project's pyproject.toml file
+ setup_py - Location of the project's setup.py file
+ req_name - The name of the requirement we're processing (for
+ error reporting)
+
+ Returns:
+ None if we should use the legacy code path, otherwise a tuple
+ (
+ requirements from pyproject.toml,
+ name of PEP 517 backend,
+ requirements we should check are installed after setting
+ up the build environment
+ directory paths to import the backend from (backend-path),
+ relative to the project root.
+ )
+ """
+ has_pyproject = os.path.isfile(pyproject_toml)
+ has_setup = os.path.isfile(setup_py)
+
+ if has_pyproject:
+ with io.open(pyproject_toml, encoding="utf-8") as f:
+ pp_toml = toml.load(f)
+ build_system = pp_toml.get("build-system")
+ else:
+ build_system = None
+
+ # The following cases must use PEP 517
+ # We check for use_pep517 being non-None and falsey because that means
+ # the user explicitly requested --no-use-pep517. The value 0 as
+ # opposed to False can occur when the value is provided via an
+ # environment variable or config file option (due to the quirk of
+ # strtobool() returning an integer in pip's configuration code).
+ if has_pyproject and not has_setup:
+ if use_pep517 is not None and not use_pep517:
+ raise InstallationError(
+ "Disabling PEP 517 processing is invalid: "
+ "project does not have a setup.py"
+ )
+ use_pep517 = True
+ elif build_system and "build-backend" in build_system:
+ if use_pep517 is not None and not use_pep517:
+ raise InstallationError(
+ "Disabling PEP 517 processing is invalid: "
+ "project specifies a build backend of {} "
+ "in pyproject.toml".format(
+ build_system["build-backend"]
+ )
+ )
+ use_pep517 = True
+
+ # If we haven't worked out whether to use PEP 517 yet,
+ # and the user hasn't explicitly stated a preference,
+ # we do so if the project has a pyproject.toml file.
+ elif use_pep517 is None:
+ use_pep517 = has_pyproject
+
+ # At this point, we know whether we're going to use PEP 517.
+ assert use_pep517 is not None
+
+ # If we're using the legacy code path, there is nothing further
+ # for us to do here.
+ if not use_pep517:
+ return None
+
+ if build_system is None:
+ # Either the user has a pyproject.toml with no build-system
+ # section, or the user has no pyproject.toml, but has opted in
+ # explicitly via --use-pep517.
+ # In the absence of any explicit backend specification, we
+ # assume the setuptools backend that most closely emulates the
+ # traditional direct setup.py execution, and require wheel and
+ # a version of setuptools that supports that backend.
+
+ build_system = {
+ "requires": ["setuptools>=40.8.0", "wheel"],
+ "build-backend": "setuptools.build_meta:__legacy__",
+ }
+
+ # If we're using PEP 517, we have build system information (either
+ # from pyproject.toml, or defaulted by the code above).
+ # Note that at this point, we do not know if the user has actually
+ # specified a backend, though.
+ assert build_system is not None
+
+ # Ensure that the build-system section in pyproject.toml conforms
+ # to PEP 518.
+ error_template = (
+ "{package} has a pyproject.toml file that does not comply "
+ "with PEP 518: {reason}"
+ )
+
+ # Specifying the build-system table but not the requires key is invalid
+ if "requires" not in build_system:
+ raise InstallationError(
+ error_template.format(package=req_name, reason=(
+ "it has a 'build-system' table but not "
+ "'build-system.requires' which is mandatory in the table"
+ ))
+ )
+
+ # Error out if requires is not a list of strings
+ requires = build_system["requires"]
+ if not _is_list_of_str(requires):
+ raise InstallationError(error_template.format(
+ package=req_name,
+ reason="'build-system.requires' is not a list of strings.",
+ ))
+
+ # Each requirement must be valid as per PEP 508
+ for requirement in requires:
+ try:
+ Requirement(requirement)
+ except InvalidRequirement:
+ raise InstallationError(
+ error_template.format(
+ package=req_name,
+ reason=(
+ "'build-system.requires' contains an invalid "
+ "requirement: {!r}".format(requirement)
+ ),
+ )
+ )
+
+ backend = build_system.get("build-backend")
+ backend_path = build_system.get("backend-path", [])
+ check = [] # type: List[str]
+ if backend is None:
+ # If the user didn't specify a backend, we assume they want to use
+ # the setuptools backend. But we can't be sure they have included
+ # a version of setuptools which supplies the backend, or wheel
+ # (which is needed by the backend) in their requirements. So we
+ # make a note to check that those requirements are present once
+ # we have set up the environment.
+ # This is quite a lot of work to check for a very specific case. But
+ # the problem is, that case is potentially quite common - projects that
+ # adopted PEP 518 early for the ability to specify requirements to
+ # execute setup.py, but never considered needing to mention the build
+ # tools themselves. The original PEP 518 code had a similar check (but
+ # implemented in a different way).
+ backend = "setuptools.build_meta:__legacy__"
+ check = ["setuptools>=40.8.0", "wheel"]
+
+ return BuildSystemDetails(requires, backend, check, backend_path)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/__init__.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..d2d027adeec4dbedaf62f95b070d7fd9f1fbbe60
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/__init__.py
@@ -0,0 +1,92 @@
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+from __future__ import absolute_import
+
+import logging
+
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+from .req_file import parse_requirements
+from .req_install import InstallRequirement
+from .req_set import RequirementSet
+
+if MYPY_CHECK_RUNNING:
+ from typing import Any, List, Sequence
+
+__all__ = [
+ "RequirementSet", "InstallRequirement",
+ "parse_requirements", "install_given_reqs",
+]
+
+logger = logging.getLogger(__name__)
+
+
+class InstallationResult(object):
+ def __init__(self, name):
+ # type: (str) -> None
+ self.name = name
+
+ def __repr__(self):
+ # type: () -> str
+ return "InstallationResult(name={!r})".format(self.name)
+
+
+def install_given_reqs(
+ to_install, # type: List[InstallRequirement]
+ install_options, # type: List[str]
+ global_options=(), # type: Sequence[str]
+ *args, # type: Any
+ **kwargs # type: Any
+):
+ # type: (...) -> List[InstallationResult]
+ """
+ Install everything in the given list.
+
+ (to be called after having downloaded and unpacked the packages)
+ """
+
+ if to_install:
+ logger.info(
+ 'Installing collected packages: %s',
+ ', '.join([req.name for req in to_install]),
+ )
+
+ installed = []
+
+ with indent_log():
+ for requirement in to_install:
+ if requirement.should_reinstall:
+ logger.info('Attempting uninstall: %s', requirement.name)
+ with indent_log():
+ uninstalled_pathset = requirement.uninstall(
+ auto_confirm=True
+ )
+ try:
+ requirement.install(
+ install_options,
+ global_options,
+ *args,
+ **kwargs
+ )
+ except Exception:
+ should_rollback = (
+ requirement.should_reinstall and
+ not requirement.install_succeeded
+ )
+ # if install did not succeed, rollback previous uninstall
+ if should_rollback:
+ uninstalled_pathset.rollback()
+ raise
+ else:
+ should_commit = (
+ requirement.should_reinstall and
+ requirement.install_succeeded
+ )
+ if should_commit:
+ uninstalled_pathset.commit()
+
+ installed.append(InstallationResult(requirement.name))
+
+ return installed
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..65afb9768c0b35c16821811f92bdb2983faf478f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/__pycache__/constructors.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/__pycache__/constructors.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a80dfa7e6e8971d1e10d775e820dd3ebd2f25a1c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/__pycache__/constructors.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/__pycache__/req_file.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/__pycache__/req_file.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9164f19508c9f06a0b3cb9107b7ae6695f733e9c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/__pycache__/req_file.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/__pycache__/req_install.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/__pycache__/req_install.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..827a7e25a1702d55b5e52f0fa004c49105177553
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/__pycache__/req_install.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/__pycache__/req_set.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/__pycache__/req_set.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9b68adbb376bb05492761a7bbe025ecf648aab30
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/__pycache__/req_set.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/__pycache__/req_tracker.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/__pycache__/req_tracker.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bd937a25ba5f065153bafd1952e741ba78658e05
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/__pycache__/req_tracker.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e202f31293cb7f4c892053e1503693ed840c2cdb
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/constructors.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/constructors.py
new file mode 100644
index 0000000000000000000000000000000000000000..1f3cd8a104c92d804f3086bb519a9ccf24cc46de
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/constructors.py
@@ -0,0 +1,436 @@
+"""Backing implementation for InstallRequirement's various constructors
+
+The idea here is that these formed a major chunk of InstallRequirement's size
+so, moving them and support code dedicated to them outside of that class
+helps creates for better understandability for the rest of the code.
+
+These are meant to be used elsewhere within pip to create instances of
+InstallRequirement.
+"""
+
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+import logging
+import os
+import re
+
+from pip._vendor.packaging.markers import Marker
+from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
+from pip._vendor.packaging.specifiers import Specifier
+from pip._vendor.pkg_resources import RequirementParseError, parse_requirements
+
+from pip._internal.exceptions import InstallationError
+from pip._internal.models.index import PyPI, TestPyPI
+from pip._internal.models.link import Link
+from pip._internal.models.wheel import Wheel
+from pip._internal.pyproject import make_pyproject_path
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.utils.filetypes import ARCHIVE_EXTENSIONS
+from pip._internal.utils.misc import is_installable_dir, splitext
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.urls import path_to_url
+from pip._internal.vcs import is_url, vcs
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Any, Dict, Optional, Set, Tuple, Union,
+ )
+ from pip._internal.cache import WheelCache
+
+
+__all__ = [
+ "install_req_from_editable", "install_req_from_line",
+ "parse_editable"
+]
+
+logger = logging.getLogger(__name__)
+operators = Specifier._operators.keys()
+
+
+def is_archive_file(name):
+ # type: (str) -> bool
+ """Return True if `name` is a considered as an archive file."""
+ ext = splitext(name)[1].lower()
+ if ext in ARCHIVE_EXTENSIONS:
+ return True
+ return False
+
+
+def _strip_extras(path):
+ # type: (str) -> Tuple[str, Optional[str]]
+ m = re.match(r'^(.+)(\[[^\]]+\])$', path)
+ extras = None
+ if m:
+ path_no_extras = m.group(1)
+ extras = m.group(2)
+ else:
+ path_no_extras = path
+
+ return path_no_extras, extras
+
+
+def convert_extras(extras):
+ # type: (Optional[str]) -> Set[str]
+ if not extras:
+ return set()
+ return Requirement("placeholder" + extras.lower()).extras
+
+
+def parse_editable(editable_req):
+ # type: (str) -> Tuple[Optional[str], str, Optional[Set[str]]]
+ """Parses an editable requirement into:
+ - a requirement name
+ - an URL
+ - extras
+ - editable options
+ Accepted requirements:
+ svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
+ .[some_extra]
+ """
+
+ url = editable_req
+
+ # If a file path is specified with extras, strip off the extras.
+ url_no_extras, extras = _strip_extras(url)
+
+ if os.path.isdir(url_no_extras):
+ if not os.path.exists(os.path.join(url_no_extras, 'setup.py')):
+ msg = (
+ 'File "setup.py" not found. Directory cannot be installed '
+ 'in editable mode: {}'.format(os.path.abspath(url_no_extras))
+ )
+ pyproject_path = make_pyproject_path(url_no_extras)
+ if os.path.isfile(pyproject_path):
+ msg += (
+ '\n(A "pyproject.toml" file was found, but editable '
+ 'mode currently requires a setup.py based build.)'
+ )
+ raise InstallationError(msg)
+
+ # Treating it as code that has already been checked out
+ url_no_extras = path_to_url(url_no_extras)
+
+ if url_no_extras.lower().startswith('file:'):
+ package_name = Link(url_no_extras).egg_fragment
+ if extras:
+ return (
+ package_name,
+ url_no_extras,
+ Requirement("placeholder" + extras.lower()).extras,
+ )
+ else:
+ return package_name, url_no_extras, None
+
+ for version_control in vcs:
+ if url.lower().startswith('%s:' % version_control):
+ url = '%s+%s' % (version_control, url)
+ break
+
+ if '+' not in url:
+ raise InstallationError(
+ '{} is not a valid editable requirement. '
+ 'It should either be a path to a local project or a VCS URL '
+ '(beginning with svn+, git+, hg+, or bzr+).'.format(editable_req)
+ )
+
+ vc_type = url.split('+', 1)[0].lower()
+
+ if not vcs.get_backend(vc_type):
+ error_message = 'For --editable=%s only ' % editable_req + \
+ ', '.join([backend.name + '+URL' for backend in vcs.backends]) + \
+ ' is currently supported'
+ raise InstallationError(error_message)
+
+ package_name = Link(url).egg_fragment
+ if not package_name:
+ raise InstallationError(
+ "Could not detect requirement name for '%s', please specify one "
+ "with #egg=your_package_name" % editable_req
+ )
+ return package_name, url, None
+
+
+def deduce_helpful_msg(req):
+ # type: (str) -> str
+ """Returns helpful msg in case requirements file does not exist,
+ or cannot be parsed.
+
+ :params req: Requirements file path
+ """
+ msg = ""
+ if os.path.exists(req):
+ msg = " It does exist."
+ # Try to parse and check if it is a requirements file.
+ try:
+ with open(req, 'r') as fp:
+ # parse first line only
+ next(parse_requirements(fp.read()))
+ msg += " The argument you provided " + \
+ "(%s) appears to be a" % (req) + \
+ " requirements file. If that is the" + \
+ " case, use the '-r' flag to install" + \
+ " the packages specified within it."
+ except RequirementParseError:
+ logger.debug("Cannot parse '%s' as requirements \
+ file" % (req), exc_info=True)
+ else:
+ msg += " File '%s' does not exist." % (req)
+ return msg
+
+
+class RequirementParts(object):
+ def __init__(
+ self,
+ requirement, # type: Optional[Requirement]
+ link, # type: Optional[Link]
+ markers, # type: Optional[Marker]
+ extras, # type: Set[str]
+ ):
+ self.requirement = requirement
+ self.link = link
+ self.markers = markers
+ self.extras = extras
+
+
+def parse_req_from_editable(editable_req):
+ # type: (str) -> RequirementParts
+ name, url, extras_override = parse_editable(editable_req)
+
+ if name is not None:
+ try:
+ req = Requirement(name)
+ except InvalidRequirement:
+ raise InstallationError("Invalid requirement: '%s'" % name)
+ else:
+ req = None
+
+ link = Link(url)
+
+ return RequirementParts(req, link, None, extras_override)
+
+
+# ---- The actual constructors follow ----
+
+
+def install_req_from_editable(
+ editable_req, # type: str
+ comes_from=None, # type: Optional[str]
+ use_pep517=None, # type: Optional[bool]
+ isolated=False, # type: bool
+ options=None, # type: Optional[Dict[str, Any]]
+ wheel_cache=None, # type: Optional[WheelCache]
+ constraint=False # type: bool
+):
+ # type: (...) -> InstallRequirement
+
+ parts = parse_req_from_editable(editable_req)
+
+ source_dir = parts.link.file_path if parts.link.scheme == 'file' else None
+
+ return InstallRequirement(
+ parts.requirement, comes_from, source_dir=source_dir,
+ editable=True,
+ link=parts.link,
+ constraint=constraint,
+ use_pep517=use_pep517,
+ isolated=isolated,
+ options=options if options else {},
+ wheel_cache=wheel_cache,
+ extras=parts.extras,
+ )
+
+
+def _looks_like_path(name):
+ # type: (str) -> bool
+ """Checks whether the string "looks like" a path on the filesystem.
+
+ This does not check whether the target actually exists, only judge from the
+ appearance.
+
+ Returns true if any of the following conditions is true:
+ * a path separator is found (either os.path.sep or os.path.altsep);
+ * a dot is found (which represents the current directory).
+ """
+ if os.path.sep in name:
+ return True
+ if os.path.altsep is not None and os.path.altsep in name:
+ return True
+ if name.startswith("."):
+ return True
+ return False
+
+
+def _get_url_from_path(path, name):
+ # type: (str, str) -> str
+ """
+ First, it checks whether a provided path is an installable directory
+ (e.g. it has a setup.py). If it is, returns the path.
+
+ If false, check if the path is an archive file (such as a .whl).
+ The function checks if the path is a file. If false, if the path has
+ an @, it will treat it as a PEP 440 URL requirement and return the path.
+ """
+ if _looks_like_path(name) and os.path.isdir(path):
+ if is_installable_dir(path):
+ return path_to_url(path)
+ raise InstallationError(
+ "Directory %r is not installable. Neither 'setup.py' "
+ "nor 'pyproject.toml' found." % name
+ )
+ if not is_archive_file(path):
+ return None
+ if os.path.isfile(path):
+ return path_to_url(path)
+ urlreq_parts = name.split('@', 1)
+ if len(urlreq_parts) >= 2 and not _looks_like_path(urlreq_parts[0]):
+ # If the path contains '@' and the part before it does not look
+ # like a path, try to treat it as a PEP 440 URL req instead.
+ return None
+ logger.warning(
+ 'Requirement %r looks like a filename, but the '
+ 'file does not exist',
+ name
+ )
+ return path_to_url(path)
+
+
+def parse_req_from_line(name, line_source):
+ # type: (str, Optional[str]) -> RequirementParts
+ if is_url(name):
+ marker_sep = '; '
+ else:
+ marker_sep = ';'
+ if marker_sep in name:
+ name, markers_as_string = name.split(marker_sep, 1)
+ markers_as_string = markers_as_string.strip()
+ if not markers_as_string:
+ markers = None
+ else:
+ markers = Marker(markers_as_string)
+ else:
+ markers = None
+ name = name.strip()
+ req_as_string = None
+ path = os.path.normpath(os.path.abspath(name))
+ link = None
+ extras_as_string = None
+
+ if is_url(name):
+ link = Link(name)
+ else:
+ p, extras_as_string = _strip_extras(path)
+ url = _get_url_from_path(p, name)
+ if url is not None:
+ link = Link(url)
+
+ # it's a local file, dir, or url
+ if link:
+ # Handle relative file URLs
+ if link.scheme == 'file' and re.search(r'\.\./', link.url):
+ link = Link(
+ path_to_url(os.path.normpath(os.path.abspath(link.path))))
+ # wheel file
+ if link.is_wheel:
+ wheel = Wheel(link.filename) # can raise InvalidWheelFilename
+ req_as_string = "%s==%s" % (wheel.name, wheel.version)
+ else:
+ # set the req to the egg fragment. when it's not there, this
+ # will become an 'unnamed' requirement
+ req_as_string = link.egg_fragment
+
+ # a requirement specifier
+ else:
+ req_as_string = name
+
+ extras = convert_extras(extras_as_string)
+
+ def with_source(text):
+ # type: (str) -> str
+ if not line_source:
+ return text
+ return '{} (from {})'.format(text, line_source)
+
+ if req_as_string is not None:
+ try:
+ req = Requirement(req_as_string)
+ except InvalidRequirement:
+ if os.path.sep in req_as_string:
+ add_msg = "It looks like a path."
+ add_msg += deduce_helpful_msg(req_as_string)
+ elif ('=' in req_as_string and
+ not any(op in req_as_string for op in operators)):
+ add_msg = "= is not a valid operator. Did you mean == ?"
+ else:
+ add_msg = ''
+ msg = with_source(
+ 'Invalid requirement: {!r}'.format(req_as_string)
+ )
+ if add_msg:
+ msg += '\nHint: {}'.format(add_msg)
+ raise InstallationError(msg)
+ else:
+ req = None
+
+ return RequirementParts(req, link, markers, extras)
+
+
+def install_req_from_line(
+ name, # type: str
+ comes_from=None, # type: Optional[Union[str, InstallRequirement]]
+ use_pep517=None, # type: Optional[bool]
+ isolated=False, # type: bool
+ options=None, # type: Optional[Dict[str, Any]]
+ wheel_cache=None, # type: Optional[WheelCache]
+ constraint=False, # type: bool
+ line_source=None, # type: Optional[str]
+):
+ # type: (...) -> InstallRequirement
+ """Creates an InstallRequirement from a name, which might be a
+ requirement, directory containing 'setup.py', filename, or URL.
+
+ :param line_source: An optional string describing where the line is from,
+ for logging purposes in case of an error.
+ """
+ parts = parse_req_from_line(name, line_source)
+
+ return InstallRequirement(
+ parts.requirement, comes_from, link=parts.link, markers=parts.markers,
+ use_pep517=use_pep517, isolated=isolated,
+ options=options if options else {},
+ wheel_cache=wheel_cache,
+ constraint=constraint,
+ extras=parts.extras,
+ )
+
+
+def install_req_from_req_string(
+ req_string, # type: str
+ comes_from=None, # type: Optional[InstallRequirement]
+ isolated=False, # type: bool
+ wheel_cache=None, # type: Optional[WheelCache]
+ use_pep517=None # type: Optional[bool]
+):
+ # type: (...) -> InstallRequirement
+ try:
+ req = Requirement(req_string)
+ except InvalidRequirement:
+ raise InstallationError("Invalid requirement: '%s'" % req_string)
+
+ domains_not_allowed = [
+ PyPI.file_storage_domain,
+ TestPyPI.file_storage_domain,
+ ]
+ if (req.url and comes_from and comes_from.link and
+ comes_from.link.netloc in domains_not_allowed):
+ # Explicitly disallow pypi packages that depend on external urls
+ raise InstallationError(
+ "Packages installed from PyPI cannot depend on packages "
+ "which are not also hosted on PyPI.\n"
+ "%s depends on %s " % (comes_from.name, req)
+ )
+
+ return InstallRequirement(
+ req, comes_from, isolated=isolated, wheel_cache=wheel_cache,
+ use_pep517=use_pep517
+ )
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/req_file.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/req_file.py
new file mode 100644
index 0000000000000000000000000000000000000000..8c7810481ee6504faaf08a0b7d1e0790ad9cc089
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/req_file.py
@@ -0,0 +1,546 @@
+"""
+Requirements file parsing
+"""
+
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+from __future__ import absolute_import
+
+import optparse
+import os
+import re
+import shlex
+import sys
+
+from pip._vendor.six.moves import filterfalse
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+
+from pip._internal.cli import cmdoptions
+from pip._internal.exceptions import (
+ InstallationError,
+ RequirementsFileParseError,
+)
+from pip._internal.models.search_scope import SearchScope
+from pip._internal.req.constructors import (
+ install_req_from_editable,
+ install_req_from_line,
+)
+from pip._internal.utils.encoding import auto_decode
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.urls import get_url_scheme
+
+if MYPY_CHECK_RUNNING:
+ from optparse import Values
+ from typing import (
+ Any, Callable, Iterator, List, NoReturn, Optional, Text, Tuple,
+ )
+
+ from pip._internal.req import InstallRequirement
+ from pip._internal.cache import WheelCache
+ from pip._internal.index.package_finder import PackageFinder
+ from pip._internal.network.session import PipSession
+
+ ReqFileLines = Iterator[Tuple[int, Text]]
+
+ LineParser = Callable[[Text], Tuple[str, Values]]
+
+
+__all__ = ['parse_requirements']
+
+SCHEME_RE = re.compile(r'^(http|https|file):', re.I)
+COMMENT_RE = re.compile(r'(^|\s+)#.*$')
+
+# Matches environment variable-style values in '${MY_VARIABLE_1}' with the
+# variable name consisting of only uppercase letters, digits or the '_'
+# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1,
+# 2013 Edition.
+ENV_VAR_RE = re.compile(r'(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})')
+
+SUPPORTED_OPTIONS = [
+ cmdoptions.index_url,
+ cmdoptions.extra_index_url,
+ cmdoptions.no_index,
+ cmdoptions.constraints,
+ cmdoptions.requirements,
+ cmdoptions.editable,
+ cmdoptions.find_links,
+ cmdoptions.no_binary,
+ cmdoptions.only_binary,
+ cmdoptions.require_hashes,
+ cmdoptions.pre,
+ cmdoptions.trusted_host,
+ cmdoptions.always_unzip, # Deprecated
+] # type: List[Callable[..., optparse.Option]]
+
+# options to be passed to requirements
+SUPPORTED_OPTIONS_REQ = [
+ cmdoptions.install_options,
+ cmdoptions.global_options,
+ cmdoptions.hash,
+] # type: List[Callable[..., optparse.Option]]
+
+# the 'dest' string values
+SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ]
+
+
+class ParsedLine(object):
+ def __init__(
+ self,
+ filename, # type: str
+ lineno, # type: int
+ comes_from, # type: str
+ args, # type: str
+ opts, # type: Values
+ constraint, # type: bool
+ ):
+ # type: (...) -> None
+ self.filename = filename
+ self.lineno = lineno
+ self.comes_from = comes_from
+ self.args = args
+ self.opts = opts
+ self.constraint = constraint
+
+
+def parse_requirements(
+ filename, # type: str
+ session, # type: PipSession
+ finder=None, # type: Optional[PackageFinder]
+ comes_from=None, # type: Optional[str]
+ options=None, # type: Optional[optparse.Values]
+ constraint=False, # type: bool
+ wheel_cache=None, # type: Optional[WheelCache]
+ use_pep517=None # type: Optional[bool]
+):
+ # type: (...) -> Iterator[InstallRequirement]
+ """Parse a requirements file and yield InstallRequirement instances.
+
+ :param filename: Path or url of requirements file.
+ :param session: PipSession instance.
+ :param finder: Instance of pip.index.PackageFinder.
+ :param comes_from: Origin description of requirements.
+ :param options: cli options.
+ :param constraint: If true, parsing a constraint file rather than
+ requirements file.
+ :param wheel_cache: Instance of pip.wheel.WheelCache
+ :param use_pep517: Value of the --use-pep517 option.
+ """
+ skip_requirements_regex = (
+ options.skip_requirements_regex if options else None
+ )
+ line_parser = get_line_parser(finder)
+ parser = RequirementsFileParser(
+ session, line_parser, comes_from, skip_requirements_regex
+ )
+
+ for parsed_line in parser.parse(filename, constraint):
+ req = handle_line(
+ parsed_line, finder, options, session, wheel_cache, use_pep517
+ )
+ if req is not None:
+ yield req
+
+
+def preprocess(content, skip_requirements_regex):
+ # type: (Text, Optional[str]) -> ReqFileLines
+ """Split, filter, and join lines, and return a line iterator
+
+ :param content: the content of the requirements file
+ :param options: cli options
+ """
+ lines_enum = enumerate(content.splitlines(), start=1) # type: ReqFileLines
+ lines_enum = join_lines(lines_enum)
+ lines_enum = ignore_comments(lines_enum)
+ if skip_requirements_regex:
+ lines_enum = skip_regex(lines_enum, skip_requirements_regex)
+ lines_enum = expand_env_variables(lines_enum)
+ return lines_enum
+
+
def handle_line(
    line,  # type: ParsedLine
    finder=None,  # type: Optional[PackageFinder]
    options=None,  # type: Optional[optparse.Values]
    session=None,  # type: Optional[PipSession]
    wheel_cache=None,  # type: Optional[WheelCache]
    use_pep517=None,  # type: Optional[bool]
):
    # type: (...) -> Optional[InstallRequirement]
    """Handle a single parsed requirements line; This can result in
    creating/yielding requirements, or updating the finder.

    For lines that contain requirements, the only options that have an effect
    are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
    requirement. Other options from SUPPORTED_OPTIONS may be present, but are
    ignored.

    For lines that do not contain requirements, the only options that have an
    effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
    be present, but are ignored. These lines may contain multiple options
    (although our docs imply only one is supported), and all are parsed and
    affect the finder.
    """

    # preserve for the nested code path
    line_comes_from = '%s %s (line %s)' % (
        '-c' if line.constraint else '-r', line.filename, line.lineno,
    )

    # return a line requirement
    if line.args:
        isolated = options.isolated_mode if options else False
        if options:
            cmdoptions.check_install_build_global(options, line.opts)
        # get the options that apply to requirements
        req_options = {}
        for dest in SUPPORTED_OPTIONS_REQ_DEST:
            if dest in line.opts.__dict__ and line.opts.__dict__[dest]:
                req_options[dest] = line.opts.__dict__[dest]
        line_source = 'line {} of {}'.format(line.lineno, line.filename)
        return install_req_from_line(
            line.args,
            comes_from=line_comes_from,
            use_pep517=use_pep517,
            isolated=isolated,
            options=req_options,
            wheel_cache=wheel_cache,
            constraint=line.constraint,
            line_source=line_source,
        )

    # return an editable requirement
    elif line.opts.editables:
        isolated = options.isolated_mode if options else False
        return install_req_from_editable(
            line.opts.editables[0], comes_from=line_comes_from,
            use_pep517=use_pep517,
            constraint=line.constraint, isolated=isolated,
            wheel_cache=wheel_cache
        )

    # percolate hash-checking option upward
    elif line.opts.require_hashes:
        options.require_hashes = line.opts.require_hashes

    # set finder options
    elif finder:
        find_links = finder.find_links
        index_urls = finder.index_urls
        if line.opts.index_url:
            index_urls = [line.opts.index_url]
        if line.opts.no_index is True:
            index_urls = []
        if line.opts.extra_index_urls:
            index_urls.extend(line.opts.extra_index_urls)
        if line.opts.find_links:
            # FIXME: it would be nice to keep track of the source
            # of the find_links: support a find-links local path
            # relative to a requirements file.
            value = line.opts.find_links[0]
            req_dir = os.path.dirname(os.path.abspath(line.filename))
            relative_to_reqs_file = os.path.join(req_dir, value)
            if os.path.exists(relative_to_reqs_file):
                value = relative_to_reqs_file
            # NOTE(review): this appends to the finder's existing list in
            # place before rebuilding the SearchScope below — presumably
            # intentional (accumulate across lines); confirm.
            find_links.append(value)

        search_scope = SearchScope(
            find_links=find_links,
            index_urls=index_urls,
        )
        finder.search_scope = search_scope

        if line.opts.pre:
            finder.set_allow_all_prereleases()

        if session:
            for host in line.opts.trusted_hosts or []:
                source = 'line {} of {}'.format(line.lineno, line.filename)
                session.add_trusted_host(host, source=source)

    return None
+
+
class RequirementsFileParser(object):
    """Parses a requirements file into ParsedLine objects, recursing into
    any nested files referenced via ``-r``/``-c`` options."""

    def __init__(
        self,
        session,  # type: PipSession
        line_parser,  # type: LineParser
        comes_from,  # type: str
        skip_requirements_regex,  # type: Optional[str]
    ):
        # type: (...) -> None
        self._session = session
        self._line_parser = line_parser
        self._comes_from = comes_from
        self._skip_requirements_regex = skip_requirements_regex

    def parse(self, filename, constraint):
        # type: (str, bool) -> Iterator[ParsedLine]
        """Parse a given file, yielding parsed lines.
        """
        for line in self._parse_and_recurse(filename, constraint):
            yield line

    def _parse_and_recurse(self, filename, constraint):
        # type: (str, bool) -> Iterator[ParsedLine]
        # Yield parsed lines from ``filename``, following -r/-c references
        # depth-first.  NOTE(review): there is no cycle detection here, so a
        # self-referencing requirements file would recurse forever — confirm
        # this is handled elsewhere (or accepted) upstream.
        for line in self._parse_file(filename, constraint):
            if (
                not line.args and
                not line.opts.editables and
                (line.opts.requirements or line.opts.constraints)
            ):
                # parse a nested requirements file
                if line.opts.requirements:
                    req_path = line.opts.requirements[0]
                    nested_constraint = False
                else:
                    req_path = line.opts.constraints[0]
                    nested_constraint = True

                # original file is over http
                if SCHEME_RE.search(filename):
                    # do a url join so relative paths work
                    req_path = urllib_parse.urljoin(filename, req_path)
                # original file and nested file are paths
                elif not SCHEME_RE.search(req_path):
                    # do a join so relative paths work
                    req_path = os.path.join(
                        os.path.dirname(filename), req_path,
                    )

                for inner_line in self._parse_and_recurse(
                    req_path, nested_constraint,
                ):
                    yield inner_line
            else:
                yield line

    def _parse_file(self, filename, constraint):
        # type: (str, bool) -> Iterator[ParsedLine]
        # Fetch, preprocess and option-parse one file (no recursion).
        _, content = get_file_content(
            filename, self._session, comes_from=self._comes_from
        )

        lines_enum = preprocess(content, self._skip_requirements_regex)

        for line_number, line in lines_enum:
            try:
                args_str, opts = self._line_parser(line)
            except OptionParsingError as e:
                # add offending line
                msg = 'Invalid requirement: %s\n%s' % (line, e.msg)
                raise RequirementsFileParseError(msg)

            yield ParsedLine(
                filename,
                line_number,
                self._comes_from,
                args_str,
                opts,
                constraint,
            )
+
+
def get_line_parser(finder):
    # type: (Optional[PackageFinder]) -> LineParser
    """Build a per-line parser callable.

    The returned callable splits one requirements-file line into its
    requirement text and the parsed option values, seeding option
    defaults from ``finder`` when one is provided.
    """
    def parse_line(line):
        # type: (Text) -> Tuple[str, Values]
        # Options such as --hash are appendable, so a fresh parser is
        # built for every line to avoid accumulating state across lines.
        option_parser = build_parser()
        values = option_parser.get_default_values()
        values.index_url = None
        if finder:
            values.format_control = finder.format_control

        requirement, option_text = break_args_options(line)
        if sys.version_info < (2, 7, 3):
            # shlex cannot handle unicode before Python 2.7.3
            # https://github.com/python/mypy/issues/1174
            option_text = option_text.encode('utf8')  # type: ignore

        # https://github.com/python/mypy/issues/1174
        parsed, _ = option_parser.parse_args(
            shlex.split(option_text), values)  # type: ignore
        return requirement, parsed

    return parse_line
+
+
def break_args_options(line):
    # type: (Text) -> Tuple[str, Text]
    """Break up the line into an args and options string. We only want to shlex
    (and then optparse) the options, not the args. args can contain markers
    which are corrupted by shlex.

    Everything before the first token starting with ``-`` is args; that
    token and the rest of the line are options.
    """
    tokens = line.split(' ')
    args = []
    options = tokens[:]
    for token in tokens:
        # A single startswith('-') also covers '--'; the original
        # second check was redundant.
        if token.startswith('-'):
            break
        args.append(token)
        options.pop(0)
    return ' '.join(args), ' '.join(options)  # type: ignore
+
+
class OptionParsingError(Exception):
    """Raised when optparse fails to parse a requirements-file option.

    Carries the optparse error text in ``msg`` so callers can report the
    offending line instead of letting optparse call sys.exit().
    """

    def __init__(self, msg):
        # type: (str) -> None
        self.msg = msg
+
+
def build_parser():
    # type: () -> optparse.OptionParser
    """
    Return a parser configured with every option a requirement line may
    carry (both finder-level and per-requirement options).
    """
    parser = optparse.OptionParser(add_help_option=False)

    for make_option in SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ:
        parser.add_option(make_option())

    # optparse calls sys.exit() on a parse failure; substitute an exit
    # hook that raises our own exception so callers can handle it.
    def parser_exit(self, msg):
        # type: (Any, str) -> NoReturn
        raise OptionParsingError(msg)
    # NOTE: mypy disallows assigning to a method
    # https://github.com/python/mypy/issues/2427
    parser.exit = parser_exit  # type: ignore

    return parser
+
+
def join_lines(lines_enum):
    # type: (ReqFileLines) -> ReqFileLines
    """Joins a line ending in '\' with the previous line (except when following
    comments). The joined line takes on the index of the first line.
    """
    primary_line_number = None
    new_line = []  # type: List[Text]
    for line_number, line in lines_enum:
        if not line.endswith('\\') or COMMENT_RE.match(line):
            if COMMENT_RE.match(line):
                # this ensures comments are always matched later
                line = ' ' + line
            if new_line:
                # terminate the pending continuation with this line
                new_line.append(line)
                yield primary_line_number, ''.join(new_line)
                new_line = []
            else:
                yield line_number, line
        else:
            if not new_line:
                primary_line_number = line_number
            # NOTE(review): strip('\\') removes *all* leading and trailing
            # backslashes, not just the single continuation marker — confirm
            # that lines legitimately ending in '\\\\' are not expected here.
            new_line.append(line.strip('\\'))

    # last line contains \
    if new_line:
        yield primary_line_number, ''.join(new_line)

    # TODO: handle space after '\'.
+
+
def ignore_comments(lines_enum):
    # type: (ReqFileLines) -> ReqFileLines
    """
    Drop comment text from each line and skip lines that end up empty.
    """
    for number, raw in lines_enum:
        stripped = COMMENT_RE.sub('', raw).strip()
        if stripped:
            yield number, stripped
+
+
def skip_regex(lines_enum, pattern):
    # type: (ReqFileLines, str) -> ReqFileLines
    """
    Lazily drop lines whose text matches ``pattern``.

    Note: the regex pattern is only built once
    """
    matcher = re.compile(pattern)
    return (
        (line_number, line)
        for line_number, line in lines_enum
        if not matcher.search(line)
    )
+
+
def expand_env_variables(lines_enum):
    # type: (ReqFileLines) -> ReqFileLines
    """Replace all environment variables that can be retrieved via `os.getenv`.

    The only allowed format for environment variables defined in the
    requirement file is `${MY_VARIABLE_1}` to ensure two things:

    1. Strings that contain a `$` aren't accidentally (partially) expanded.
    2. Ensure consistency across platforms for requirement files.

    These points are the result of a discussion on the `github pull
    request #3514 <https://github.com/pypa/pip/pull/3514>`_.

    Valid characters in variable names follow the `POSIX standard
    <http://pubs.opengroup.org/onlinepubs/9699919799/>`_ and are limited
    to uppercase letter, digits and the `_` (underscore).

    Variables that are unset or empty are left unexpanded in the line.
    """
    for line_number, line in lines_enum:
        for env_var, var_name in ENV_VAR_RE.findall(line):
            value = os.getenv(var_name)
            if not value:
                # unset/empty variable: keep the ${...} text as-is
                continue

            line = line.replace(env_var, value)

        yield line_number, line
+
+
def get_file_content(url, session, comes_from=None):
    # type: (str, PipSession, Optional[str]) -> Tuple[str, Text]
    """Gets the content of a file; it may be a filename, file: URL, or
    http: URL. Returns (location, content). Content is unicode.
    Respects # -*- coding: declarations on the retrieved files.

    :param url: File path or url.
    :param session: PipSession instance.
    :param comes_from: Origin description of requirements.
    """
    scheme = get_url_scheme(url)

    if scheme in ['http', 'https']:
        # FIXME: catch some errors
        resp = session.get(url)
        resp.raise_for_status()
        return resp.url, resp.text

    elif scheme == 'file':
        # A remote requirements file must not pull in local files.
        if comes_from and comes_from.startswith('http'):
            raise InstallationError(
                'Requirements file %s references URL %s, which is local'
                % (comes_from, url))

        path = url.split(':', 1)[1]
        path = path.replace('\\', '/')
        # Convert legacy 'file:///c|/...' Windows drive notation to 'c:/...'.
        match = _url_slash_drive_re.match(path)
        if match:
            path = match.group(1) + ':' + path.split('|', 1)[1]
        path = urllib_parse.unquote(path)
        if path.startswith('/'):
            # collapse any run of leading slashes down to a single one
            path = '/' + path.lstrip('/')
        url = path

    try:
        with open(url, 'rb') as f:
            # auto_decode honours BOMs and PEP 263 coding declarations
            content = auto_decode(f.read())
    except IOError as exc:
        raise InstallationError(
            'Could not open requirements file: %s' % str(exc)
        )
    return url, content
+
+
+_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/req_install.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/req_install.py
new file mode 100644
index 0000000000000000000000000000000000000000..22ac24b96d361e8202979e3cbb23309792f7e090
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/req_install.py
@@ -0,0 +1,830 @@
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+from __future__ import absolute_import
+
+import logging
+import os
+import shutil
+import sys
+import zipfile
+
+from pip._vendor import pkg_resources, six
+from pip._vendor.packaging.requirements import Requirement
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.packaging.version import Version
+from pip._vendor.packaging.version import parse as parse_version
+from pip._vendor.pep517.wrappers import Pep517HookCaller
+
+from pip._internal import pep425tags
+from pip._internal.build_env import NoOpBuildEnvironment
+from pip._internal.exceptions import InstallationError
+from pip._internal.locations import get_scheme
+from pip._internal.models.link import Link
+from pip._internal.operations.build.metadata import generate_metadata
+from pip._internal.operations.build.metadata_legacy import \
+ generate_metadata as generate_metadata_legacy
+from pip._internal.operations.install.editable_legacy import \
+ install_editable as install_editable_legacy
+from pip._internal.operations.install.legacy import install as install_legacy
+from pip._internal.operations.install.wheel import install_wheel
+from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path
+from pip._internal.req.req_uninstall import UninstallPathSet
+from pip._internal.utils.deprecation import deprecated
+from pip._internal.utils.hashes import Hashes
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.marker_files import (
+ PIP_DELETE_MARKER_FILENAME,
+ has_delete_marker_file,
+ write_delete_marker_file,
+)
+from pip._internal.utils.misc import (
+ ask_path_exists,
+ backup_dir,
+ display_path,
+ dist_in_site_packages,
+ dist_in_usersite,
+ get_installed_version,
+ hide_url,
+ redact_auth_from_url,
+ rmtree,
+)
+from pip._internal.utils.packaging import get_metadata
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.virtualenv import running_under_virtualenv
+from pip._internal.vcs import vcs
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Any, Dict, Iterable, List, Optional, Sequence, Union,
+ )
+ from pip._internal.build_env import BuildEnvironment
+ from pip._internal.cache import WheelCache
+ from pip._internal.index.package_finder import PackageFinder
+ from pip._vendor.pkg_resources import Distribution
+ from pip._vendor.packaging.specifiers import SpecifierSet
+ from pip._vendor.packaging.markers import Marker
+
+
+logger = logging.getLogger(__name__)
+
+
def _get_dist(metadata_directory):
    # type: (str) -> Distribution
    """Build a pkg_resources.Distribution from a metadata directory."""
    dist_dir = metadata_directory.rstrip(os.sep)

    # An .egg-info directory maps to the generic Distribution class;
    # anything else must be a .dist-info directory.
    if dist_dir.endswith(".egg-info"):
        dist_cls = pkg_resources.Distribution
    else:
        assert dist_dir.endswith(".dist-info")
        dist_cls = pkg_resources.DistInfoDistribution

    base_dir, dir_name = os.path.split(dist_dir)
    project = os.path.splitext(dir_name)[0]

    return dist_cls(
        base_dir,
        project_name=project,
        metadata=pkg_resources.PathMetadata(base_dir, dist_dir),
    )
+
+
class InstallRequirement(object):
    """
    Represents something that may be installed later on, may have information
    about where to fetch the relevant requirement and also contains logic for
    installing the said requirement.
    """

    def __init__(
        self,
        req,  # type: Optional[Requirement]
        comes_from,  # type: Optional[Union[str, InstallRequirement]]
        source_dir=None,  # type: Optional[str]
        editable=False,  # type: bool
        link=None,  # type: Optional[Link]
        markers=None,  # type: Optional[Marker]
        use_pep517=None,  # type: Optional[bool]
        isolated=False,  # type: bool
        options=None,  # type: Optional[Dict[str, Any]]
        wheel_cache=None,  # type: Optional[WheelCache]
        constraint=False,  # type: bool
        extras=()  # type: Iterable[str]
    ):
        # type: (...) -> None
        assert req is None or isinstance(req, Requirement), req
        self.req = req
        self.comes_from = comes_from
        self.constraint = constraint
        if source_dir is None:
            self.source_dir = None  # type: Optional[str]
        else:
            self.source_dir = os.path.normpath(os.path.abspath(source_dir))
        self.editable = editable

        self._wheel_cache = wheel_cache
        if link is None and req and req.url:
            # PEP 508 URL requirement
            link = Link(req.url)
        self.link = self.original_link = link
        # Path to any downloaded or already-existing package.
        self.local_file_path = None  # type: Optional[str]
        if self.link and self.link.is_file:
            self.local_file_path = self.link.file_path

        # Explicitly passed extras win over those parsed from the
        # requirement string.
        if extras:
            self.extras = extras
        elif req:
            self.extras = {
                pkg_resources.safe_extra(extra) for extra in req.extras
            }
        else:
            self.extras = set()
        if markers is None and req:
            markers = req.marker
        self.markers = markers

        # This holds the pkg_resources.Distribution object if this requirement
        # is already available:
        self.satisfied_by = None  # type: Optional[Distribution]
        # Whether the installation process should try to uninstall an existing
        # distribution before installing this requirement.
        self.should_reinstall = False
        # Temporary build location
        self._temp_build_dir = None  # type: Optional[TempDirectory]
        # Set to True after successful installation
        self.install_succeeded = None  # type: Optional[bool]
        self.options = options if options else {}
        # Set to True after successful preparation of this requirement
        self.prepared = False
        self.is_direct = False

        self.isolated = isolated
        self.build_env = NoOpBuildEnvironment()  # type: BuildEnvironment

        # For PEP 517, the directory where we request the project metadata
        # gets stored. We need this to pass to build_wheel, so the backend
        # can ensure that the wheel matches the metadata (see the PEP for
        # details).
        self.metadata_directory = None  # type: Optional[str]

        # The static build requirements (from pyproject.toml)
        self.pyproject_requires = None  # type: Optional[List[str]]

        # Build requirements that we will check are available
        self.requirements_to_check = []  # type: List[str]

        # The PEP 517 backend we should use to build the project
        self.pep517_backend = None  # type: Optional[Pep517HookCaller]

        # Are we using PEP 517 for this requirement?
        # After pyproject.toml has been loaded, the only valid values are True
        # and False. Before loading, None is valid (meaning "use the default").
        # Setting an explicit value before loading pyproject.toml is supported,
        # but after loading this flag should be treated as read only.
        self.use_pep517 = use_pep517
+
+ def __str__(self):
+ # type: () -> str
+ if self.req:
+ s = str(self.req)
+ if self.link:
+ s += ' from %s' % redact_auth_from_url(self.link.url)
+ elif self.link:
+ s = redact_auth_from_url(self.link.url)
+ else:
+ s = '<InstallRequirement>'
+ if self.satisfied_by is not None:
+ s += ' in %s' % display_path(self.satisfied_by.location)
+ if self.comes_from:
+ if isinstance(self.comes_from, six.string_types):
+ comes_from = self.comes_from # type: Optional[str]
+ else:
+ comes_from = self.comes_from.from_path()
+ if comes_from:
+ s += ' (from %s)' % comes_from
+ return s
+
+ def __repr__(self):
+ # type: () -> str
+ return '<%s object: %s editable=%r>' % (
+ self.__class__.__name__, str(self), self.editable)
+
+ def format_debug(self):
+ # type: () -> str
+ """An un-tested helper for getting state, for debugging.
+ """
+ attributes = vars(self)
+ names = sorted(attributes)
+
+ state = (
+ "{}={!r}".format(attr, attributes[attr]) for attr in sorted(names)
+ )
+ return '<{name} object: {{{state}}}>'.format(
+ name=self.__class__.__name__,
+ state=", ".join(state),
+ )
+
    def populate_link(self, finder, upgrade, require_hashes):
        # type: (PackageFinder, bool, bool) -> None
        """Ensure that if a link can be found for this, that it is found.

        Note that self.link may still be None - if Upgrade is False and the
        requirement is already installed.

        If require_hashes is True, don't use the wheel cache, because cached
        wheels, always built locally, have different hashes than the files
        downloaded from the index server and thus throw false hash mismatches.
        Furthermore, cached wheels at present have undeterministic contents due
        to file modification times.
        """
        if self.link is None:
            self.link = finder.find_requirement(self, upgrade)
        # Prefer a cached, locally-built wheel over the index link.
        if self._wheel_cache is not None and not require_hashes:
            old_link = self.link
            supported_tags = pep425tags.get_supported()
            self.link = self._wheel_cache.get(
                link=self.link,
                package_name=self.name,
                supported_tags=supported_tags,
            )
            if old_link != self.link:
                logger.debug('Using cached wheel link: %s', self.link)
+
+ # Things that are valid for all kinds of requirements?
    @property
    def name(self):
        # type: () -> Optional[str]
        """Safe project name from the parsed requirement, or None if the
        requirement is not yet named."""
        if self.req is None:
            return None
        return six.ensure_str(pkg_resources.safe_name(self.req.name))
+
    @property
    def specifier(self):
        # type: () -> SpecifierSet
        """Version specifier set of the underlying requirement."""
        return self.req.specifier
+
+ @property
+ def is_pinned(self):
+ # type: () -> bool
+ """Return whether I am pinned to an exact version.
+
+ For example, some-package==1.2 is pinned; some-package>1.2 is not.
+ """
+ specifiers = self.specifier
+ return (len(specifiers) == 1 and
+ next(iter(specifiers)).operator in {'==', '==='})
+
    @property
    def installed_version(self):
        # type: () -> Optional[str]
        """Version string of the currently installed project, or None."""
        return get_installed_version(self.name)
+
+ def match_markers(self, extras_requested=None):
+ # type: (Optional[Iterable[str]]) -> bool
+ if not extras_requested:
+ # Provide an extra to safely evaluate the markers
+ # without matching any extra
+ extras_requested = ('',)
+ if self.markers is not None:
+ return any(
+ self.markers.evaluate({'extra': extra})
+ for extra in extras_requested)
+ else:
+ return True
+
+ @property
+ def has_hash_options(self):
+ # type: () -> bool
+ """Return whether any known-good hashes are specified as options.
+
+ These activate --require-hashes mode; hashes specified as part of a
+ URL do not.
+
+ """
+ return bool(self.options.get('hashes', {}))
+
    def hashes(self, trust_internet=True):
        # type: (bool) -> Hashes
        """Return a hash-comparer that considers my option- and URL-based
        hashes to be known-good.

        Hashes in URLs--ones embedded in the requirements file, not ones
        downloaded from an index server--are almost peers with ones from
        flags. They satisfy --require-hashes (whether it was implicitly or
        explicitly activated) but do not activate it. md5 and sha224 are not
        allowed in flags, which should nudge people toward good algos. We
        always OR all hashes together, even ones from URLs.

        :param trust_internet: Whether to trust URL-based (#md5=...) hashes
            downloaded from the internet, as by populate_link()

        """
        # copy() so URL-derived hashes never leak back into self.options
        good_hashes = self.options.get('hashes', {}).copy()
        link = self.link if trust_internet else self.original_link
        if link and link.hash:
            good_hashes.setdefault(link.hash_name, []).append(link.hash)
        return Hashes(good_hashes)
+
    def from_path(self):
        # type: () -> Optional[str]
        """Format a nice indicator to show where this "comes from"
        """
        if self.req is None:
            return None
        s = str(self.req)
        if self.comes_from:
            # comes_from is either a plain string or the parent
            # InstallRequirement that pulled this one in.
            if isinstance(self.comes_from, six.string_types):
                comes_from = self.comes_from
            else:
                comes_from = self.comes_from.from_path()
            if comes_from:
                s += '->' + comes_from
        return s
+
    def ensure_build_location(self, build_dir):
        # type: (str) -> str
        """Return (creating if needed) the directory this requirement
        should be built in: a temp dir for unnamed requirements, else a
        per-project subdirectory of ``build_dir``."""
        assert build_dir is not None
        if self._temp_build_dir is not None:
            assert self._temp_build_dir.path
            return self._temp_build_dir.path
        if self.req is None:
            # Some systems have /tmp as a symlink which confuses custom
            # builds (such as numpy). Thus, we ensure that the real path
            # is returned.
            self._temp_build_dir = TempDirectory(kind="req-build")

            return self._temp_build_dir.path
        if self.editable:
            name = self.name.lower()
        else:
            name = self.name
        # FIXME: Is there a better place to create the build_dir? (hg and bzr
        # need this)
        if not os.path.exists(build_dir):
            logger.debug('Creating directory %s', build_dir)
            os.makedirs(build_dir)
            write_delete_marker_file(build_dir)
        return os.path.join(build_dir, name)
+
    def _set_requirement(self):
        # type: () -> None
        """Set requirement after generating metadata.
        """
        assert self.req is None
        assert self.metadata is not None
        assert self.source_dir is not None

        # Construct a Requirement object from the generated metadata
        if isinstance(parse_version(self.metadata["Version"]), Version):
            op = "=="
        else:
            # non-PEP-440 versions need the arbitrary-equality operator
            op = "==="

        self.req = Requirement(
            "".join([
                self.metadata["Name"],
                op,
                self.metadata["Version"],
            ])
        )
+
    def warn_on_mismatching_name(self):
        # type: () -> None
        """Warn, and adopt the metadata's project name, when the generated
        metadata names a different project than was requested."""
        metadata_name = canonicalize_name(self.metadata["Name"])
        if canonicalize_name(self.req.name) == metadata_name:
            # Everything is fine.
            return

        # If we're here, there's a mismatch. Log a warning about it.
        logger.warning(
            'Generating metadata for package %s '
            'produced metadata for project name %s. Fix your '
            '#egg=%s fragments.',
            self.name, metadata_name, self.name
        )
        self.req = Requirement(metadata_name)
+
    def remove_temporary_source(self):
        # type: () -> None
        """Remove the source files from this requirement, if they are marked
        for deletion"""
        if self.source_dir and has_delete_marker_file(self.source_dir):
            logger.debug('Removing source in %s', self.source_dir)
            rmtree(self.source_dir)
        self.source_dir = None
        if self._temp_build_dir:
            self._temp_build_dir.cleanup()
            self._temp_build_dir = None
        self.build_env.cleanup()
+
    def check_if_exists(self, use_user_site):
        # type: (bool) -> None
        """Find an installed distribution that satisfies or conflicts
        with this requirement, and set self.satisfied_by or
        self.should_reinstall appropriately.
        """
        if self.req is None:
            return
        # get_distribution() will resolve the entire list of requirements
        # anyway, and we've already determined that we need the requirement
        # in question, so strip the marker so that we don't try to
        # evaluate it.
        no_marker = Requirement(str(self.req))
        no_marker.marker = None
        try:
            self.satisfied_by = pkg_resources.get_distribution(str(no_marker))
        except pkg_resources.DistributionNotFound:
            # nothing installed at all: leave satisfied_by as None
            return
        except pkg_resources.VersionConflict:
            # an incompatible version is installed
            existing_dist = pkg_resources.get_distribution(
                self.req.name
            )
            if use_user_site:
                if dist_in_usersite(existing_dist):
                    self.should_reinstall = True
                elif (running_under_virtualenv() and
                        dist_in_site_packages(existing_dist)):
                    raise InstallationError(
                        "Will not install to the user site because it will "
                        "lack sys.path precedence to %s in %s" %
                        (existing_dist.project_name, existing_dist.location)
                    )
            else:
                self.should_reinstall = True
        else:
            if self.editable and self.satisfied_by:
                self.should_reinstall = True
                # when installing editables, nothing pre-existing should ever
                # satisfy
                self.satisfied_by = None
+
+ # Things valid for wheels
+ @property
+ def is_wheel(self):
+ # type: () -> bool
+ if not self.link:
+ return False
+ return self.link.is_wheel
+
+ # Things valid for sdists
+ @property
+ def unpacked_source_directory(self):
+ # type: () -> str
+ return os.path.join(
+ self.source_dir,
+ self.link and self.link.subdirectory_fragment or '')
+
    @property
    def setup_py_path(self):
        # type: () -> str
        """Path to the project's setup.py inside the unpacked source tree."""
        assert self.source_dir, "No source dir for %s" % self
        setup_py = os.path.join(self.unpacked_source_directory, 'setup.py')

        # Python2 __file__ should not be unicode
        if six.PY2 and isinstance(setup_py, six.text_type):
            setup_py = setup_py.encode(sys.getfilesystemencoding())

        return setup_py
+
    @property
    def pyproject_toml_path(self):
        # type: () -> str
        """Path to the project's pyproject.toml inside the source tree."""
        assert self.source_dir, "No source dir for %s" % self
        return make_pyproject_path(self.unpacked_source_directory)
+
    def load_pyproject_toml(self):
        # type: () -> None
        """Load the pyproject.toml file.

        After calling this routine, all of the attributes related to PEP 517
        processing for this requirement have been set. In particular, the
        use_pep517 attribute can be used to determine whether we should
        follow the PEP 517 or legacy (setup.py) code path.
        """
        # module-level helper of the same name, not recursion
        pyproject_toml_data = load_pyproject_toml(
            self.use_pep517,
            self.pyproject_toml_path,
            self.setup_py_path,
            str(self)
        )

        if pyproject_toml_data is None:
            # no usable pyproject.toml: fall back to the legacy code path
            self.use_pep517 = False
            return

        self.use_pep517 = True
        requires, backend, check, backend_path = pyproject_toml_data
        self.requirements_to_check = check
        self.pyproject_requires = requires
        self.pep517_backend = Pep517HookCaller(
            self.unpacked_source_directory, backend, backend_path=backend_path,
        )
+
    def _generate_metadata(self):
        # type: () -> str
        """Invokes metadata generator functions, with the required arguments.

        Returns the path of the directory the metadata was written to.
        """
        if not self.use_pep517:
            # legacy path: setup.py egg_info
            assert self.unpacked_source_directory

            return generate_metadata_legacy(
                build_env=self.build_env,
                setup_py_path=self.setup_py_path,
                source_dir=self.unpacked_source_directory,
                editable=self.editable,
                isolated=self.isolated,
                details=self.name or "from {}".format(self.link)
            )

        assert self.pep517_backend is not None

        return generate_metadata(
            build_env=self.build_env,
            backend=self.pep517_backend,
        )
+
    def prepare_metadata(self):
        # type: () -> None
        """Ensure that project metadata is available.

        Under PEP 517, call the backend hook to prepare the metadata.
        Under legacy processing, call setup.py egg-info.
        """
        assert self.source_dir

        with indent_log():
            self.metadata_directory = self._generate_metadata()

        # Act on the newly generated metadata, based on the name and version.
        if not self.name:
            self._set_requirement()
        else:
            self.warn_on_mismatching_name()

        self.assert_source_matches_version()
+
+ @property
+ def metadata(self):
+ # type: () -> Any
+ if not hasattr(self, '_metadata'):
+ self._metadata = get_metadata(self.get_dist())
+
+ return self._metadata
+
    def get_dist(self):
        # type: () -> Distribution
        """Distribution object built from the generated metadata directory."""
        return _get_dist(self.metadata_directory)
+
    def assert_source_matches_version(self):
        # type: () -> None
        """Log a warning when the unpacked source's version does not satisfy
        the requested specifier; despite the name, this does not raise."""
        assert self.source_dir
        version = self.metadata['version']
        if self.req.specifier and version not in self.req.specifier:
            logger.warning(
                'Requested %s, but installing version %s',
                self,
                version,
            )
        else:
            logger.debug(
                'Source in %s has version %s, which satisfies requirement %s',
                display_path(self.source_dir),
                version,
                self,
            )
+
+ # For both source distributions and editables
+ def ensure_has_source_dir(self, parent_dir):
+ # type: (str) -> None
+ """Ensure that a source_dir is set.
+
+ This will create a temporary build dir if the name of the requirement
+ isn't known yet.
+
+ :param parent_dir: The ideal pip parent_dir for the source_dir.
+ Generally src_dir for editables and build_dir for sdists.
+ :return: self.source_dir
+ """
+ if self.source_dir is None:
+ self.source_dir = self.ensure_build_location(parent_dir)
+
+ # For editable installations
    def update_editable(self, obtain=True):
        # type: (bool) -> None
        """Sync an editable VCS checkout: obtain (clone/update) when
        ``obtain`` is True, otherwise export it."""
        if not self.link:
            logger.debug(
                "Cannot update repository at %s; repository location is "
                "unknown",
                self.source_dir,
            )
            return
        assert self.editable
        assert self.source_dir
        if self.link.scheme == 'file':
            # Static paths don't get updated
            return
        # Editable VCS URLs look like "<vcs>+<real-url>".
        assert '+' in self.link.url, "bad url: %r" % self.link.url
        vc_type, url = self.link.url.split('+', 1)
        vcs_backend = vcs.get_backend(vc_type)
        if vcs_backend:
            if not self.link.is_vcs:
                reason = (
                    "This form of VCS requirement is being deprecated: {}."
                ).format(
                    self.link.url
                )
                replacement = None
                if self.link.url.startswith("git+git@"):
                    replacement = (
                        "git+https://git@example.com/..., "
                        "git+ssh://git@example.com/..., "
                        "or the insecure git+git://git@example.com/..."
                    )
                deprecated(reason, replacement, gone_in="21.0", issue=7554)
            hidden_url = hide_url(self.link.url)
            if obtain:
                vcs_backend.obtain(self.source_dir, url=hidden_url)
            else:
                vcs_backend.export(self.source_dir, url=hidden_url)
        else:
            assert 0, (
                'Unexpected version control type (in %s): %s'
                % (self.link, vc_type))
+
+ # Top-level Actions
+ def uninstall(self, auto_confirm=False, verbose=False):
+ # type: (bool, bool) -> Optional[UninstallPathSet]
+ """
+ Uninstall the distribution currently satisfying this requirement.
+
+ Prompts before removing or modifying files unless
+ ``auto_confirm`` is True.
+
+ Refuses to delete or modify files outside of ``sys.prefix`` -
+ thus uninstallation within a virtual environment can only
+ modify that virtual environment, even if the virtualenv is
+ linked to global site-packages.
+
+ """
+ assert self.req
+ try:
+ dist = pkg_resources.get_distribution(self.req.name)
+ except pkg_resources.DistributionNotFound:
+ logger.warning("Skipping %s as it is not installed.", self.name)
+ return None
+ else:
+ logger.info('Found existing installation: %s', dist)
+
+ uninstalled_pathset = UninstallPathSet.from_dist(dist)
+ uninstalled_pathset.remove(auto_confirm, verbose)
+ return uninstalled_pathset
+
    def _get_archive_name(self, path, parentdir, rootdir):
        # type: (str, str, str) -> str
        """Return the in-archive name for ``path`` (relative to ``rootdir``),
        prefixed with the project name and using '/' separators."""

        def _clean_zip_name(name, prefix):
            # type: (str, str) -> str
            assert name.startswith(prefix + os.path.sep), (
                "name %r doesn't start with prefix %r" % (name, prefix)
            )
            name = name[len(prefix) + 1:]
            name = name.replace(os.path.sep, '/')
            return name

        path = os.path.join(parentdir, path)
        name = _clean_zip_name(path, rootdir)
        return self.name + '/' + name
+
    def archive(self, build_dir):
        # type: (str) -> None
        """Saves archive to provided build_dir.

        Used for saving downloaded VCS requirements as part of `pip download`.
        """
        assert self.source_dir

        create_archive = True
        archive_name = '%s-%s.zip' % (self.name, self.metadata["version"])
        archive_path = os.path.join(build_dir, archive_name)

        # An existing archive prompts the user interactively.
        if os.path.exists(archive_path):
            response = ask_path_exists(
                'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort ' %
                display_path(archive_path), ('i', 'w', 'b', 'a'))
            if response == 'i':
                create_archive = False
            elif response == 'w':
                logger.warning('Deleting %s', display_path(archive_path))
                os.remove(archive_path)
            elif response == 'b':
                dest_file = backup_dir(archive_path)
                logger.warning(
                    'Backing up %s to %s',
                    display_path(archive_path),
                    display_path(dest_file),
                )
                shutil.move(archive_path, dest_file)
            elif response == 'a':
                sys.exit(-1)

        if not create_archive:
            return

        zip_output = zipfile.ZipFile(
            archive_path, 'w', zipfile.ZIP_DEFLATED, allowZip64=True,
        )
        with zip_output:
            dir = os.path.normcase(
                os.path.abspath(self.unpacked_source_directory)
            )
            for dirpath, dirnames, filenames in os.walk(dir):
                # pip's own metadata directory is never archived
                if 'pip-egg-info' in dirnames:
                    dirnames.remove('pip-egg-info')
                for dirname in dirnames:
                    dir_arcname = self._get_archive_name(
                        dirname, parentdir=dirpath, rootdir=dir,
                    )
                    zipdir = zipfile.ZipInfo(dir_arcname + '/')
                    zipdir.external_attr = 0x1ED << 16  # 0o755
                    zip_output.writestr(zipdir, '')
                for filename in filenames:
                    if filename == PIP_DELETE_MARKER_FILENAME:
                        continue
                    file_arcname = self._get_archive_name(
                        filename, parentdir=dirpath, rootdir=dir,
                    )
                    filename = os.path.join(dirpath, filename)
                    zip_output.write(filename, file_arcname)

        logger.info('Saved %s', display_path(archive_path))
+
    def install(
        self,
        install_options,  # type: List[str]
        global_options=None,  # type: Optional[Sequence[str]]
        root=None,  # type: Optional[str]
        home=None,  # type: Optional[str]
        prefix=None,  # type: Optional[str]
        warn_script_location=True,  # type: bool
        use_user_site=False,  # type: bool
        pycompile=True  # type: bool
    ):
        # type: (...) -> None
        """Install this requirement, dispatching to the editable, wheel,
        or legacy setup.py path; sets install_succeeded on success."""
        scheme = get_scheme(
            self.name,
            user=use_user_site,
            home=home,
            root=root,
            isolated=self.isolated,
            prefix=prefix,
        )

        global_options = global_options if global_options is not None else []
        if self.editable:
            install_editable_legacy(
                install_options,
                global_options,
                prefix=prefix,
                home=home,
                use_user_site=use_user_site,
                name=self.name,
                setup_py_path=self.setup_py_path,
                isolated=self.isolated,
                build_env=self.build_env,
                unpacked_source_directory=self.unpacked_source_directory,
            )
            self.install_succeeded = True
            return

        if self.is_wheel:
            assert self.local_file_path
            install_wheel(
                self.name,
                self.local_file_path,
                scheme=scheme,
                req_description=str(self.req),
                pycompile=pycompile,
                warn_script_location=warn_script_location,
            )
            self.install_succeeded = True
            return

        # Fallback: legacy setup.py install
        install_legacy(
            self,
            install_options=install_options,
            global_options=global_options,
            root=root,
            home=home,
            prefix=prefix,
            use_user_site=use_user_site,
            pycompile=pycompile,
            scheme=scheme,
        )
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/req_set.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/req_set.py
new file mode 100644
index 0000000000000000000000000000000000000000..087ac5925f52c99345cffe693d6a392e39bd70c4
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/req_set.py
@@ -0,0 +1,209 @@
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+from __future__ import absolute_import
+
+import logging
+from collections import OrderedDict
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal import pep425tags
+from pip._internal.exceptions import InstallationError
+from pip._internal.models.wheel import Wheel
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Dict, Iterable, List, Optional, Tuple
+ from pip._internal.req.req_install import InstallRequirement
+
+
+logger = logging.getLogger(__name__)
+
+
+class RequirementSet(object):
+
+ def __init__(self, check_supported_wheels=True):
+ # type: (bool) -> None
+ """Create a RequirementSet.
+ """
+
+ self.requirements = OrderedDict() # type: Dict[str, InstallRequirement] # noqa: E501
+ self.check_supported_wheels = check_supported_wheels
+
+ self.unnamed_requirements = [] # type: List[InstallRequirement]
+ self.successfully_downloaded = [] # type: List[InstallRequirement]
+ self.reqs_to_cleanup = [] # type: List[InstallRequirement]
+
+ def __str__(self):
+ # type: () -> str
+ requirements = sorted(
+ (req for req in self.requirements.values() if not req.comes_from),
+ key=lambda req: canonicalize_name(req.name),
+ )
+ return ' '.join(str(req.req) for req in requirements)
+
+ def __repr__(self):
+ # type: () -> str
+ requirements = sorted(
+ self.requirements.values(),
+ key=lambda req: canonicalize_name(req.name),
+ )
+
+ format_string = '<{classname} object; {count} requirement(s): {reqs}>'
+ return format_string.format(
+ classname=self.__class__.__name__,
+ count=len(requirements),
+ reqs=', '.join(str(req.req) for req in requirements),
+ )
+
+ def add_unnamed_requirement(self, install_req):
+ # type: (InstallRequirement) -> None
+ assert not install_req.name
+ self.unnamed_requirements.append(install_req)
+
+ def add_named_requirement(self, install_req):
+ # type: (InstallRequirement) -> None
+ assert install_req.name
+
+ project_name = canonicalize_name(install_req.name)
+ self.requirements[project_name] = install_req
+
+ def add_requirement(
+ self,
+ install_req, # type: InstallRequirement
+ parent_req_name=None, # type: Optional[str]
+ extras_requested=None # type: Optional[Iterable[str]]
+ ):
+ # type: (...) -> Tuple[List[InstallRequirement], Optional[InstallRequirement]] # noqa: E501
+ """Add install_req as a requirement to install.
+
+ :param parent_req_name: The name of the requirement that needed this
+ added. The name is used because when multiple unnamed requirements
+ resolve to the same name, we could otherwise end up with dependency
+ links that point outside the Requirements set. parent_req must
+ already be added. Note that None implies that this is a user
+ supplied requirement, vs an inferred one.
+ :param extras_requested: an iterable of extras used to evaluate the
+ environment markers.
+ :return: Additional requirements to scan. That is either [] if
+ the requirement is not applicable, or [install_req] if the
+ requirement is applicable and has just been added.
+ """
+ # If the markers do not match, ignore this requirement.
+ if not install_req.match_markers(extras_requested):
+ logger.info(
+ "Ignoring %s: markers '%s' don't match your environment",
+ install_req.name, install_req.markers,
+ )
+ return [], None
+
+ # If the wheel is not supported, raise an error.
+ # Should check this after filtering out based on environment markers to
+ # allow specifying different wheels based on the environment/OS, in a
+ # single requirements file.
+ if install_req.link and install_req.link.is_wheel:
+ wheel = Wheel(install_req.link.filename)
+ tags = pep425tags.get_supported()
+ if (self.check_supported_wheels and not wheel.supported(tags)):
+ raise InstallationError(
+ "%s is not a supported wheel on this platform." %
+ wheel.filename
+ )
+
+ # This next bit is really a sanity check.
+ assert install_req.is_direct == (parent_req_name is None), (
+ "a direct req shouldn't have a parent and also, "
+ "a non direct req should have a parent"
+ )
+
+ # Unnamed requirements are scanned again and the requirement won't be
+ # added as a dependency until after scanning.
+ if not install_req.name:
+ self.add_unnamed_requirement(install_req)
+ return [install_req], None
+
+ try:
+ existing_req = self.get_requirement(install_req.name)
+ except KeyError:
+ existing_req = None
+
+ has_conflicting_requirement = (
+ parent_req_name is None and
+ existing_req and
+ not existing_req.constraint and
+ existing_req.extras == install_req.extras and
+ existing_req.req.specifier != install_req.req.specifier
+ )
+ if has_conflicting_requirement:
+ raise InstallationError(
+ "Double requirement given: %s (already in %s, name=%r)"
+ % (install_req, existing_req, install_req.name)
+ )
+
+ # When no existing requirement exists, add the requirement as a
+ # dependency and it will be scanned again after.
+ if not existing_req:
+ self.add_named_requirement(install_req)
+ # We'd want to rescan this requirement later
+ return [install_req], install_req
+
+ # Assume there's no need to scan, and that we've already
+ # encountered this for scanning.
+ if install_req.constraint or not existing_req.constraint:
+ return [], existing_req
+
+ does_not_satisfy_constraint = (
+ install_req.link and
+ not (
+ existing_req.link and
+ install_req.link.path == existing_req.link.path
+ )
+ )
+ if does_not_satisfy_constraint:
+ self.reqs_to_cleanup.append(install_req)
+ raise InstallationError(
+ "Could not satisfy constraints for '%s': "
+ "installation from path or url cannot be "
+ "constrained to a version" % install_req.name,
+ )
+ # If we're now installing a constraint, mark the existing
+ # object for real installation.
+ existing_req.constraint = False
+ existing_req.extras = tuple(sorted(
+ set(existing_req.extras) | set(install_req.extras)
+ ))
+ logger.debug(
+ "Setting %s extras to: %s",
+ existing_req, existing_req.extras,
+ )
+ # Return the existing requirement for addition to the parent and
+ # scanning again.
+ return [existing_req], existing_req
+
+ def has_requirement(self, name):
+ # type: (str) -> bool
+ project_name = canonicalize_name(name)
+
+ return (
+ project_name in self.requirements and
+ not self.requirements[project_name].constraint
+ )
+
+ def get_requirement(self, name):
+ # type: (str) -> InstallRequirement
+ project_name = canonicalize_name(name)
+
+ if project_name in self.requirements:
+ return self.requirements[project_name]
+
+ raise KeyError("No project with the name %r" % name)
+
+ def cleanup_files(self):
+ # type: () -> None
+ """Clean up files, remove builds."""
+ logger.debug('Cleaning up...')
+ with indent_log():
+ for req in self.reqs_to_cleanup:
+ req.remove_temporary_source()
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/req_tracker.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/req_tracker.py
new file mode 100644
index 0000000000000000000000000000000000000000..84e0c0419fc7064b05b2de7507a38aeba3c2dfad
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/req_tracker.py
@@ -0,0 +1,150 @@
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+from __future__ import absolute_import
+
+import contextlib
+import errno
+import hashlib
+import logging
+import os
+
+from pip._vendor import contextlib2
+
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from types import TracebackType
+ from typing import Dict, Iterator, Optional, Set, Type, Union
+ from pip._internal.req.req_install import InstallRequirement
+ from pip._internal.models.link import Link
+
+logger = logging.getLogger(__name__)
+
+
+@contextlib.contextmanager
+def update_env_context_manager(**changes):
+ # type: (str) -> Iterator[None]
+ target = os.environ
+
+ # Save values from the target and change them.
+ non_existent_marker = object()
+ saved_values = {} # type: Dict[str, Union[object, str]]
+ for name, new_value in changes.items():
+ try:
+ saved_values[name] = target[name]
+ except KeyError:
+ saved_values[name] = non_existent_marker
+ target[name] = new_value
+
+ try:
+ yield
+ finally:
+ # Restore original values in the target.
+ for name, original_value in saved_values.items():
+ if original_value is non_existent_marker:
+ del target[name]
+ else:
+ assert isinstance(original_value, str) # for mypy
+ target[name] = original_value
+
+
+@contextlib.contextmanager
+def get_requirement_tracker():
+ # type: () -> Iterator[RequirementTracker]
+ root = os.environ.get('PIP_REQ_TRACKER')
+ with contextlib2.ExitStack() as ctx:
+ if root is None:
+ root = ctx.enter_context(
+ TempDirectory(kind='req-tracker')
+ ).path
+ ctx.enter_context(update_env_context_manager(PIP_REQ_TRACKER=root))
+ logger.debug("Initialized build tracking at %s", root)
+
+ with RequirementTracker(root) as tracker:
+ yield tracker
+
+
+class RequirementTracker(object):
+
+ def __init__(self, root):
+ # type: (str) -> None
+ self._root = root
+ self._entries = set() # type: Set[InstallRequirement]
+ logger.debug("Created build tracker: %s", self._root)
+
+ def __enter__(self):
+ # type: () -> RequirementTracker
+ logger.debug("Entered build tracker: %s", self._root)
+ return self
+
+ def __exit__(
+ self,
+ exc_type, # type: Optional[Type[BaseException]]
+ exc_val, # type: Optional[BaseException]
+ exc_tb # type: Optional[TracebackType]
+ ):
+ # type: (...) -> None
+ self.cleanup()
+
+ def _entry_path(self, link):
+ # type: (Link) -> str
+ hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest()
+ return os.path.join(self._root, hashed)
+
+ def add(self, req):
+ # type: (InstallRequirement) -> None
+ """Add an InstallRequirement to build tracking.
+ """
+
+ # Get the file to write information about this requirement.
+ entry_path = self._entry_path(req.link)
+
+ # Try reading from the file. If it exists and can be read from, a build
+ # is already in progress, so a LookupError is raised.
+ try:
+ with open(entry_path) as fp:
+ contents = fp.read()
+ except IOError as e:
+ # if the error is anything other than "file does not exist", raise.
+ if e.errno != errno.ENOENT:
+ raise
+ else:
+ message = '%s is already being built: %s' % (req.link, contents)
+ raise LookupError(message)
+
+ # If we're here, req should really not be building already.
+ assert req not in self._entries
+
+ # Start tracking this requirement.
+ with open(entry_path, 'w') as fp:
+ fp.write(str(req))
+ self._entries.add(req)
+
+ logger.debug('Added %s to build tracker %r', req, self._root)
+
+ def remove(self, req):
+ # type: (InstallRequirement) -> None
+ """Remove an InstallRequirement from build tracking.
+ """
+
+ # Delete the created file and the corresponding entries.
+ os.unlink(self._entry_path(req.link))
+ self._entries.remove(req)
+
+ logger.debug('Removed %s from build tracker %r', req, self._root)
+
+ def cleanup(self):
+ # type: () -> None
+ for req in set(self._entries):
+ self.remove(req)
+
+ logger.debug("Removed build tracker: %r", self._root)
+
+ @contextlib.contextmanager
+ def track(self, req):
+ # type: (InstallRequirement) -> Iterator[None]
+ self.add(req)
+ yield
+ self.remove(req)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/req_uninstall.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/req_uninstall.py
new file mode 100644
index 0000000000000000000000000000000000000000..5971b130ec029478d59ea1761630605deb4a8b39
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/req/req_uninstall.py
@@ -0,0 +1,644 @@
+from __future__ import absolute_import
+
+import csv
+import functools
+import logging
+import os
+import sys
+import sysconfig
+
+from pip._vendor import pkg_resources
+
+from pip._internal.exceptions import UninstallationError
+from pip._internal.locations import bin_py, bin_user
+from pip._internal.utils.compat import WINDOWS, cache_from_source, uses_pycache
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import (
+ FakeFile,
+ ask,
+ dist_in_usersite,
+ dist_is_local,
+ egg_link_path,
+ is_local,
+ normalize_path,
+ renames,
+ rmtree,
+)
+from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Any, Callable, Dict, Iterable, Iterator, List, Optional, Set, Tuple,
+ )
+ from pip._vendor.pkg_resources import Distribution
+
+logger = logging.getLogger(__name__)
+
+
+def _script_names(dist, script_name, is_gui):
+ # type: (Distribution, str, bool) -> List[str]
+ """Create the fully qualified name of the files created by
+ {console,gui}_scripts for the given ``dist``.
+ Returns the list of file names
+ """
+ if dist_in_usersite(dist):
+ bin_dir = bin_user
+ else:
+ bin_dir = bin_py
+ exe_name = os.path.join(bin_dir, script_name)
+ paths_to_remove = [exe_name]
+ if WINDOWS:
+ paths_to_remove.append(exe_name + '.exe')
+ paths_to_remove.append(exe_name + '.exe.manifest')
+ if is_gui:
+ paths_to_remove.append(exe_name + '-script.pyw')
+ else:
+ paths_to_remove.append(exe_name + '-script.py')
+ return paths_to_remove
+
+
+def _unique(fn):
+ # type: (Callable[..., Iterator[Any]]) -> Callable[..., Iterator[Any]]
+ @functools.wraps(fn)
+ def unique(*args, **kw):
+ # type: (Any, Any) -> Iterator[Any]
+ seen = set() # type: Set[Any]
+ for item in fn(*args, **kw):
+ if item not in seen:
+ seen.add(item)
+ yield item
+ return unique
+
+
+@_unique
+def uninstallation_paths(dist):
+ # type: (Distribution) -> Iterator[str]
+ """
+ Yield all the uninstallation paths for dist based on RECORD-without-.py[co]
+
+ Yield paths to all the files in RECORD. For each .py file in RECORD, add
+ the .pyc and .pyo in the same directory.
+
+ UninstallPathSet.add() takes care of the __pycache__ .py[co].
+ """
+ r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD')))
+ for row in r:
+ path = os.path.join(dist.location, row[0])
+ yield path
+ if path.endswith('.py'):
+ dn, fn = os.path.split(path)
+ base = fn[:-3]
+ path = os.path.join(dn, base + '.pyc')
+ yield path
+ path = os.path.join(dn, base + '.pyo')
+ yield path
+
+
+def compact(paths):
+ # type: (Iterable[str]) -> Set[str]
+ """Compact a path set to contain the minimal number of paths
+ necessary to contain all paths in the set. If /a/path/ and
+ /a/path/to/a/file.txt are both in the set, leave only the
+ shorter path."""
+
+ sep = os.path.sep
+ short_paths = set() # type: Set[str]
+ for path in sorted(paths, key=len):
+ should_skip = any(
+ path.startswith(shortpath.rstrip("*")) and
+ path[len(shortpath.rstrip("*").rstrip(sep))] == sep
+ for shortpath in short_paths
+ )
+ if not should_skip:
+ short_paths.add(path)
+ return short_paths
+
+
+def compress_for_rename(paths):
+ # type: (Iterable[str]) -> Set[str]
+ """Returns a set containing the paths that need to be renamed.
+
+ This set may include directories when the original sequence of paths
+ included every file on disk.
+ """
+ case_map = dict((os.path.normcase(p), p) for p in paths)
+ remaining = set(case_map)
+ unchecked = sorted(set(os.path.split(p)[0]
+ for p in case_map.values()), key=len)
+ wildcards = set() # type: Set[str]
+
+ def norm_join(*a):
+ # type: (str) -> str
+ return os.path.normcase(os.path.join(*a))
+
+ for root in unchecked:
+ if any(os.path.normcase(root).startswith(w)
+ for w in wildcards):
+ # This directory has already been handled.
+ continue
+
+ all_files = set() # type: Set[str]
+ all_subdirs = set() # type: Set[str]
+ for dirname, subdirs, files in os.walk(root):
+ all_subdirs.update(norm_join(root, dirname, d)
+ for d in subdirs)
+ all_files.update(norm_join(root, dirname, f)
+ for f in files)
+ # If all the files we found are in our remaining set of files to
+ # remove, then remove them from the latter set and add a wildcard
+ # for the directory.
+ if not (all_files - remaining):
+ remaining.difference_update(all_files)
+ wildcards.add(root + os.sep)
+
+ return set(map(case_map.__getitem__, remaining)) | wildcards
+
+
+def compress_for_output_listing(paths):
+ # type: (Iterable[str]) -> Tuple[Set[str], Set[str]]
+ """Returns a tuple of 2 sets of which paths to display to user
+
+ The first set contains paths that would be deleted. Files of a package
+ are not added and the top-level directory of the package has a '*' added
+ at the end - to signify that all it's contents are removed.
+
+ The second set contains files that would have been skipped in the above
+ folders.
+ """
+
+ will_remove = set(paths)
+ will_skip = set()
+
+ # Determine folders and files
+ folders = set()
+ files = set()
+ for path in will_remove:
+ if path.endswith(".pyc"):
+ continue
+ if path.endswith("__init__.py") or ".dist-info" in path:
+ folders.add(os.path.dirname(path))
+ files.add(path)
+
+ # probably this one https://github.com/python/mypy/issues/390
+ _normcased_files = set(map(os.path.normcase, files)) # type: ignore
+
+ folders = compact(folders)
+
+ # This walks the tree using os.walk to not miss extra folders
+ # that might get added.
+ for folder in folders:
+ for dirpath, _, dirfiles in os.walk(folder):
+ for fname in dirfiles:
+ if fname.endswith(".pyc"):
+ continue
+
+ file_ = os.path.join(dirpath, fname)
+ if (os.path.isfile(file_) and
+ os.path.normcase(file_) not in _normcased_files):
+ # We are skipping this file. Add it to the set.
+ will_skip.add(file_)
+
+ will_remove = files | {
+ os.path.join(folder, "*") for folder in folders
+ }
+
+ return will_remove, will_skip
+
+
+class StashedUninstallPathSet(object):
+ """A set of file rename operations to stash files while
+ tentatively uninstalling them."""
+ def __init__(self):
+ # type: () -> None
+ # Mapping from source file root to [Adjacent]TempDirectory
+ # for files under that directory.
+ self._save_dirs = {} # type: Dict[str, TempDirectory]
+ # (old path, new path) tuples for each move that may need
+ # to be undone.
+ self._moves = [] # type: List[Tuple[str, str]]
+
+ def _get_directory_stash(self, path):
+ # type: (str) -> str
+ """Stashes a directory.
+
+ Directories are stashed adjacent to their original location if
+ possible, or else moved/copied into the user's temp dir."""
+
+ try:
+ save_dir = AdjacentTempDirectory(path) # type: TempDirectory
+ except OSError:
+ save_dir = TempDirectory(kind="uninstall")
+ self._save_dirs[os.path.normcase(path)] = save_dir
+
+ return save_dir.path
+
+ def _get_file_stash(self, path):
+ # type: (str) -> str
+ """Stashes a file.
+
+ If no root has been provided, one will be created for the directory
+ in the user's temp directory."""
+ path = os.path.normcase(path)
+ head, old_head = os.path.dirname(path), None
+ save_dir = None
+
+ while head != old_head:
+ try:
+ save_dir = self._save_dirs[head]
+ break
+ except KeyError:
+ pass
+ head, old_head = os.path.dirname(head), head
+ else:
+ # Did not find any suitable root
+ head = os.path.dirname(path)
+ save_dir = TempDirectory(kind='uninstall')
+ self._save_dirs[head] = save_dir
+
+ relpath = os.path.relpath(path, head)
+ if relpath and relpath != os.path.curdir:
+ return os.path.join(save_dir.path, relpath)
+ return save_dir.path
+
+ def stash(self, path):
+ # type: (str) -> str
+ """Stashes the directory or file and returns its new location.
+ Handle symlinks as files to avoid modifying the symlink targets.
+ """
+ path_is_dir = os.path.isdir(path) and not os.path.islink(path)
+ if path_is_dir:
+ new_path = self._get_directory_stash(path)
+ else:
+ new_path = self._get_file_stash(path)
+
+ self._moves.append((path, new_path))
+ if (path_is_dir and os.path.isdir(new_path)):
+ # If we're moving a directory, we need to
+ # remove the destination first or else it will be
+ # moved to inside the existing directory.
+ # We just created new_path ourselves, so it will
+ # be removable.
+ os.rmdir(new_path)
+ renames(path, new_path)
+ return new_path
+
+ def commit(self):
+ # type: () -> None
+ """Commits the uninstall by removing stashed files."""
+ for _, save_dir in self._save_dirs.items():
+ save_dir.cleanup()
+ self._moves = []
+ self._save_dirs = {}
+
+ def rollback(self):
+ # type: () -> None
+ """Undoes the uninstall by moving stashed files back."""
+ for p in self._moves:
+ logger.info("Moving to %s\n from %s", *p)
+
+ for new_path, path in self._moves:
+ try:
+ logger.debug('Replacing %s from %s', new_path, path)
+ if os.path.isfile(new_path) or os.path.islink(new_path):
+ os.unlink(new_path)
+ elif os.path.isdir(new_path):
+ rmtree(new_path)
+ renames(path, new_path)
+ except OSError as ex:
+ logger.error("Failed to restore %s", new_path)
+ logger.debug("Exception: %s", ex)
+
+ self.commit()
+
+ @property
+ def can_rollback(self):
+ # type: () -> bool
+ return bool(self._moves)
+
+
+class UninstallPathSet(object):
+ """A set of file paths to be removed in the uninstallation of a
+ requirement."""
+ def __init__(self, dist):
+ # type: (Distribution) -> None
+ self.paths = set() # type: Set[str]
+ self._refuse = set() # type: Set[str]
+ self.pth = {} # type: Dict[str, UninstallPthEntries]
+ self.dist = dist
+ self._moved_paths = StashedUninstallPathSet()
+
+ def _permitted(self, path):
+ # type: (str) -> bool
+ """
+ Return True if the given path is one we are permitted to
+ remove/modify, False otherwise.
+
+ """
+ return is_local(path)
+
+ def add(self, path):
+ # type: (str) -> None
+ head, tail = os.path.split(path)
+
+ # we normalize the head to resolve parent directory symlinks, but not
+ # the tail, since we only want to uninstall symlinks, not their targets
+ path = os.path.join(normalize_path(head), os.path.normcase(tail))
+
+ if not os.path.exists(path):
+ return
+ if self._permitted(path):
+ self.paths.add(path)
+ else:
+ self._refuse.add(path)
+
+ # __pycache__ files can show up after 'installed-files.txt' is created,
+ # due to imports
+ if os.path.splitext(path)[1] == '.py' and uses_pycache:
+ self.add(cache_from_source(path))
+
+ def add_pth(self, pth_file, entry):
+ # type: (str, str) -> None
+ pth_file = normalize_path(pth_file)
+ if self._permitted(pth_file):
+ if pth_file not in self.pth:
+ self.pth[pth_file] = UninstallPthEntries(pth_file)
+ self.pth[pth_file].add(entry)
+ else:
+ self._refuse.add(pth_file)
+
+ def remove(self, auto_confirm=False, verbose=False):
+ # type: (bool, bool) -> None
+ """Remove paths in ``self.paths`` with confirmation (unless
+ ``auto_confirm`` is True)."""
+
+ if not self.paths:
+ logger.info(
+ "Can't uninstall '%s'. No files were found to uninstall.",
+ self.dist.project_name,
+ )
+ return
+
+ dist_name_version = (
+ self.dist.project_name + "-" + self.dist.version
+ )
+ logger.info('Uninstalling %s:', dist_name_version)
+
+ with indent_log():
+ if auto_confirm or self._allowed_to_proceed(verbose):
+ moved = self._moved_paths
+
+ for_rename = compress_for_rename(self.paths)
+
+ for path in sorted(compact(for_rename)):
+ moved.stash(path)
+ logger.debug('Removing file or directory %s', path)
+
+ for pth in self.pth.values():
+ pth.remove()
+
+ logger.info('Successfully uninstalled %s', dist_name_version)
+
+ def _allowed_to_proceed(self, verbose):
+ # type: (bool) -> bool
+ """Display which files would be deleted and prompt for confirmation
+ """
+
+ def _display(msg, paths):
+ # type: (str, Iterable[str]) -> None
+ if not paths:
+ return
+
+ logger.info(msg)
+ with indent_log():
+ for path in sorted(compact(paths)):
+ logger.info(path)
+
+ if not verbose:
+ will_remove, will_skip = compress_for_output_listing(self.paths)
+ else:
+ # In verbose mode, display all the files that are going to be
+ # deleted.
+ will_remove = set(self.paths)
+ will_skip = set()
+
+ _display('Would remove:', will_remove)
+ _display('Would not remove (might be manually added):', will_skip)
+ _display('Would not remove (outside of prefix):', self._refuse)
+ if verbose:
+ _display('Will actually move:', compress_for_rename(self.paths))
+
+ return ask('Proceed (y/n)? ', ('y', 'n')) == 'y'
+
+ def rollback(self):
+ # type: () -> None
+ """Rollback the changes previously made by remove()."""
+ if not self._moved_paths.can_rollback:
+ logger.error(
+ "Can't roll back %s; was not uninstalled",
+ self.dist.project_name,
+ )
+ return
+ logger.info('Rolling back uninstall of %s', self.dist.project_name)
+ self._moved_paths.rollback()
+ for pth in self.pth.values():
+ pth.rollback()
+
+ def commit(self):
+ # type: () -> None
+ """Remove temporary save dir: rollback will no longer be possible."""
+ self._moved_paths.commit()
+
+ @classmethod
+ def from_dist(cls, dist):
+ # type: (Distribution) -> UninstallPathSet
+ dist_path = normalize_path(dist.location)
+ if not dist_is_local(dist):
+ logger.info(
+ "Not uninstalling %s at %s, outside environment %s",
+ dist.key,
+ dist_path,
+ sys.prefix,
+ )
+ return cls(dist)
+
+ if dist_path in {p for p in {sysconfig.get_path("stdlib"),
+ sysconfig.get_path("platstdlib")}
+ if p}:
+ logger.info(
+ "Not uninstalling %s at %s, as it is in the standard library.",
+ dist.key,
+ dist_path,
+ )
+ return cls(dist)
+
+ paths_to_remove = cls(dist)
+ develop_egg_link = egg_link_path(dist)
+ develop_egg_link_egg_info = '{}.egg-info'.format(
+ pkg_resources.to_filename(dist.project_name))
+ egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info)
+ # Special case for distutils installed package
+ distutils_egg_info = getattr(dist._provider, 'path', None)
+
+ # Uninstall cases order do matter as in the case of 2 installs of the
+ # same package, pip needs to uninstall the currently detected version
+ if (egg_info_exists and dist.egg_info.endswith('.egg-info') and
+ not dist.egg_info.endswith(develop_egg_link_egg_info)):
+ # if dist.egg_info.endswith(develop_egg_link_egg_info), we
+ # are in fact in the develop_egg_link case
+ paths_to_remove.add(dist.egg_info)
+ if dist.has_metadata('installed-files.txt'):
+ for installed_file in dist.get_metadata(
+ 'installed-files.txt').splitlines():
+ path = os.path.normpath(
+ os.path.join(dist.egg_info, installed_file)
+ )
+ paths_to_remove.add(path)
+ # FIXME: need a test for this elif block
+ # occurs with --single-version-externally-managed/--record outside
+ # of pip
+ elif dist.has_metadata('top_level.txt'):
+ if dist.has_metadata('namespace_packages.txt'):
+ namespaces = dist.get_metadata('namespace_packages.txt')
+ else:
+ namespaces = []
+ for top_level_pkg in [
+ p for p
+ in dist.get_metadata('top_level.txt').splitlines()
+ if p and p not in namespaces]:
+ path = os.path.join(dist.location, top_level_pkg)
+ paths_to_remove.add(path)
+ paths_to_remove.add(path + '.py')
+ paths_to_remove.add(path + '.pyc')
+ paths_to_remove.add(path + '.pyo')
+
+ elif distutils_egg_info:
+ raise UninstallationError(
+ "Cannot uninstall {!r}. It is a distutils installed project "
+ "and thus we cannot accurately determine which files belong "
+ "to it which would lead to only a partial uninstall.".format(
+ dist.project_name,
+ )
+ )
+
+ elif dist.location.endswith('.egg'):
+ # package installed by easy_install
+ # We cannot match on dist.egg_name because it can slightly vary
+ # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
+ paths_to_remove.add(dist.location)
+ easy_install_egg = os.path.split(dist.location)[1]
+ easy_install_pth = os.path.join(os.path.dirname(dist.location),
+ 'easy-install.pth')
+ paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)
+
+ elif egg_info_exists and dist.egg_info.endswith('.dist-info'):
+ for path in uninstallation_paths(dist):
+ paths_to_remove.add(path)
+
+ elif develop_egg_link:
+ # develop egg
+ with open(develop_egg_link, 'r') as fh:
+ link_pointer = os.path.normcase(fh.readline().strip())
+ assert (link_pointer == dist.location), (
+ 'Egg-link %s does not match installed location of %s '
+ '(at %s)' % (link_pointer, dist.project_name, dist.location)
+ )
+ paths_to_remove.add(develop_egg_link)
+ easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
+ 'easy-install.pth')
+ paths_to_remove.add_pth(easy_install_pth, dist.location)
+
+ else:
+ logger.debug(
+ 'Not sure how to uninstall: %s - Check: %s',
+ dist, dist.location,
+ )
+
+ # find distutils scripts= scripts
+ if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
+ for script in dist.metadata_listdir('scripts'):
+ if dist_in_usersite(dist):
+ bin_dir = bin_user
+ else:
+ bin_dir = bin_py
+ paths_to_remove.add(os.path.join(bin_dir, script))
+ if WINDOWS:
+ paths_to_remove.add(os.path.join(bin_dir, script) + '.bat')
+
+ # find console_scripts
+ _scripts_to_remove = []
+ console_scripts = dist.get_entry_map(group='console_scripts')
+ for name in console_scripts.keys():
+ _scripts_to_remove.extend(_script_names(dist, name, False))
+ # find gui_scripts
+ gui_scripts = dist.get_entry_map(group='gui_scripts')
+ for name in gui_scripts.keys():
+ _scripts_to_remove.extend(_script_names(dist, name, True))
+
+ for s in _scripts_to_remove:
+ paths_to_remove.add(s)
+
+ return paths_to_remove
+
+
+class UninstallPthEntries(object):
+ def __init__(self, pth_file):
+ # type: (str) -> None
+ if not os.path.isfile(pth_file):
+ raise UninstallationError(
+ "Cannot remove entries from nonexistent file %s" % pth_file
+ )
+ self.file = pth_file
+ self.entries = set() # type: Set[str]
+ self._saved_lines = None # type: Optional[List[bytes]]
+
+ def add(self, entry):
+ # type: (str) -> None
+ entry = os.path.normcase(entry)
+ # On Windows, os.path.normcase converts the entry to use
+ # backslashes. This is correct for entries that describe absolute
+ # paths outside of site-packages, but all the others use forward
+ # slashes.
+ # os.path.splitdrive is used instead of os.path.isabs because isabs
+ # treats non-absolute paths with drive letter markings like c:foo\bar
+ # as absolute paths. It also does not recognize UNC paths if they don't
+ # have more than "\\sever\share". Valid examples: "\\server\share\" or
+ # "\\server\share\folder". Python 2.7.8+ support UNC in splitdrive.
+ if WINDOWS and not os.path.splitdrive(entry)[0]:
+ entry = entry.replace('\\', '/')
+ self.entries.add(entry)
+
+ def remove(self):
+ # type: () -> None
+ logger.debug('Removing pth entries from %s:', self.file)
+ with open(self.file, 'rb') as fh:
+ # windows uses '\r\n' with py3k, but uses '\n' with py2.x
+ lines = fh.readlines()
+ self._saved_lines = lines
+ if any(b'\r\n' in line for line in lines):
+ endline = '\r\n'
+ else:
+ endline = '\n'
+ # handle missing trailing newline
+ if lines and not lines[-1].endswith(endline.encode("utf-8")):
+ lines[-1] = lines[-1] + endline.encode("utf-8")
+ for entry in self.entries:
+ try:
+ logger.debug('Removing entry: %s', entry)
+ lines.remove((entry + endline).encode("utf-8"))
+ except ValueError:
+ pass
+ with open(self.file, 'wb') as fh:
+ fh.writelines(lines)
+
+ def rollback(self):
+ # type: () -> bool
+ if self._saved_lines is None:
+ logger.error(
+ 'Cannot roll back changes to %s, none were made', self.file
+ )
+ return False
+ logger.debug('Rolling %s back to previous state', self.file)
+ with open(self.file, 'wb') as fh:
+ fh.writelines(self._saved_lines)
+ return True
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/self_outdated_check.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/self_outdated_check.py
new file mode 100644
index 0000000000000000000000000000000000000000..8fc3c594acf96eb8dee7e69c9d835e16cd45cec3
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/self_outdated_check.py
@@ -0,0 +1,242 @@
+# The following comment should be removed at some point in the future.
+# mypy: disallow-untyped-defs=False
+
+from __future__ import absolute_import
+
+import datetime
+import hashlib
+import json
+import logging
+import os.path
+import sys
+
+from pip._vendor import pkg_resources
+from pip._vendor.packaging import version as packaging_version
+from pip._vendor.six import ensure_binary
+
+from pip._internal.index.collector import LinkCollector
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.models.search_scope import SearchScope
+from pip._internal.models.selection_prefs import SelectionPreferences
+from pip._internal.utils.filesystem import (
+ adjacent_tmp_file,
+ check_path_owner,
+ replace,
+)
+from pip._internal.utils.misc import (
+ ensure_dir,
+ get_installed_version,
+ redact_auth_from_url,
+)
+from pip._internal.utils.packaging import get_installer
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ import optparse
+ from optparse import Values
+ from typing import Any, Dict, Text, Union
+
+ from pip._internal.network.session import PipSession
+
+
+SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"
+
+
+logger = logging.getLogger(__name__)
+
+
+def make_link_collector(
+ session, # type: PipSession
+ options, # type: Values
+ suppress_no_index=False, # type: bool
+):
+ # type: (...) -> LinkCollector
+ """
+ :param session: The Session to use to make requests.
+ :param suppress_no_index: Whether to ignore the --no-index option
+ when constructing the SearchScope object.
+ """
+ index_urls = [options.index_url] + options.extra_index_urls
+ if options.no_index and not suppress_no_index:
+ logger.debug(
+ 'Ignoring indexes: %s',
+ ','.join(redact_auth_from_url(url) for url in index_urls),
+ )
+ index_urls = []
+
+ # Make sure find_links is a list before passing to create().
+ find_links = options.find_links or []
+
+ search_scope = SearchScope.create(
+ find_links=find_links, index_urls=index_urls,
+ )
+
+ link_collector = LinkCollector(session=session, search_scope=search_scope)
+
+ return link_collector
+
+
+def _get_statefile_name(key):
+ # type: (Union[str, Text]) -> str
+ key_bytes = ensure_binary(key)
+ name = hashlib.sha224(key_bytes).hexdigest()
+ return name
+
+
+class SelfCheckState(object):
+ def __init__(self, cache_dir):
+ # type: (str) -> None
+ self.state = {} # type: Dict[str, Any]
+ self.statefile_path = None
+
+ # Try to load the existing state
+ if cache_dir:
+ self.statefile_path = os.path.join(
+ cache_dir, "selfcheck", _get_statefile_name(self.key)
+ )
+ try:
+ with open(self.statefile_path) as statefile:
+ self.state = json.load(statefile)
+ except (IOError, ValueError, KeyError):
+ # Explicitly suppressing exceptions, since we don't want to
+ # error out if the cache file is invalid.
+ pass
+
+ @property
+ def key(self):
+ return sys.prefix
+
+ def save(self, pypi_version, current_time):
+ # type: (str, datetime.datetime) -> None
+ # If we do not have a path to cache in, don't bother saving.
+ if not self.statefile_path:
+ return
+
+ # Check to make sure that we own the directory
+ if not check_path_owner(os.path.dirname(self.statefile_path)):
+ return
+
+ # Now that we've ensured the directory is owned by this user, we'll go
+ # ahead and make sure that all our directories are created.
+ ensure_dir(os.path.dirname(self.statefile_path))
+
+ state = {
+ # Include the key so it's easy to tell which pip wrote the
+ # file.
+ "key": self.key,
+ "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
+ "pypi_version": pypi_version,
+ }
+
+ text = json.dumps(state, sort_keys=True, separators=(",", ":"))
+
+ with adjacent_tmp_file(self.statefile_path) as f:
+ f.write(ensure_binary(text))
+
+ try:
+ # Since we have a prefix-specific state file, we can just
+ # overwrite whatever is there, no need to check.
+ replace(f.name, self.statefile_path)
+ except OSError:
+ # Best effort.
+ pass
+
+
+def was_installed_by_pip(pkg):
+ # type: (str) -> bool
+ """Checks whether pkg was installed by pip
+
+ This is used not to display the upgrade message when pip is in fact
+ installed by system package manager, such as dnf on Fedora.
+ """
+ try:
+ dist = pkg_resources.get_distribution(pkg)
+ return "pip" == get_installer(dist)
+ except pkg_resources.DistributionNotFound:
+ return False
+
+
+def pip_self_version_check(session, options):
+ # type: (PipSession, optparse.Values) -> None
+ """Check for an update for pip.
+
+ Limit the frequency of checks to once per week. State is stored either in
+ the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix
+ of the pip script path.
+ """
+ installed_version = get_installed_version("pip")
+ if not installed_version:
+ return
+
+ pip_version = packaging_version.parse(installed_version)
+ pypi_version = None
+
+ try:
+ state = SelfCheckState(cache_dir=options.cache_dir)
+
+ current_time = datetime.datetime.utcnow()
+ # Determine if we need to refresh the state
+ if "last_check" in state.state and "pypi_version" in state.state:
+ last_check = datetime.datetime.strptime(
+ state.state["last_check"],
+ SELFCHECK_DATE_FMT
+ )
+ if (current_time - last_check).total_seconds() < 7 * 24 * 60 * 60:
+ pypi_version = state.state["pypi_version"]
+
+ # Refresh the version if we need to or just see if we need to warn
+ if pypi_version is None:
+ # Lets use PackageFinder to see what the latest pip version is
+ link_collector = make_link_collector(
+ session,
+ options=options,
+ suppress_no_index=True,
+ )
+
+ # Pass allow_yanked=False so we don't suggest upgrading to a
+ # yanked version.
+ selection_prefs = SelectionPreferences(
+ allow_yanked=False,
+ allow_all_prereleases=False, # Explicitly set to False
+ )
+
+ finder = PackageFinder.create(
+ link_collector=link_collector,
+ selection_prefs=selection_prefs,
+ )
+ best_candidate = finder.find_best_candidate("pip").best_candidate
+ if best_candidate is None:
+ return
+ pypi_version = str(best_candidate.version)
+
+ # save that we've performed a check
+ state.save(pypi_version, current_time)
+
+ remote_version = packaging_version.parse(pypi_version)
+
+ local_version_is_older = (
+ pip_version < remote_version and
+ pip_version.base_version != remote_version.base_version and
+ was_installed_by_pip('pip')
+ )
+
+ # Determine if our pypi_version is older
+ if not local_version_is_older:
+ return
+
+ # We cannot tell how the current pip is available in the current
+ # command context, so be pragmatic here and suggest the command
+ # that's always available. This does not accommodate spaces in
+ # `sys.executable`.
+ pip_cmd = "{} -m pip".format(sys.executable)
+ logger.warning(
+ "You are using pip version %s; however, version %s is "
+ "available.\nYou should consider upgrading via the "
+ "'%s install --upgrade pip' command.",
+ pip_version, pypi_version, pip_cmd
+ )
+ except Exception:
+ logger.debug(
+ "There was an error checking the latest version of pip",
+ exc_info=True,
+ )
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__init__.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b6fc3a2f4849b77e9d72efb3d979418a1ba216e1
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d612cba4e19a20fc528682c0dd687f848d4ddbf4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/compat.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/compat.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..47ef88f6ab4402a392b835e2904bcb3a3c4a8ae3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/compat.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..967ae7d4c87ccd0c88d84efe0c6085aac3ae78a2
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/distutils_args.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/distutils_args.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a9d36cff0f9f7b120b992240bc9f62b6b5f5fc7a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/distutils_args.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..cba0e631a51c2edcec47418cda4423523f6e525a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..971334b4eef501ab14729afe289dfcb3369b1926
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7bda8a7898d05455a1124c8fd7cd050fc33ea6ba
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..378b529a6c84636d3f66b2a413a4c7d21bef7470
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5d3c780f7be3829daa0e5c67a1f73d57df426c66
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..304384c3a082f92d9b3334fee5d7e71542957475
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/inject_securetransport.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/inject_securetransport.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..74a7b0cb050a18d8e93946b16af6c5df5222703e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/inject_securetransport.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/logging.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/logging.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9034f0e1e5c1a53161fa4ff58abd2afc3db9b202
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/logging.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/marker_files.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/marker_files.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2ef897922fe47e3497a882da0add5ae51c0fcc17
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/marker_files.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/misc.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/misc.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..15e2a5b49591c8008e0d9b178dac847ab80d9e33
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/misc.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/models.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/models.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9713043f6beb76ad7574b6e8d4d0c7088a3ef91f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/models.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1c1308caa1203c7d0bdad7809078eb34a232f08d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/pkg_resources.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/pkg_resources.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..967a787686d502534ab2f4c395663c77f771bbec
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/pkg_resources.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..188bf4daaabd1480510f16d490cefa7d04746d28
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..54fa91c7f1881fb5825e8a84e5abbcc00fa77828
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..aedc0a37df4b2735f63ed4a03b1701288629bc7c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/typing.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/typing.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bc1cdd9036c65c37c81a7355b33b65de7ed6c1c2
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/typing.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/ui.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/ui.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..650ff9fc3bd81485a6dd8117dbff8ba5a10f8d7f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/ui.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..701415d6421f56dad8e227a69dbff54a8efcf14d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/urls.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/urls.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..75987b233a8b891d6dcf7f3bd2fca2b51b583457
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/urls.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a46fea79154cbe452a18bce001503d95fe5901e2
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..108941e2e4367272a7fe07bf29ed169fdb134c2c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/appdirs.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/appdirs.py
new file mode 100644
index 0000000000000000000000000000000000000000..93d17b5a81bdeb3077ba18834a47a37c8d7f4841
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/appdirs.py
@@ -0,0 +1,44 @@
+"""
+This code wraps the vendored appdirs module to so the return values are
+compatible for the current pip code base.
+
+The intention is to rewrite current usages gradually, keeping the tests pass,
+and eventually drop this after all usages are changed.
+"""
+
+from __future__ import absolute_import
+
+import os
+
+from pip._vendor import appdirs as _appdirs
+
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import List
+
+
+def user_cache_dir(appname):
+ # type: (str) -> str
+ return _appdirs.user_cache_dir(appname, appauthor=False)
+
+
+def user_config_dir(appname, roaming=True):
+ # type: (str, bool) -> str
+ return _appdirs.user_config_dir(appname, appauthor=False, roaming=roaming)
+
+
+def user_data_dir(appname, roaming=False):
+ # type: (str, bool) -> str
+ return _appdirs.user_data_dir(appname, appauthor=False, roaming=roaming)
+
+
+# for the discussion regarding site_config_dir locations
+# see <https://github.com/pypa/pip/issues/1733>
+def site_config_dirs(appname):
+ # type: (str) -> List[str]
+ dirval = _appdirs.site_config_dir(appname, appauthor=False, multipath=True)
+ if _appdirs.system not in ["win32", "darwin"]:
+ # always look in /etc directly as well
+ return dirval.split(os.pathsep) + ['/etc']
+ return [dirval]
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/compat.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/compat.py
new file mode 100644
index 0000000000000000000000000000000000000000..6efa52ad2b8daece49acf69daa1196582220f4a3
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/compat.py
@@ -0,0 +1,269 @@
+"""Stuff that differs in different Python versions and platform
+distributions."""
+
+# The following comment should be removed at some point in the future.
+# mypy: disallow-untyped-defs=False
+
+from __future__ import absolute_import, division
+
+import codecs
+import locale
+import logging
+import os
+import shutil
+import sys
+
+from pip._vendor.six import PY2, text_type
+
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional, Text, Tuple, Union
+
+try:
+ import ipaddress
+except ImportError:
+ try:
+ from pip._vendor import ipaddress # type: ignore
+ except ImportError:
+ import ipaddr as ipaddress # type: ignore
+ ipaddress.ip_address = ipaddress.IPAddress # type: ignore
+ ipaddress.ip_network = ipaddress.IPNetwork # type: ignore
+
+
+__all__ = [
+ "ipaddress", "uses_pycache", "console_to_str",
+ "get_path_uid", "stdlib_pkgs", "WINDOWS", "samefile", "get_terminal_size",
+]
+
+
+logger = logging.getLogger(__name__)
+
+if PY2:
+ import imp
+
+ try:
+ cache_from_source = imp.cache_from_source # type: ignore
+ except AttributeError:
+ # does not use __pycache__
+ cache_from_source = None
+
+ uses_pycache = cache_from_source is not None
+else:
+ uses_pycache = True
+ from importlib.util import cache_from_source
+
+
+if PY2:
+ # In Python 2.7, backslashreplace exists
+ # but does not support use for decoding.
+ # We implement our own replace handler for this
+ # situation, so that we can consistently use
+ # backslash replacement for all versions.
+ def backslashreplace_decode_fn(err):
+ raw_bytes = (err.object[i] for i in range(err.start, err.end))
+ # Python 2 gave us characters - convert to numeric bytes
+ raw_bytes = (ord(b) for b in raw_bytes)
+ return u"".join(u"\\x%x" % c for c in raw_bytes), err.end
+ codecs.register_error(
+ "backslashreplace_decode",
+ backslashreplace_decode_fn,
+ )
+ backslashreplace_decode = "backslashreplace_decode"
+else:
+ backslashreplace_decode = "backslashreplace"
+
+
+def has_tls():
+ # type: () -> bool
+ try:
+ import _ssl # noqa: F401 # ignore unused
+ return True
+ except ImportError:
+ pass
+
+ from pip._vendor.urllib3.util import IS_PYOPENSSL
+ return IS_PYOPENSSL
+
+
+def str_to_display(data, desc=None):
+ # type: (Union[bytes, Text], Optional[str]) -> Text
+ """
+ For display or logging purposes, convert a bytes object (or text) to
+ text (e.g. unicode in Python 2) safe for output.
+
+ :param desc: An optional phrase describing the input data, for use in
+ the log message if a warning is logged. Defaults to "Bytes object".
+
+ This function should never error out and so can take a best effort
+ approach. It is okay to be lossy if needed since the return value is
+ just for display.
+
+ We assume the data is in the locale preferred encoding. If it won't
+ decode properly, we warn the user but decode as best we can.
+
+ We also ensure that the output can be safely written to standard output
+ without encoding errors.
+ """
+ if isinstance(data, text_type):
+ return data
+
+ # Otherwise, data is a bytes object (str in Python 2).
+ # First, get the encoding we assume. This is the preferred
+ # encoding for the locale, unless that is not found, or
+ # it is ASCII, in which case assume UTF-8
+ encoding = locale.getpreferredencoding()
+ if (not encoding) or codecs.lookup(encoding).name == "ascii":
+ encoding = "utf-8"
+
+ # Now try to decode the data - if we fail, warn the user and
+ # decode with replacement.
+ try:
+ decoded_data = data.decode(encoding)
+ except UnicodeDecodeError:
+ if desc is None:
+ desc = 'Bytes object'
+ msg_format = '{} does not appear to be encoded as %s'.format(desc)
+ logger.warning(msg_format, encoding)
+ decoded_data = data.decode(encoding, errors=backslashreplace_decode)
+
+ # Make sure we can print the output, by encoding it to the output
+ # encoding with replacement of unencodable characters, and then
+ # decoding again.
+ # We use stderr's encoding because it's less likely to be
+ # redirected and if we don't find an encoding we skip this
+ # step (on the assumption that output is wrapped by something
+ # that won't fail).
+ # The double getattr is to deal with the possibility that we're
+ # being called in a situation where sys.__stderr__ doesn't exist,
+ # or doesn't have an encoding attribute. Neither of these cases
+ # should occur in normal pip use, but there's no harm in checking
+ # in case people use pip in (unsupported) unusual situations.
+ output_encoding = getattr(getattr(sys, "__stderr__", None),
+ "encoding", None)
+
+ if output_encoding:
+ output_encoded = decoded_data.encode(
+ output_encoding,
+ errors="backslashreplace"
+ )
+ decoded_data = output_encoded.decode(output_encoding)
+
+ return decoded_data
+
+
+def console_to_str(data):
+ # type: (bytes) -> Text
+ """Return a string, safe for output, of subprocess output.
+ """
+ return str_to_display(data, desc='Subprocess output')
+
+
+def get_path_uid(path):
+ # type: (str) -> int
+ """
+ Return path's uid.
+
+ Does not follow symlinks:
+ https://github.com/pypa/pip/pull/935#discussion_r5307003
+
+ Placed this function in compat due to differences on AIX and
+ Jython, that should eventually go away.
+
+ :raises OSError: When path is a symlink or can't be read.
+ """
+ if hasattr(os, 'O_NOFOLLOW'):
+ fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW)
+ file_uid = os.fstat(fd).st_uid
+ os.close(fd)
+ else: # AIX and Jython
+ # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW
+ if not os.path.islink(path):
+ # older versions of Jython don't have `os.fstat`
+ file_uid = os.stat(path).st_uid
+ else:
+ # raise OSError for parity with os.O_NOFOLLOW above
+ raise OSError(
+ "%s is a symlink; Will not return uid for symlinks" % path
+ )
+ return file_uid
+
+
+def expanduser(path):
+ # type: (str) -> str
+ """
+ Expand ~ and ~user constructions.
+
+ Includes a workaround for https://bugs.python.org/issue14768
+ """
+ expanded = os.path.expanduser(path)
+ if path.startswith('~/') and expanded.startswith('//'):
+ expanded = expanded[1:]
+ return expanded
+
+
+# packages in the stdlib that may have installation metadata, but should not be
+# considered 'installed'. this theoretically could be determined based on
+# dist.location (py27:`sysconfig.get_paths()['stdlib']`,
+# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may
+# make this ineffective, so hard-coding
+stdlib_pkgs = {"python", "wsgiref", "argparse"}
+
+
+# windows detection, covers cpython and ironpython
+WINDOWS = (sys.platform.startswith("win") or
+ (sys.platform == 'cli' and os.name == 'nt'))
+
+
+def samefile(file1, file2):
+ # type: (str, str) -> bool
+ """Provide an alternative for os.path.samefile on Windows/Python2"""
+ if hasattr(os.path, 'samefile'):
+ return os.path.samefile(file1, file2)
+ else:
+ path1 = os.path.normcase(os.path.abspath(file1))
+ path2 = os.path.normcase(os.path.abspath(file2))
+ return path1 == path2
+
+
+if hasattr(shutil, 'get_terminal_size'):
+ def get_terminal_size():
+ # type: () -> Tuple[int, int]
+ """
+ Returns a tuple (x, y) representing the width(x) and the height(y)
+ in characters of the terminal window.
+ """
+ return tuple(shutil.get_terminal_size()) # type: ignore
+else:
+ def get_terminal_size():
+ # type: () -> Tuple[int, int]
+ """
+ Returns a tuple (x, y) representing the width(x) and the height(y)
+ in characters of the terminal window.
+ """
+ def ioctl_GWINSZ(fd):
+ try:
+ import fcntl
+ import termios
+ import struct
+ cr = struct.unpack_from(
+ 'hh',
+ fcntl.ioctl(fd, termios.TIOCGWINSZ, '12345678')
+ )
+ except Exception:
+ return None
+ if cr == (0, 0):
+ return None
+ return cr
+ cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
+ if not cr:
+ if sys.platform != "win32":
+ try:
+ fd = os.open(os.ctermid(), os.O_RDONLY)
+ cr = ioctl_GWINSZ(fd)
+ os.close(fd)
+ except Exception:
+ pass
+ if not cr:
+ cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
+ return int(cr[1]), int(cr[0])
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/deprecation.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/deprecation.py
new file mode 100644
index 0000000000000000000000000000000000000000..2f20cfd49d32f0bbab7b4719eb2dbdca971b751a
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/deprecation.py
@@ -0,0 +1,104 @@
+"""
+A module that implements tooling to enable easy warnings about deprecations.
+"""
+
+# The following comment should be removed at some point in the future.
+# mypy: disallow-untyped-defs=False
+
+from __future__ import absolute_import
+
+import logging
+import warnings
+
+from pip._vendor.packaging.version import parse
+
+from pip import __version__ as current_version
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Any, Optional
+
+
+DEPRECATION_MSG_PREFIX = "DEPRECATION: "
+
+
+class PipDeprecationWarning(Warning):
+ pass
+
+
+_original_showwarning = None # type: Any
+
+
+# Warnings <-> Logging Integration
+def _showwarning(message, category, filename, lineno, file=None, line=None):
+ if file is not None:
+ if _original_showwarning is not None:
+ _original_showwarning(
+ message, category, filename, lineno, file, line,
+ )
+ elif issubclass(category, PipDeprecationWarning):
+ # We use a specially named logger which will handle all of the
+ # deprecation messages for pip.
+ logger = logging.getLogger("pip._internal.deprecations")
+ logger.warning(message)
+ else:
+ _original_showwarning(
+ message, category, filename, lineno, file, line,
+ )
+
+
+def install_warning_logger():
+ # type: () -> None
+ # Enable our Deprecation Warnings
+ warnings.simplefilter("default", PipDeprecationWarning, append=True)
+
+ global _original_showwarning
+
+ if _original_showwarning is None:
+ _original_showwarning = warnings.showwarning
+ warnings.showwarning = _showwarning
+
+
+def deprecated(reason, replacement, gone_in, issue=None):
+ # type: (str, Optional[str], Optional[str], Optional[int]) -> None
+ """Helper to deprecate existing functionality.
+
+ reason:
+ Textual reason shown to the user about why this functionality has
+ been deprecated.
+ replacement:
+ Textual suggestion shown to the user about what alternative
+ functionality they can use.
+ gone_in:
+ The version of pip does this functionality should get removed in.
+ Raises errors if pip's current version is greater than or equal to
+ this.
+ issue:
+ Issue number on the tracker that would serve as a useful place for
+ users to find related discussion and provide feedback.
+
+ Always pass replacement, gone_in and issue as keyword arguments for clarity
+ at the call site.
+ """
+
+ # Construct a nice message.
+ # This is eagerly formatted as we want it to get logged as if someone
+ # typed this entire message out.
+ sentences = [
+ (reason, DEPRECATION_MSG_PREFIX + "{}"),
+ (gone_in, "pip {} will remove support for this functionality."),
+ (replacement, "A possible replacement is {}."),
+ (issue, (
+ "You can find discussion regarding this at "
+ "https://github.com/pypa/pip/issues/{}."
+ )),
+ ]
+ message = " ".join(
+ template.format(val) for val, template in sentences if val is not None
+ )
+
+ # Raise as an error if it has to be removed.
+ if gone_in is not None and parse(current_version) >= parse(gone_in):
+ raise PipDeprecationWarning(message)
+
+ warnings.warn(message, category=PipDeprecationWarning, stacklevel=2)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/distutils_args.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/distutils_args.py
new file mode 100644
index 0000000000000000000000000000000000000000..e38e402d7330778385f65a440b5b39f7bcbdedb3
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/distutils_args.py
@@ -0,0 +1,48 @@
+from distutils.errors import DistutilsArgError
+from distutils.fancy_getopt import FancyGetopt
+
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Dict, List
+
+
+_options = [
+ ("exec-prefix=", None, ""),
+ ("home=", None, ""),
+ ("install-base=", None, ""),
+ ("install-data=", None, ""),
+ ("install-headers=", None, ""),
+ ("install-lib=", None, ""),
+ ("install-platlib=", None, ""),
+ ("install-purelib=", None, ""),
+ ("install-scripts=", None, ""),
+ ("prefix=", None, ""),
+ ("root=", None, ""),
+ ("user", None, ""),
+]
+
+
+# typeshed doesn't permit Tuple[str, None, str], see python/typeshed#3469.
+_distutils_getopt = FancyGetopt(_options) # type: ignore
+
+
+def parse_distutils_args(args):
+ # type: (List[str]) -> Dict[str, str]
+ """Parse provided arguments, returning an object that has the
+ matched arguments.
+
+ Any unknown arguments are ignored.
+ """
+ result = {}
+ for arg in args:
+ try:
+ _, match = _distutils_getopt.getopt(args=[arg])
+ except DistutilsArgError:
+ # We don't care about any other options, which here may be
+ # considered unrecognized since our option list is not
+ # exhaustive.
+ pass
+ else:
+ result.update(match.__dict__)
+ return result
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/encoding.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/encoding.py
new file mode 100644
index 0000000000000000000000000000000000000000..ab4d4b98e3e1bca6f28db1ae114e48933a36be4e
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/encoding.py
@@ -0,0 +1,42 @@
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+import codecs
+import locale
+import re
+import sys
+
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import List, Tuple, Text
+
+BOMS = [
+ (codecs.BOM_UTF8, 'utf-8'),
+ (codecs.BOM_UTF16, 'utf-16'),
+ (codecs.BOM_UTF16_BE, 'utf-16-be'),
+ (codecs.BOM_UTF16_LE, 'utf-16-le'),
+ (codecs.BOM_UTF32, 'utf-32'),
+ (codecs.BOM_UTF32_BE, 'utf-32-be'),
+ (codecs.BOM_UTF32_LE, 'utf-32-le'),
+] # type: List[Tuple[bytes, Text]]
+
+ENCODING_RE = re.compile(br'coding[:=]\s*([-\w.]+)')
+
+
+def auto_decode(data):
+ # type: (bytes) -> Text
+ """Check a bytes string for a BOM to correctly detect the encoding
+
+ Fallback to locale.getpreferredencoding(False) like open() on Python3"""
+ for bom, encoding in BOMS:
+ if data.startswith(bom):
+ return data[len(bom):].decode(encoding)
+ # Lets check the first two lines as in PEP263
+ for line in data.split(b'\n')[:2]:
+ if line[0:1] == b'#' and ENCODING_RE.search(line):
+ encoding = ENCODING_RE.search(line).groups()[0].decode('ascii')
+ return data.decode(encoding)
+ return data.decode(
+ locale.getpreferredencoding(False) or sys.getdefaultencoding(),
+ )
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/entrypoints.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/entrypoints.py
new file mode 100644
index 0000000000000000000000000000000000000000..befd01c890184c74534bfefa1abd2376f234ac42
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/entrypoints.py
@@ -0,0 +1,31 @@
+import sys
+
+from pip._internal.cli.main import main
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional, List
+
+
+def _wrapper(args=None):
+ # type: (Optional[List[str]]) -> int
+ """Central wrapper for all old entrypoints.
+
+ Historically pip has had several entrypoints defined. Because of issues
+ arising from PATH, sys.path, multiple Pythons, their interactions, and most
+ of them having a pip installed, users suffer every time an entrypoint gets
+ moved.
+
+ To alleviate this pain, and provide a mechanism for warning users and
+ directing them to an appropriate place for help, we now define all of
+ our old entrypoints as wrappers for the current one.
+ """
+ sys.stderr.write(
+ "WARNING: pip is being invoked by an old script wrapper. This will "
+ "fail in a future version of pip.\n"
+ "Please see https://github.com/pypa/pip/issues/5599 for advice on "
+ "fixing the underlying issue.\n"
+ "To avoid this problem you can invoke Python with '-m pip' instead of "
+ "running pip directly.\n"
+ )
+ return main(args)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/filesystem.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/filesystem.py
new file mode 100644
index 0000000000000000000000000000000000000000..6f1537e4032617d294b26db09db1d85af4ad0dc2
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/filesystem.py
@@ -0,0 +1,171 @@
+import errno
+import os
+import os.path
+import random
+import shutil
+import stat
+import sys
+from contextlib import contextmanager
+from tempfile import NamedTemporaryFile
+
+# NOTE: retrying is not annotated in typeshed as on 2017-07-17, which is
+# why we ignore the type on this import.
+from pip._vendor.retrying import retry # type: ignore
+from pip._vendor.six import PY2
+
+from pip._internal.utils.compat import get_path_uid
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING, cast
+
+if MYPY_CHECK_RUNNING:
+ from typing import BinaryIO, Iterator
+
+ class NamedTemporaryFileResult(BinaryIO):
+ @property
+ def file(self):
+ # type: () -> BinaryIO
+ pass
+
+
+def check_path_owner(path):
+ # type: (str) -> bool
+ # If we don't have a way to check the effective uid of this process, then
+ # we'll just assume that we own the directory.
+ if sys.platform == "win32" or not hasattr(os, "geteuid"):
+ return True
+
+ assert os.path.isabs(path)
+
+ previous = None
+ while path != previous:
+ if os.path.lexists(path):
+ # Check if path is writable by current user.
+ if os.geteuid() == 0:
+ # Special handling for root user in order to handle properly
+ # cases where users use sudo without -H flag.
+ try:
+ path_uid = get_path_uid(path)
+ except OSError:
+ return False
+ return path_uid == 0
+ else:
+ return os.access(path, os.W_OK)
+ else:
+ previous, path = path, os.path.dirname(path)
+ return False # assume we don't own the path
+
+
+def copy2_fixed(src, dest):
+ # type: (str, str) -> None
+ """Wrap shutil.copy2() but map errors copying socket files to
+ SpecialFileError as expected.
+
+ See also https://bugs.python.org/issue37700.
+ """
+ try:
+ shutil.copy2(src, dest)
+ except (OSError, IOError):
+ for f in [src, dest]:
+ try:
+ is_socket_file = is_socket(f)
+ except OSError:
+ # An error has already occurred. Another error here is not
+ # a problem and we can ignore it.
+ pass
+ else:
+ if is_socket_file:
+ raise shutil.SpecialFileError("`%s` is a socket" % f)
+
+ raise
+
+
+def is_socket(path):
+ # type: (str) -> bool
+ return stat.S_ISSOCK(os.lstat(path).st_mode)
+
+
+@contextmanager
+def adjacent_tmp_file(path):
+ # type: (str) -> Iterator[NamedTemporaryFileResult]
+ """Given a path to a file, open a temp file next to it securely and ensure
+ it is written to disk after the context reaches its end.
+ """
+ with NamedTemporaryFile(
+ delete=False,
+ dir=os.path.dirname(path),
+ prefix=os.path.basename(path),
+ suffix='.tmp',
+ ) as f:
+ result = cast('NamedTemporaryFileResult', f)
+ try:
+ yield result
+ finally:
+ result.file.flush()
+ os.fsync(result.file.fileno())
+
+
+_replace_retry = retry(stop_max_delay=1000, wait_fixed=250)
+
+if PY2:
+ @_replace_retry
+ def replace(src, dest):
+ # type: (str, str) -> None
+ try:
+ os.rename(src, dest)
+ except OSError:
+ os.remove(dest)
+ os.rename(src, dest)
+
+else:
+ replace = _replace_retry(os.replace)
+
+
+# test_writable_dir and _test_writable_dir_win are copied from Flit,
+# with the author's agreement to also place them under pip's license.
+def test_writable_dir(path):
+ # type: (str) -> bool
+ """Check if a directory is writable.
+
+ Uses os.access() on POSIX, tries creating files on Windows.
+ """
+ # If the directory doesn't exist, find the closest parent that does.
+ while not os.path.isdir(path):
+ parent = os.path.dirname(path)
+ if parent == path:
+ break # Should never get here, but infinite loops are bad
+ path = parent
+
+ if os.name == 'posix':
+ return os.access(path, os.W_OK)
+
+ return _test_writable_dir_win(path)
+
+
+def _test_writable_dir_win(path):
+ # type: (str) -> bool
+ # os.access doesn't work on Windows: http://bugs.python.org/issue2528
+ # and we can't use tempfile: http://bugs.python.org/issue22107
+ basename = 'accesstest_deleteme_fishfingers_custard_'
+ alphabet = 'abcdefghijklmnopqrstuvwxyz0123456789'
+ for i in range(10):
+ name = basename + ''.join(random.choice(alphabet) for _ in range(6))
+ file = os.path.join(path, name)
+ try:
+ fd = os.open(file, os.O_RDWR | os.O_CREAT | os.O_EXCL)
+ except OSError as e:
+ if e.errno == errno.EEXIST:
+ continue
+ if e.errno == errno.EPERM:
+ # This could be because there's a directory with the same name.
+ # But it's highly unlikely there's a directory called that,
+ # so we'll assume it's because the parent dir is not writable.
+ return False
+ raise
+ else:
+ os.close(fd)
+ os.unlink(file)
+ return True
+
+ # This should never be reached
+ raise EnvironmentError(
+ 'Unexpected condition testing for writable directory'
+ )
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/filetypes.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/filetypes.py
new file mode 100644
index 0000000000000000000000000000000000000000..daa0ca771b77a32bf498d07803f5bffc34b1abf9
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/filetypes.py
@@ -0,0 +1,16 @@
+"""Filetype information.
+"""
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Tuple
+
+WHEEL_EXTENSION = '.whl'
+BZ2_EXTENSIONS = ('.tar.bz2', '.tbz') # type: Tuple[str, ...]
+XZ_EXTENSIONS = ('.tar.xz', '.txz', '.tlz',
+ '.tar.lz', '.tar.lzma') # type: Tuple[str, ...]
+ZIP_EXTENSIONS = ('.zip', WHEEL_EXTENSION) # type: Tuple[str, ...]
+TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar') # type: Tuple[str, ...]
+ARCHIVE_EXTENSIONS = (
+ ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS
+)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/glibc.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/glibc.py
new file mode 100644
index 0000000000000000000000000000000000000000..361042441384693dbeeb9424c78dedf3bdbb8a3d
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/glibc.py
@@ -0,0 +1,98 @@
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+from __future__ import absolute_import
+
+import os
+import sys
+
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional, Tuple
+
+
+def glibc_version_string():
+ # type: () -> Optional[str]
+ "Returns glibc version string, or None if not using glibc."
+ return glibc_version_string_confstr() or glibc_version_string_ctypes()
+
+
+def glibc_version_string_confstr():
+ # type: () -> Optional[str]
+ "Primary implementation of glibc_version_string using os.confstr."
+ # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
+ # to be broken or missing. This strategy is used in the standard library
+ # platform module:
+ # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183
+ if sys.platform == "win32":
+ return None
+ try:
+ # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17":
+ _, version = os.confstr("CS_GNU_LIBC_VERSION").split()
+ except (AttributeError, OSError, ValueError):
+ # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
+ return None
+ return version
+
+
+def glibc_version_string_ctypes():
+ # type: () -> Optional[str]
+ "Fallback implementation of glibc_version_string using ctypes."
+
+ try:
+ import ctypes
+ except ImportError:
+ return None
+
+ # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
+ # manpage says, "If filename is NULL, then the returned handle is for the
+ # main program". This way we can let the linker do the work to figure out
+ # which libc our process is actually using.
+ process_namespace = ctypes.CDLL(None)
+ try:
+ gnu_get_libc_version = process_namespace.gnu_get_libc_version
+ except AttributeError:
+ # Symbol doesn't exist -> therefore, we are not linked to
+ # glibc.
+ return None
+
+ # Call gnu_get_libc_version, which returns a string like "2.5"
+ gnu_get_libc_version.restype = ctypes.c_char_p
+ version_str = gnu_get_libc_version()
+ # py2 / py3 compatibility:
+ if not isinstance(version_str, str):
+ version_str = version_str.decode("ascii")
+
+ return version_str
+
+
+# platform.libc_ver regularly returns completely nonsensical glibc
+# versions. E.g. on my computer, platform says:
+#
+# ~$ python2.7 -c 'import platform; print(platform.libc_ver())'
+# ('glibc', '2.7')
+# ~$ python3.5 -c 'import platform; print(platform.libc_ver())'
+# ('glibc', '2.9')
+#
+# But the truth is:
+#
+# ~$ ldd --version
+# ldd (Debian GLIBC 2.22-11) 2.22
+#
+# This is unfortunate, because it means that the linehaul data on libc
+# versions that was generated by pip 8.1.2 and earlier is useless and
+# misleading. Solution: instead of using platform, use our code that actually
+# works.
+def libc_ver():
+ # type: () -> Tuple[str, str]
+ """Try to determine the glibc version
+
+ Returns a tuple of strings (lib, version) which default to empty strings
+ in case the lookup fails.
+ """
+ glibc_version = glibc_version_string()
+ if glibc_version is None:
+ return ("", "")
+ else:
+ return ("glibc", glibc_version)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/hashes.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/hashes.py
new file mode 100644
index 0000000000000000000000000000000000000000..4c41551a25597aa646d480c7a896ab9f151fff96
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/hashes.py
@@ -0,0 +1,131 @@
+from __future__ import absolute_import
+
+import hashlib
+
+from pip._vendor.six import iteritems, iterkeys, itervalues
+
+from pip._internal.exceptions import (
+ HashMismatch,
+ HashMissing,
+ InstallationError,
+)
+from pip._internal.utils.misc import read_chunks
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Dict, List, BinaryIO, NoReturn, Iterator
+ )
+ from pip._vendor.six import PY3
+ if PY3:
+ from hashlib import _Hash
+ else:
+ from hashlib import _hash as _Hash
+
+
+# The recommended hash algo of the moment. Change this whenever the state of
+# the art changes; it won't hurt backward compatibility.
+FAVORITE_HASH = 'sha256'
+
+
+# Names of hashlib algorithms allowed by the --hash option and ``pip hash``
+# Currently, those are the ones at least as collision-resistant as sha256.
+STRONG_HASHES = ['sha256', 'sha384', 'sha512']
+
+
+class Hashes(object):
+ """A wrapper that builds multiple hashes at once and checks them against
+ known-good values
+
+ """
+ def __init__(self, hashes=None):
+ # type: (Dict[str, List[str]]) -> None
+ """
+ :param hashes: A dict of algorithm names pointing to lists of allowed
+ hex digests
+ """
+ self._allowed = {} if hashes is None else hashes
+
+ @property
+ def digest_count(self):
+ # type: () -> int
+ return sum(len(digests) for digests in self._allowed.values())
+
+ def is_hash_allowed(
+ self,
+ hash_name, # type: str
+ hex_digest, # type: str
+ ):
+ # type: (...) -> bool
+ """Return whether the given hex digest is allowed."""
+ return hex_digest in self._allowed.get(hash_name, [])
+
+ def check_against_chunks(self, chunks):
+ # type: (Iterator[bytes]) -> None
+ """Check good hashes against ones built from iterable of chunks of
+ data.
+
+ Raise HashMismatch if none match.
+
+ """
+ gots = {}
+ for hash_name in iterkeys(self._allowed):
+ try:
+ gots[hash_name] = hashlib.new(hash_name)
+ except (ValueError, TypeError):
+ raise InstallationError('Unknown hash name: %s' % hash_name)
+
+ for chunk in chunks:
+ for hash in itervalues(gots):
+ hash.update(chunk)
+
+ for hash_name, got in iteritems(gots):
+ if got.hexdigest() in self._allowed[hash_name]:
+ return
+ self._raise(gots)
+
+ def _raise(self, gots):
+ # type: (Dict[str, _Hash]) -> NoReturn
+ raise HashMismatch(self._allowed, gots)
+
+ def check_against_file(self, file):
+ # type: (BinaryIO) -> None
+ """Check good hashes against a file-like object
+
+ Raise HashMismatch if none match.
+
+ """
+ return self.check_against_chunks(read_chunks(file))
+
+ def check_against_path(self, path):
+ # type: (str) -> None
+ with open(path, 'rb') as file:
+ return self.check_against_file(file)
+
+ def __nonzero__(self):
+ # type: () -> bool
+ """Return whether I know any known-good hashes."""
+ return bool(self._allowed)
+
+ def __bool__(self):
+ # type: () -> bool
+ return self.__nonzero__()
+
+
+class MissingHashes(Hashes):
+ """A workalike for Hashes used when we're missing a hash for a requirement
+
+ It computes the actual hash of the requirement and raises a HashMissing
+ exception showing it to the user.
+
+ """
+ def __init__(self):
+ # type: () -> None
+ """Don't offer the ``hashes`` kwarg."""
+ # Pass our favorite hash in to generate a "gotten hash". With the
+ # empty list, it will never match, so an error will always raise.
+ super(MissingHashes, self).__init__(hashes={FAVORITE_HASH: []})
+
+ def _raise(self, gots):
+ # type: (Dict[str, _Hash]) -> NoReturn
+ raise HashMissing(gots[FAVORITE_HASH].hexdigest())
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/inject_securetransport.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/inject_securetransport.py
new file mode 100644
index 0000000000000000000000000000000000000000..5b93b1d6730518ec49afe78bdfbe74407825d8ee
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/inject_securetransport.py
@@ -0,0 +1,36 @@
+"""A helper module that injects SecureTransport, on import.
+
+The import should be done as early as possible, to ensure all requests and
+sessions (or whatever) are created after injecting SecureTransport.
+
+Note that we only do the injection on macOS, when the linked OpenSSL is too
+old to handle TLSv1.2.
+"""
+
+import sys
+
+
+def inject_securetransport():
+ # type: () -> None
+ # Only relevant on macOS
+ if sys.platform != "darwin":
+ return
+
+ try:
+ import ssl
+ except ImportError:
+ return
+
+ # Checks for OpenSSL 1.0.1
+ if ssl.OPENSSL_VERSION_NUMBER >= 0x1000100f:
+ return
+
+ try:
+ from pip._vendor.urllib3.contrib import securetransport
+ except (ImportError, OSError):
+ return
+
+ securetransport.inject_into_urllib3()
+
+
+inject_securetransport()
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/logging.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/logging.py
new file mode 100644
index 0000000000000000000000000000000000000000..7767111a6ba90278807dac5efd7a3ab59cc92fe1
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/logging.py
@@ -0,0 +1,398 @@
+# The following comment should be removed at some point in the future.
+# mypy: disallow-untyped-defs=False
+
+from __future__ import absolute_import
+
+import contextlib
+import errno
+import logging
+import logging.handlers
+import os
+import sys
+from logging import Filter, getLogger
+
+from pip._vendor.six import PY2
+
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.deprecation import DEPRECATION_MSG_PREFIX
+from pip._internal.utils.misc import ensure_dir
+
+try:
+ import threading
+except ImportError:
+ import dummy_threading as threading # type: ignore
+
+
+try:
+ # Use "import as" and set colorama in the else clause to avoid mypy
+ # errors and get the following correct revealed type for colorama:
+ # `Union[_importlib_modulespec.ModuleType, None]`
+ # Otherwise, we get an error like the following in the except block:
+ # > Incompatible types in assignment (expression has type "None",
+ # variable has type Module)
+ # TODO: eliminate the need to use "import as" once mypy addresses some
+ # of its issues with conditional imports. Here is an umbrella issue:
+ # https://github.com/python/mypy/issues/1297
+ from pip._vendor import colorama as _colorama
+# Lots of different errors can come from this, including SystemError and
+# ImportError.
+except Exception:
+ colorama = None
+else:
+ # Import Fore explicitly rather than accessing below as colorama.Fore
+ # to avoid the following error running mypy:
+ # > Module has no attribute "Fore"
+ # TODO: eliminate the need to import Fore once mypy addresses some of its
+ # issues with conditional imports. This particular case could be an
+ # instance of the following issue (but also see the umbrella issue above):
+ # https://github.com/python/mypy/issues/3500
+ from pip._vendor.colorama import Fore
+
+ colorama = _colorama
+
+
+_log_state = threading.local()
+_log_state.indentation = 0
+subprocess_logger = getLogger('pip.subprocessor')
+
+
+class BrokenStdoutLoggingError(Exception):
+ """
+ Raised if BrokenPipeError occurs for the stdout stream while logging.
+ """
+ pass
+
+
+# BrokenPipeError does not exist in Python 2 and, in addition, manifests
+# differently in Windows and non-Windows.
+if WINDOWS:
+ # In Windows, a broken pipe can show up as EINVAL rather than EPIPE:
+ # https://bugs.python.org/issue19612
+ # https://bugs.python.org/issue30418
+ if PY2:
+ def _is_broken_pipe_error(exc_class, exc):
+ """See the docstring for non-Windows Python 3 below."""
+ return (exc_class is IOError and
+ exc.errno in (errno.EINVAL, errno.EPIPE))
+ else:
+ # In Windows, a broken pipe IOError became OSError in Python 3.
+ def _is_broken_pipe_error(exc_class, exc):
+ """See the docstring for non-Windows Python 3 below."""
+ return ((exc_class is BrokenPipeError) or # noqa: F821
+ (exc_class is OSError and
+ exc.errno in (errno.EINVAL, errno.EPIPE)))
+elif PY2:
+ def _is_broken_pipe_error(exc_class, exc):
+ """See the docstring for non-Windows Python 3 below."""
+ return (exc_class is IOError and exc.errno == errno.EPIPE)
+else:
+ # Then we are in the non-Windows Python 3 case.
+ def _is_broken_pipe_error(exc_class, exc):
+ """
+ Return whether an exception is a broken pipe error.
+
+ Args:
+ exc_class: an exception class.
+ exc: an exception instance.
+ """
+ return (exc_class is BrokenPipeError) # noqa: F821
+
+
+@contextlib.contextmanager
+def indent_log(num=2):
+ """
+ A context manager which will cause the log output to be indented for any
+ log messages emitted inside it.
+ """
+ _log_state.indentation += num
+ try:
+ yield
+ finally:
+ _log_state.indentation -= num
+
+
+def get_indentation():
+ return getattr(_log_state, 'indentation', 0)
+
+
+class IndentingFormatter(logging.Formatter):
+
+ def __init__(self, *args, **kwargs):
+ """
+ A logging.Formatter that obeys the indent_log() context manager.
+
+ :param add_timestamp: A bool indicating output lines should be prefixed
+ with their record's timestamp.
+ """
+ self.add_timestamp = kwargs.pop("add_timestamp", False)
+ super(IndentingFormatter, self).__init__(*args, **kwargs)
+
+ def get_message_start(self, formatted, levelno):
+ """
+ Return the start of the formatted log message (not counting the
+ prefix to add to each line).
+ """
+ if levelno < logging.WARNING:
+ return ''
+ if formatted.startswith(DEPRECATION_MSG_PREFIX):
+ # Then the message already has a prefix. We don't want it to
+ # look like "WARNING: DEPRECATION: ...."
+ return ''
+ if levelno < logging.ERROR:
+ return 'WARNING: '
+
+ return 'ERROR: '
+
+ def format(self, record):
+ """
+ Calls the standard formatter, but will indent all of the log message
+ lines by our current indentation level.
+ """
+ formatted = super(IndentingFormatter, self).format(record)
+ message_start = self.get_message_start(formatted, record.levelno)
+ formatted = message_start + formatted
+
+ prefix = ''
+ if self.add_timestamp:
+ # TODO: Use Formatter.default_time_format after dropping PY2.
+ t = self.formatTime(record, "%Y-%m-%dT%H:%M:%S")
+ prefix = '%s,%03d ' % (t, record.msecs)
+ prefix += " " * get_indentation()
+ formatted = "".join([
+ prefix + line
+ for line in formatted.splitlines(True)
+ ])
+ return formatted
+
+
+def _color_wrap(*colors):
+ def wrapped(inp):
+ return "".join(list(colors) + [inp, colorama.Style.RESET_ALL])
+ return wrapped
+
+
+class ColorizedStreamHandler(logging.StreamHandler):
+
+ # Don't build up a list of colors if we don't have colorama
+ if colorama:
+ COLORS = [
+ # This needs to be in order from highest logging level to lowest.
+ (logging.ERROR, _color_wrap(Fore.RED)),
+ (logging.WARNING, _color_wrap(Fore.YELLOW)),
+ ]
+ else:
+ COLORS = []
+
+ def __init__(self, stream=None, no_color=None):
+ logging.StreamHandler.__init__(self, stream)
+ self._no_color = no_color
+
+ if WINDOWS and colorama:
+ self.stream = colorama.AnsiToWin32(self.stream)
+
+ def _using_stdout(self):
+ """
+ Return whether the handler is using sys.stdout.
+ """
+ if WINDOWS and colorama:
+ # Then self.stream is an AnsiToWin32 object.
+ return self.stream.wrapped is sys.stdout
+
+ return self.stream is sys.stdout
+
+ def should_color(self):
+ # Don't colorize things if we do not have colorama or if told not to
+ if not colorama or self._no_color:
+ return False
+
+ real_stream = (
+ self.stream if not isinstance(self.stream, colorama.AnsiToWin32)
+ else self.stream.wrapped
+ )
+
+ # If the stream is a tty we should color it
+ if hasattr(real_stream, "isatty") and real_stream.isatty():
+ return True
+
+ # If we have an ANSI term we should color it
+ if os.environ.get("TERM") == "ANSI":
+ return True
+
+ # If anything else we should not color it
+ return False
+
+ def format(self, record):
+ msg = logging.StreamHandler.format(self, record)
+
+ if self.should_color():
+ for level, color in self.COLORS:
+ if record.levelno >= level:
+ msg = color(msg)
+ break
+
+ return msg
+
+ # The logging module says handleError() can be customized.
+ def handleError(self, record):
+ exc_class, exc = sys.exc_info()[:2]
+ # If a broken pipe occurred while calling write() or flush() on the
+ # stdout stream in logging's Handler.emit(), then raise our special
+ # exception so we can handle it in main() instead of logging the
+ # broken pipe error and continuing.
+ if (exc_class and self._using_stdout() and
+ _is_broken_pipe_error(exc_class, exc)):
+ raise BrokenStdoutLoggingError()
+
+ return super(ColorizedStreamHandler, self).handleError(record)
+
+
+class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler):
+
+ def _open(self):
+ ensure_dir(os.path.dirname(self.baseFilename))
+ return logging.handlers.RotatingFileHandler._open(self)
+
+
+class MaxLevelFilter(Filter):
+
+ def __init__(self, level):
+ self.level = level
+
+ def filter(self, record):
+ return record.levelno < self.level
+
+
+class ExcludeLoggerFilter(Filter):
+
+ """
+ A logging Filter that excludes records from a logger (or its children).
+ """
+
+ def filter(self, record):
+ # The base Filter class allows only records from a logger (or its
+ # children).
+ return not super(ExcludeLoggerFilter, self).filter(record)
+
+
+def setup_logging(verbosity, no_color, user_log_file):
+ """Configures and sets up all of the logging
+
+ Returns the requested logging level, as its integer value.
+ """
+
+ # Determine the level to be logging at.
+ if verbosity >= 1:
+ level = "DEBUG"
+ elif verbosity == -1:
+ level = "WARNING"
+ elif verbosity == -2:
+ level = "ERROR"
+ elif verbosity <= -3:
+ level = "CRITICAL"
+ else:
+ level = "INFO"
+
+ level_number = getattr(logging, level)
+
+ # The "root" logger should match the "console" level *unless* we also need
+ # to log to a user log file.
+ include_user_log = user_log_file is not None
+ if include_user_log:
+ additional_log_file = user_log_file
+ root_level = "DEBUG"
+ else:
+ additional_log_file = "/dev/null"
+ root_level = level
+
+ # Disable any logging besides WARNING unless we have DEBUG level logging
+ # enabled for vendored libraries.
+ vendored_log_level = "WARNING" if level in ["INFO", "ERROR"] else "DEBUG"
+
+ # Shorthands for clarity
+ log_streams = {
+ "stdout": "ext://sys.stdout",
+ "stderr": "ext://sys.stderr",
+ }
+ handler_classes = {
+ "stream": "pip._internal.utils.logging.ColorizedStreamHandler",
+ "file": "pip._internal.utils.logging.BetterRotatingFileHandler",
+ }
+ handlers = ["console", "console_errors", "console_subprocess"] + (
+ ["user_log"] if include_user_log else []
+ )
+
+ logging.config.dictConfig({
+ "version": 1,
+ "disable_existing_loggers": False,
+ "filters": {
+ "exclude_warnings": {
+ "()": "pip._internal.utils.logging.MaxLevelFilter",
+ "level": logging.WARNING,
+ },
+ "restrict_to_subprocess": {
+ "()": "logging.Filter",
+ "name": subprocess_logger.name,
+ },
+ "exclude_subprocess": {
+ "()": "pip._internal.utils.logging.ExcludeLoggerFilter",
+ "name": subprocess_logger.name,
+ },
+ },
+ "formatters": {
+ "indent": {
+ "()": IndentingFormatter,
+ "format": "%(message)s",
+ },
+ "indent_with_timestamp": {
+ "()": IndentingFormatter,
+ "format": "%(message)s",
+ "add_timestamp": True,
+ },
+ },
+ "handlers": {
+ "console": {
+ "level": level,
+ "class": handler_classes["stream"],
+ "no_color": no_color,
+ "stream": log_streams["stdout"],
+ "filters": ["exclude_subprocess", "exclude_warnings"],
+ "formatter": "indent",
+ },
+ "console_errors": {
+ "level": "WARNING",
+ "class": handler_classes["stream"],
+ "no_color": no_color,
+ "stream": log_streams["stderr"],
+ "filters": ["exclude_subprocess"],
+ "formatter": "indent",
+ },
+ # A handler responsible for logging to the console messages
+ # from the "subprocessor" logger.
+ "console_subprocess": {
+ "level": level,
+ "class": handler_classes["stream"],
+ "no_color": no_color,
+ "stream": log_streams["stderr"],
+ "filters": ["restrict_to_subprocess"],
+ "formatter": "indent",
+ },
+ "user_log": {
+ "level": "DEBUG",
+ "class": handler_classes["file"],
+ "filename": additional_log_file,
+ "delay": True,
+ "formatter": "indent_with_timestamp",
+ },
+ },
+ "root": {
+ "level": root_level,
+ "handlers": handlers,
+ },
+ "loggers": {
+ "pip._vendor": {
+ "level": vendored_log_level
+ }
+ },
+ })
+
+ return level_number
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/marker_files.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/marker_files.py
new file mode 100644
index 0000000000000000000000000000000000000000..42ea81405085a0000c587ad563fee30c7f37a026
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/marker_files.py
@@ -0,0 +1,25 @@
+import os.path
+
+DELETE_MARKER_MESSAGE = '''\
+This file is placed here by pip to indicate the source was put
+here by pip.
+
+Once this package is successfully installed this source code will be
+deleted (unless you remove this file).
+'''
+PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt'
+
+
+def has_delete_marker_file(directory):
+ # type: (str) -> bool
+ return os.path.exists(os.path.join(directory, PIP_DELETE_MARKER_FILENAME))
+
+
+def write_delete_marker_file(directory):
+ # type: (str) -> None
+ """
+ Write the pip delete marker file into this directory.
+ """
+ filepath = os.path.join(directory, PIP_DELETE_MARKER_FILENAME)
+ with open(filepath, 'w') as marker_fp:
+ marker_fp.write(DELETE_MARKER_MESSAGE)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/misc.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/misc.py
new file mode 100644
index 0000000000000000000000000000000000000000..554af0bf7b9b8c03de1b2cd3f3eda09a31c60a41
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/misc.py
@@ -0,0 +1,904 @@
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+# mypy: disallow-untyped-defs=False
+
+from __future__ import absolute_import
+
+import contextlib
+import errno
+import getpass
+import hashlib
+import io
+import logging
+import os
+import posixpath
+import shutil
+import stat
+import sys
+from collections import deque
+
+from pip._vendor import pkg_resources
+# NOTE: retrying is not annotated in typeshed as on 2017-07-17, which is
+# why we ignore the type on this import.
+from pip._vendor.retrying import retry # type: ignore
+from pip._vendor.six import PY2, text_type
+from pip._vendor.six.moves import input
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+from pip._vendor.six.moves.urllib.parse import unquote as urllib_unquote
+
+from pip import __version__
+from pip._internal.exceptions import CommandError
+from pip._internal.locations import (
+ get_major_minor_version,
+ site_packages,
+ user_site,
+)
+from pip._internal.utils.compat import (
+ WINDOWS,
+ expanduser,
+ stdlib_pkgs,
+ str_to_display,
+)
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING, cast
+from pip._internal.utils.virtualenv import (
+ running_under_virtualenv,
+ virtualenv_no_global,
+)
+
+if PY2:
+ from io import BytesIO as StringIO
+else:
+ from io import StringIO
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Any, AnyStr, Container, Iterable, List, Optional, Text,
+ Tuple, Union,
+ )
+ from pip._vendor.pkg_resources import Distribution
+
+ VersionInfo = Tuple[int, int, int]
+
+
+__all__ = ['rmtree', 'display_path', 'backup_dir',
+ 'ask', 'splitext',
+ 'format_size', 'is_installable_dir',
+ 'normalize_path',
+ 'renames', 'get_prog',
+ 'captured_stdout', 'ensure_dir',
+ 'get_installed_version', 'remove_auth_from_url']
+
+
+logger = logging.getLogger(__name__)
+
+
+def get_pip_version():
+ # type: () -> str
+ pip_pkg_dir = os.path.join(os.path.dirname(__file__), "..", "..")
+ pip_pkg_dir = os.path.abspath(pip_pkg_dir)
+
+ return (
+ 'pip {} from {} (python {})'.format(
+ __version__, pip_pkg_dir, get_major_minor_version(),
+ )
+ )
+
+
+def normalize_version_info(py_version_info):
+ # type: (Tuple[int, ...]) -> Tuple[int, int, int]
+ """
+ Convert a tuple of ints representing a Python version to one of length
+ three.
+
+ :param py_version_info: a tuple of ints representing a Python version,
+ or None to specify no version. The tuple can have any length.
+
+ :return: a tuple of length three if `py_version_info` is non-None.
+ Otherwise, return `py_version_info` unchanged (i.e. None).
+ """
+ if len(py_version_info) < 3:
+ py_version_info += (3 - len(py_version_info)) * (0,)
+ elif len(py_version_info) > 3:
+ py_version_info = py_version_info[:3]
+
+ return cast('VersionInfo', py_version_info)
+
+
+def ensure_dir(path):
+ # type: (AnyStr) -> None
+ """os.path.makedirs without EEXIST."""
+ try:
+ os.makedirs(path)
+ except OSError as e:
+ # Windows can raise spurious ENOTEMPTY errors. See #6426.
+ if e.errno != errno.EEXIST and e.errno != errno.ENOTEMPTY:
+ raise
+
+
+def get_prog():
+ # type: () -> str
+ try:
+ prog = os.path.basename(sys.argv[0])
+ if prog in ('__main__.py', '-c'):
+ return "%s -m pip" % sys.executable
+ else:
+ return prog
+ except (AttributeError, TypeError, IndexError):
+ pass
+ return 'pip'
+
+
+# Retry every half second for up to 3 seconds
+@retry(stop_max_delay=3000, wait_fixed=500)
+def rmtree(dir, ignore_errors=False):
+ # type: (str, bool) -> None
+ shutil.rmtree(dir, ignore_errors=ignore_errors,
+ onerror=rmtree_errorhandler)
+
+
+def rmtree_errorhandler(func, path, exc_info):
+ """On Windows, the files in .svn are read-only, so when rmtree() tries to
+ remove them, an exception is thrown. We catch that here, remove the
+ read-only attribute, and hopefully continue without problems."""
+ try:
+ has_attr_readonly = not (os.stat(path).st_mode & stat.S_IWRITE)
+ except (IOError, OSError):
+ # it's equivalent to os.path.exists
+ return
+
+ if has_attr_readonly:
+ # convert to read/write
+ os.chmod(path, stat.S_IWRITE)
+ # use the original function to repeat the operation
+ func(path)
+ return
+ else:
+ raise
+
+
+def path_to_display(path):
+ # type: (Optional[Union[str, Text]]) -> Optional[Text]
+ """
+ Convert a bytes (or text) path to text (unicode in Python 2) for display
+ and logging purposes.
+
+ This function should never error out. Also, this function is mainly needed
+ for Python 2 since in Python 3 str paths are already text.
+ """
+ if path is None:
+ return None
+ if isinstance(path, text_type):
+ return path
+ # Otherwise, path is a bytes object (str in Python 2).
+ try:
+ display_path = path.decode(sys.getfilesystemencoding(), 'strict')
+ except UnicodeDecodeError:
+ # Include the full bytes to make troubleshooting easier, even though
+ # it may not be very human readable.
+ if PY2:
+ # Convert the bytes to a readable str representation using
+ # repr(), and then convert the str to unicode.
+ # Also, we add the prefix "b" to the repr() return value both
+ # to make the Python 2 output look like the Python 3 output, and
+ # to signal to the user that this is a bytes representation.
+ display_path = str_to_display('b{!r}'.format(path))
+ else:
+ # Silence the "F821 undefined name 'ascii'" flake8 error since
+ # in Python 3 ascii() is a built-in.
+ display_path = ascii(path) # noqa: F821
+
+ return display_path
+
+
+def display_path(path):
+ # type: (Union[str, Text]) -> str
+ """Gives the display value for a given path, making it relative to cwd
+ if possible."""
+ path = os.path.normcase(os.path.abspath(path))
+ if sys.version_info[0] == 2:
+ path = path.decode(sys.getfilesystemencoding(), 'replace')
+ path = path.encode(sys.getdefaultencoding(), 'replace')
+ if path.startswith(os.getcwd() + os.path.sep):
+ path = '.' + path[len(os.getcwd()):]
+ return path
+
+
+def backup_dir(dir, ext='.bak'):
+ # type: (str, str) -> str
+ """Figure out the name of a directory to back up the given dir to
+ (adding .bak, .bak2, etc)"""
+ n = 1
+ extension = ext
+ while os.path.exists(dir + extension):
+ n += 1
+ extension = ext + str(n)
+ return dir + extension
+
+
+def ask_path_exists(message, options):
+ # type: (str, Iterable[str]) -> str
+ for action in os.environ.get('PIP_EXISTS_ACTION', '').split():
+ if action in options:
+ return action
+ return ask(message, options)
+
+
+def _check_no_input(message):
+ # type: (str) -> None
+ """Raise an error if no input is allowed."""
+ if os.environ.get('PIP_NO_INPUT'):
+ raise Exception(
+ 'No input was expected ($PIP_NO_INPUT set); question: %s' %
+ message
+ )
+
+
+def ask(message, options):
+ # type: (str, Iterable[str]) -> str
+ """Ask the message interactively, with the given possible responses"""
+ while 1:
+ _check_no_input(message)
+ response = input(message)
+ response = response.strip().lower()
+ if response not in options:
+ print(
+ 'Your response (%r) was not one of the expected responses: '
+ '%s' % (response, ', '.join(options))
+ )
+ else:
+ return response
+
+
+def ask_input(message):
+ # type: (str) -> str
+ """Ask for input interactively."""
+ _check_no_input(message)
+ return input(message)
+
+
+def ask_password(message):
+ # type: (str) -> str
+ """Ask for a password interactively."""
+ _check_no_input(message)
+ return getpass.getpass(message)
+
+
+def format_size(bytes):
+ # type: (float) -> str
+ if bytes > 1000 * 1000:
+ return '%.1f MB' % (bytes / 1000.0 / 1000)
+ elif bytes > 10 * 1000:
+ return '%i kB' % (bytes / 1000)
+ elif bytes > 1000:
+ return '%.1f kB' % (bytes / 1000.0)
+ else:
+ return '%i bytes' % bytes
+
+
+def is_installable_dir(path):
+ # type: (str) -> bool
+ """Is path is a directory containing setup.py or pyproject.toml?
+ """
+ if not os.path.isdir(path):
+ return False
+ setup_py = os.path.join(path, 'setup.py')
+ if os.path.isfile(setup_py):
+ return True
+ pyproject_toml = os.path.join(path, 'pyproject.toml')
+ if os.path.isfile(pyproject_toml):
+ return True
+ return False
+
+
+def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE):
+ """Yield pieces of data from a file-like object until EOF."""
+ while True:
+ chunk = file.read(size)
+ if not chunk:
+ break
+ yield chunk
+
+
+def normalize_path(path, resolve_symlinks=True):
+ # type: (str, bool) -> str
+ """
+ Convert a path to its canonical, case-normalized, absolute version.
+
+ """
+ path = expanduser(path)
+ if resolve_symlinks:
+ path = os.path.realpath(path)
+ else:
+ path = os.path.abspath(path)
+ return os.path.normcase(path)
+
+
+def splitext(path):
+ # type: (str) -> Tuple[str, str]
+ """Like os.path.splitext, but take off .tar too"""
+ base, ext = posixpath.splitext(path)
+ if base.lower().endswith('.tar'):
+ ext = base[-4:] + ext
+ base = base[:-4]
+ return base, ext
+
+
+def renames(old, new):
+ # type: (str, str) -> None
+ """Like os.renames(), but handles renaming across devices."""
+ # Implementation borrowed from os.renames().
+ head, tail = os.path.split(new)
+ if head and tail and not os.path.exists(head):
+ os.makedirs(head)
+
+ shutil.move(old, new)
+
+ head, tail = os.path.split(old)
+ if head and tail:
+ try:
+ os.removedirs(head)
+ except OSError:
+ pass
+
+
+def is_local(path):
+ # type: (str) -> bool
+ """
+ Return True if this is a path pip is allowed to modify.
+
+ If we're in a virtualenv, sys.prefix points to the virtualenv's
+ prefix; only sys.prefix is considered local.
+
+ If we're not in a virtualenv, in general we can modify anything.
+ However, if the OS vendor has configured distutils to install
+ somewhere other than sys.prefix (which could be a subdirectory of
+ sys.prefix, e.g. /usr/local), we consider sys.prefix itself nonlocal
+ and the domain of the OS vendor. (In other words, everything _other
+ than_ sys.prefix is considered local.)
+
+ Caution: this function assumes the head of path has been normalized
+ with normalize_path.
+ """
+
+ path = normalize_path(path)
+ prefix = normalize_path(sys.prefix)
+
+ if running_under_virtualenv():
+ return path.startswith(normalize_path(sys.prefix))
+ else:
+ from pip._internal.locations import distutils_scheme
+ if path.startswith(prefix):
+ for local_path in distutils_scheme("").values():
+ if path.startswith(normalize_path(local_path)):
+ return True
+ return False
+ else:
+ return True
+
+
+def dist_is_local(dist):
+ # type: (Distribution) -> bool
+ """
+ Return True if given Distribution object is installed somewhere pip
+ is allowed to modify.
+
+ """
+ return is_local(dist_location(dist))
+
+
+def dist_in_usersite(dist):
+ # type: (Distribution) -> bool
+ """
+ Return True if given Distribution is installed in user site.
+ """
+ return dist_location(dist).startswith(normalize_path(user_site))
+
+
+def dist_in_site_packages(dist):
+ # type: (Distribution) -> bool
+ """
+ Return True if given Distribution is installed in
+ sysconfig.get_python_lib().
+ """
+ return dist_location(dist).startswith(normalize_path(site_packages))
+
+
+def dist_is_editable(dist):
+ # type: (Distribution) -> bool
+ """
+ Return True if given Distribution is an editable install.
+ """
+ for path_item in sys.path:
+ egg_link = os.path.join(path_item, dist.project_name + '.egg-link')
+ if os.path.isfile(egg_link):
+ return True
+ return False
+
+
+def get_installed_distributions(
+ local_only=True, # type: bool
+ skip=stdlib_pkgs, # type: Container[str]
+ include_editables=True, # type: bool
+ editables_only=False, # type: bool
+ user_only=False, # type: bool
+ paths=None # type: Optional[List[str]]
+):
+ # type: (...) -> List[Distribution]
+ """
+ Return a list of installed Distribution objects.
+
+ If ``local_only`` is True (default), only return installations
+ local to the current virtualenv, if in a virtualenv.
+
+ ``skip`` argument is an iterable of lower-case project names to
+ ignore; defaults to stdlib_pkgs
+
+ If ``include_editables`` is False, don't report editables.
+
+ If ``editables_only`` is True , only report editables.
+
+ If ``user_only`` is True , only report installations in the user
+ site directory.
+
+ If ``paths`` is set, only report the distributions present at the
+ specified list of locations.
+ """
+ if paths:
+ working_set = pkg_resources.WorkingSet(paths)
+ else:
+ working_set = pkg_resources.working_set
+
+ if local_only:
+ local_test = dist_is_local
+ else:
+ def local_test(d):
+ return True
+
+ if include_editables:
+ def editable_test(d):
+ return True
+ else:
+ def editable_test(d):
+ return not dist_is_editable(d)
+
+ if editables_only:
+ def editables_only_test(d):
+ return dist_is_editable(d)
+ else:
+ def editables_only_test(d):
+ return True
+
+ if user_only:
+ user_test = dist_in_usersite
+ else:
+ def user_test(d):
+ return True
+
+ return [d for d in working_set
+ if local_test(d) and
+ d.key not in skip and
+ editable_test(d) and
+ editables_only_test(d) and
+ user_test(d)
+ ]
+
+
+def egg_link_path(dist):
+ # type: (Distribution) -> Optional[str]
+ """
+ Return the path for the .egg-link file if it exists, otherwise, None.
+
+ There's 3 scenarios:
+ 1) not in a virtualenv
+ try to find in site.USER_SITE, then site_packages
+ 2) in a no-global virtualenv
+ try to find in site_packages
+ 3) in a yes-global virtualenv
+ try to find in site_packages, then site.USER_SITE
+ (don't look in global location)
+
+ For #1 and #3, there could be odd cases, where there's an egg-link in 2
+ locations.
+
+ This method will just return the first one found.
+ """
+ sites = []
+ if running_under_virtualenv():
+ sites.append(site_packages)
+ if not virtualenv_no_global() and user_site:
+ sites.append(user_site)
+ else:
+ if user_site:
+ sites.append(user_site)
+ sites.append(site_packages)
+
+ for site in sites:
+ egglink = os.path.join(site, dist.project_name) + '.egg-link'
+ if os.path.isfile(egglink):
+ return egglink
+ return None
+
+
+def dist_location(dist):
+ # type: (Distribution) -> str
+ """
+ Get the site-packages location of this distribution. Generally
+ this is dist.location, except in the case of develop-installed
+ packages, where dist.location is the source code location, and we
+ want to know where the egg-link file is.
+
+ The returned location is normalized (in particular, with symlinks removed).
+ """
+ egg_link = egg_link_path(dist)
+ if egg_link:
+ return normalize_path(egg_link)
+ return normalize_path(dist.location)
+
+
+def write_output(msg, *args):
+ # type: (str, str) -> None
+ logger.info(msg, *args)
+
+
+class FakeFile(object):
+ """Wrap a list of lines in an object with readline() to make
+ ConfigParser happy."""
+ def __init__(self, lines):
+ self._gen = (l for l in lines)
+
+ def readline(self):
+ try:
+ try:
+ return next(self._gen)
+ except NameError:
+ return self._gen.next()
+ except StopIteration:
+ return ''
+
+ def __iter__(self):
+ return self._gen
+
+
+class StreamWrapper(StringIO):
+
+ @classmethod
+ def from_stream(cls, orig_stream):
+ cls.orig_stream = orig_stream
+ return cls()
+
+ # compileall.compile_dir() needs stdout.encoding to print to stdout
+ @property
+ def encoding(self):
+ return self.orig_stream.encoding
+
+
+@contextlib.contextmanager
+def captured_output(stream_name):
+ """Return a context manager used by captured_stdout/stdin/stderr
+ that temporarily replaces the sys stream *stream_name* with a StringIO.
+
+ Taken from Lib/support/__init__.py in the CPython repo.
+ """
+ orig_stdout = getattr(sys, stream_name)
+ setattr(sys, stream_name, StreamWrapper.from_stream(orig_stdout))
+ try:
+ yield getattr(sys, stream_name)
+ finally:
+ setattr(sys, stream_name, orig_stdout)
+
+
+def captured_stdout():
+ """Capture the output of sys.stdout:
+
+ with captured_stdout() as stdout:
+ print('hello')
+ self.assertEqual(stdout.getvalue(), 'hello\n')
+
+ Taken from Lib/support/__init__.py in the CPython repo.
+ """
+ return captured_output('stdout')
+
+
+def captured_stderr():
+ """
+ See captured_stdout().
+ """
+ return captured_output('stderr')
+
+
+class cached_property(object):
+ """A property that is only computed once per instance and then replaces
+ itself with an ordinary attribute. Deleting the attribute resets the
+ property.
+
+ Source: https://github.com/bottlepy/bottle/blob/0.11.5/bottle.py#L175
+ """
+
+ def __init__(self, func):
+ self.__doc__ = getattr(func, '__doc__')
+ self.func = func
+
+ def __get__(self, obj, cls):
+ if obj is None:
+ # We're being accessed from the class itself, not from an object
+ return self
+ value = obj.__dict__[self.func.__name__] = self.func(obj)
+ return value
+
+
+def get_installed_version(dist_name, working_set=None):
+ """Get the installed version of dist_name avoiding pkg_resources cache"""
+ # Create a requirement that we'll look for inside of setuptools.
+ req = pkg_resources.Requirement.parse(dist_name)
+
+ if working_set is None:
+ # We want to avoid having this cached, so we need to construct a new
+ # working set each time.
+ working_set = pkg_resources.WorkingSet()
+
+ # Get the installed distribution from our working set
+ dist = working_set.find(req)
+
+ # Check to see if we got an installed distribution or not, if we did
+ # we want to return it's version.
+ return dist.version if dist else None
+
+
+def consume(iterator):
+ """Consume an iterable at C speed."""
+ deque(iterator, maxlen=0)
+
+
+# Simulates an enum
+def enum(*sequential, **named):
+ enums = dict(zip(sequential, range(len(sequential))), **named)
+ reverse = {value: key for key, value in enums.items()}
+ enums['reverse_mapping'] = reverse
+ return type('Enum', (), enums)
+
+
+def build_netloc(host, port):
+ # type: (str, Optional[int]) -> str
+ """
+ Build a netloc from a host-port pair
+ """
+ if port is None:
+ return host
+ if ':' in host:
+ # Only wrap host with square brackets when it is IPv6
+ host = '[{}]'.format(host)
+ return '{}:{}'.format(host, port)
+
+
+def build_url_from_netloc(netloc, scheme='https'):
+ # type: (str, str) -> str
+ """
+ Build a full URL from a netloc.
+ """
+ if netloc.count(':') >= 2 and '@' not in netloc and '[' not in netloc:
+ # It must be a bare IPv6 address, so wrap it with brackets.
+ netloc = '[{}]'.format(netloc)
+ return '{}://{}'.format(scheme, netloc)
+
+
+def parse_netloc(netloc):
+ # type: (str) -> Tuple[str, Optional[int]]
+ """
+ Return the host-port pair from a netloc.
+ """
+ url = build_url_from_netloc(netloc)
+ parsed = urllib_parse.urlparse(url)
+ return parsed.hostname, parsed.port
+
+
+def split_auth_from_netloc(netloc):
+ """
+ Parse out and remove the auth information from a netloc.
+
+ Returns: (netloc, (username, password)).
+ """
+ if '@' not in netloc:
+ return netloc, (None, None)
+
+ # Split from the right because that's how urllib.parse.urlsplit()
+ # behaves if more than one @ is present (which can be checked using
+ # the password attribute of urlsplit()'s return value).
+ auth, netloc = netloc.rsplit('@', 1)
+ if ':' in auth:
+ # Split from the left because that's how urllib.parse.urlsplit()
+ # behaves if more than one : is present (which again can be checked
+ # using the password attribute of the return value)
+ user_pass = auth.split(':', 1)
+ else:
+ user_pass = auth, None
+
+ user_pass = tuple(
+ None if x is None else urllib_unquote(x) for x in user_pass
+ )
+
+ return netloc, user_pass
+
+
+def redact_netloc(netloc):
+ # type: (str) -> str
+ """
+ Replace the sensitive data in a netloc with "****", if it exists.
+
+ For example:
+ - "user:pass@example.com" returns "user:****@example.com"
+ - "accesstoken@example.com" returns "****@example.com"
+ """
+ netloc, (user, password) = split_auth_from_netloc(netloc)
+ if user is None:
+ return netloc
+ if password is None:
+ user = '****'
+ password = ''
+ else:
+ user = urllib_parse.quote(user)
+ password = ':****'
+ return '{user}{password}@{netloc}'.format(user=user,
+ password=password,
+ netloc=netloc)
+
+
+def _transform_url(url, transform_netloc):
+ """Transform and replace netloc in a url.
+
+ transform_netloc is a function taking the netloc and returning a
+ tuple. The first element of this tuple is the new netloc. The
+ entire tuple is returned.
+
+ Returns a tuple containing the transformed url as item 0 and the
+ original tuple returned by transform_netloc as item 1.
+ """
+ purl = urllib_parse.urlsplit(url)
+ netloc_tuple = transform_netloc(purl.netloc)
+ # stripped url
+ url_pieces = (
+ purl.scheme, netloc_tuple[0], purl.path, purl.query, purl.fragment
+ )
+ surl = urllib_parse.urlunsplit(url_pieces)
+ return surl, netloc_tuple
+
+
+def _get_netloc(netloc):
+ return split_auth_from_netloc(netloc)
+
+
+def _redact_netloc(netloc):
+ return (redact_netloc(netloc),)
+
+
+def split_auth_netloc_from_url(url):
+ # type: (str) -> Tuple[str, str, Tuple[str, str]]
+ """
+ Parse a url into separate netloc, auth, and url with no auth.
+
+ Returns: (url_without_auth, netloc, (username, password))
+ """
+ url_without_auth, (netloc, auth) = _transform_url(url, _get_netloc)
+ return url_without_auth, netloc, auth
+
+
+def remove_auth_from_url(url):
+ # type: (str) -> str
+ """Return a copy of url with 'username:password@' removed."""
+ # username/pass params are passed to subversion through flags
+ # and are not recognized in the url.
+ return _transform_url(url, _get_netloc)[0]
+
+
+def redact_auth_from_url(url):
+ # type: (str) -> str
+ """Replace the password in a given url with ****."""
+ return _transform_url(url, _redact_netloc)[0]
+
+
+class HiddenText(object):
+ def __init__(
+ self,
+ secret, # type: str
+ redacted, # type: str
+ ):
+ # type: (...) -> None
+ self.secret = secret
+ self.redacted = redacted
+
+ def __repr__(self):
+ # type: (...) -> str
+ return '<HiddenText {!r}>'.format(str(self))
+
+ def __str__(self):
+ # type: (...) -> str
+ return self.redacted
+
+ # This is useful for testing.
+ def __eq__(self, other):
+ # type: (Any) -> bool
+ if type(self) != type(other):
+ return False
+
+ # The string being used for redaction doesn't also have to match,
+ # just the raw, original string.
+ return (self.secret == other.secret)
+
+ # We need to provide an explicit __ne__ implementation for Python 2.
+ # TODO: remove this when we drop PY2 support.
+ def __ne__(self, other):
+ # type: (Any) -> bool
+ return not self == other
+
+
+def hide_value(value):
+ # type: (str) -> HiddenText
+ return HiddenText(value, redacted='****')
+
+
+def hide_url(url):
+ # type: (str) -> HiddenText
+ redacted = redact_auth_from_url(url)
+ return HiddenText(url, redacted=redacted)
+
+
+def protect_pip_from_modification_on_windows(modifying_pip):
+ # type: (bool) -> None
+ """Protection of pip.exe from modification on Windows
+
+ On Windows, any operation modifying pip should be run as:
+ python -m pip ...
+ """
+ pip_names = [
+ "pip.exe",
+ "pip{}.exe".format(sys.version_info[0]),
+ "pip{}.{}.exe".format(*sys.version_info[:2])
+ ]
+
+ # See https://github.com/pypa/pip/issues/1299 for more discussion
+ should_show_use_python_msg = (
+ modifying_pip and
+ WINDOWS and
+ os.path.basename(sys.argv[0]) in pip_names
+ )
+
+ if should_show_use_python_msg:
+ new_command = [
+ sys.executable, "-m", "pip"
+ ] + sys.argv[1:]
+ raise CommandError(
+ 'To modify pip, please run the following command:\n{}'
+ .format(" ".join(new_command))
+ )
+
+
+def is_console_interactive():
+ # type: () -> bool
+ """Is this console interactive?
+ """
+ return sys.stdin is not None and sys.stdin.isatty()
+
+
+def hash_file(path, blocksize=1 << 20):
+ # type: (str, int) -> Tuple[Any, int]
+ """Return (hash, length) for path using hashlib.sha256()
+ """
+
+ h = hashlib.sha256()
+ length = 0
+ with open(path, 'rb') as f:
+ for block in read_chunks(f, size=blocksize):
+ length += len(block)
+ h.update(block)
+ return h, length
+
+
+def is_wheel_installed():
+ """
+ Return whether the wheel package is installed.
+ """
+ try:
+ import wheel # noqa: F401
+ except ImportError:
+ return False
+
+ return True
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/models.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/models.py
new file mode 100644
index 0000000000000000000000000000000000000000..29e1441153b63446220a5e1867e691183e0d22d7
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/models.py
@@ -0,0 +1,42 @@
+"""Utilities for defining models
+"""
+# The following comment should be removed at some point in the future.
+# mypy: disallow-untyped-defs=False
+
+import operator
+
+
+class KeyBasedCompareMixin(object):
+ """Provides comparison capabilities that is based on a key
+ """
+
+ def __init__(self, key, defining_class):
+ self._compare_key = key
+ self._defining_class = defining_class
+
+ def __hash__(self):
+ return hash(self._compare_key)
+
+ def __lt__(self, other):
+ return self._compare(other, operator.__lt__)
+
+ def __le__(self, other):
+ return self._compare(other, operator.__le__)
+
+ def __gt__(self, other):
+ return self._compare(other, operator.__gt__)
+
+ def __ge__(self, other):
+ return self._compare(other, operator.__ge__)
+
+ def __eq__(self, other):
+ return self._compare(other, operator.__eq__)
+
+ def __ne__(self, other):
+ return self._compare(other, operator.__ne__)
+
+ def _compare(self, other, method):
+ if not isinstance(other, self._defining_class):
+ return NotImplemented
+
+ return method(self._compare_key, other._compare_key)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/packaging.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/packaging.py
new file mode 100644
index 0000000000000000000000000000000000000000..68aa86edbf012c68ceadbe67e21e5d6c9ebbc0ab
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/packaging.py
@@ -0,0 +1,94 @@
+from __future__ import absolute_import
+
+import logging
+from email.parser import FeedParser
+
+from pip._vendor import pkg_resources
+from pip._vendor.packaging import specifiers, version
+
+from pip._internal.exceptions import NoneMetadataError
+from pip._internal.utils.misc import display_path
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional, Tuple
+ from email.message import Message
+ from pip._vendor.pkg_resources import Distribution
+
+
+logger = logging.getLogger(__name__)
+
+
+def check_requires_python(requires_python, version_info):
+ # type: (Optional[str], Tuple[int, ...]) -> bool
+ """
+ Check if the given Python version matches a "Requires-Python" specifier.
+
+ :param version_info: A 3-tuple of ints representing a Python
+ major-minor-micro version to check (e.g. `sys.version_info[:3]`).
+
+ :return: `True` if the given Python version satisfies the requirement.
+ Otherwise, return `False`.
+
+ :raises InvalidSpecifier: If `requires_python` has an invalid format.
+ """
+ if requires_python is None:
+ # The package provides no information
+ return True
+ requires_python_specifier = specifiers.SpecifierSet(requires_python)
+
+ python_version = version.parse('.'.join(map(str, version_info)))
+ return python_version in requires_python_specifier
+
+
+def get_metadata(dist):
+ # type: (Distribution) -> Message
+ """
+ :raises NoneMetadataError: if the distribution reports `has_metadata()`
+ True but `get_metadata()` returns None.
+ """
+ metadata_name = 'METADATA'
+ if (isinstance(dist, pkg_resources.DistInfoDistribution) and
+ dist.has_metadata(metadata_name)):
+ metadata = dist.get_metadata(metadata_name)
+ elif dist.has_metadata('PKG-INFO'):
+ metadata_name = 'PKG-INFO'
+ metadata = dist.get_metadata(metadata_name)
+ else:
+ logger.warning("No metadata found in %s", display_path(dist.location))
+ metadata = ''
+
+ if metadata is None:
+ raise NoneMetadataError(dist, metadata_name)
+
+ feed_parser = FeedParser()
+ # The following line errors out if with a "NoneType" TypeError if
+ # passed metadata=None.
+ feed_parser.feed(metadata)
+ return feed_parser.close()
+
+
+def get_requires_python(dist):
+ # type: (pkg_resources.Distribution) -> Optional[str]
+ """
+ Return the "Requires-Python" metadata for a distribution, or None
+ if not present.
+ """
+ pkg_info_dict = get_metadata(dist)
+ requires_python = pkg_info_dict.get('Requires-Python')
+
+ if requires_python is not None:
+ # Convert to a str to satisfy the type checker, since requires_python
+ # can be a Header object.
+ requires_python = str(requires_python)
+
+ return requires_python
+
+
+def get_installer(dist):
+ # type: (Distribution) -> str
+ if dist.has_metadata('INSTALLER'):
+ for line in dist.get_metadata_lines('INSTALLER'):
+ if line.strip():
+ return line.strip()
+ return ''
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/pkg_resources.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/pkg_resources.py
new file mode 100644
index 0000000000000000000000000000000000000000..0bc129acc6ab582eb087be7ee186c554dc5feba1
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/pkg_resources.py
@@ -0,0 +1,44 @@
+from pip._vendor.pkg_resources import yield_lines
+from pip._vendor.six import ensure_str
+
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Dict, Iterable, List
+
+
+class DictMetadata(object):
+ """IMetadataProvider that reads metadata files from a dictionary.
+ """
+ def __init__(self, metadata):
+ # type: (Dict[str, bytes]) -> None
+ self._metadata = metadata
+
+ def has_metadata(self, name):
+ # type: (str) -> bool
+ return name in self._metadata
+
+ def get_metadata(self, name):
+ # type: (str) -> str
+ try:
+ return ensure_str(self._metadata[name])
+ except UnicodeDecodeError as e:
+ # Mirrors handling done in pkg_resources.NullProvider.
+ e.reason += " in {} file".format(name)
+ raise
+
+ def get_metadata_lines(self, name):
+ # type: (str) -> Iterable[str]
+ return yield_lines(self.get_metadata(name))
+
+ def metadata_isdir(self, name):
+ # type: (str) -> bool
+ return False
+
+ def metadata_listdir(self, name):
+ # type: (str) -> List[str]
+ return []
+
+ def run_script(self, script_name, namespace):
+ # type: (str, str) -> None
+ pass
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/setuptools_build.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/setuptools_build.py
new file mode 100644
index 0000000000000000000000000000000000000000..4147a650dca185dcd4491b805d0bdb0775eff924
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/setuptools_build.py
@@ -0,0 +1,181 @@
+import sys
+
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import List, Optional, Sequence
+
+# Shim to wrap setup.py invocation with setuptools
+#
+# We set sys.argv[0] to the path to the underlying setup.py file so
+# setuptools / distutils don't take the path to the setup.py to be "-c" when
+# invoking via the shim. This avoids e.g. the following manifest_maker
+# warning: "warning: manifest_maker: standard file '-c' not found".
+_SETUPTOOLS_SHIM = (
+ "import sys, setuptools, tokenize; sys.argv[0] = {0!r}; __file__={0!r};"
+ "f=getattr(tokenize, 'open', open)(__file__);"
+ "code=f.read().replace('\\r\\n', '\\n');"
+ "f.close();"
+ "exec(compile(code, __file__, 'exec'))"
+)
+
+
+def make_setuptools_shim_args(
+ setup_py_path, # type: str
+ global_options=None, # type: Sequence[str]
+ no_user_config=False, # type: bool
+ unbuffered_output=False # type: bool
+):
+ # type: (...) -> List[str]
+ """
+ Get setuptools command arguments with shim wrapped setup file invocation.
+
+ :param setup_py_path: The path to setup.py to be wrapped.
+ :param global_options: Additional global options.
+ :param no_user_config: If True, disables personal user configuration.
+ :param unbuffered_output: If True, adds the unbuffered switch to the
+ argument list.
+ """
+ args = [sys.executable]
+ if unbuffered_output:
+ args += ["-u"]
+ args += ["-c", _SETUPTOOLS_SHIM.format(setup_py_path)]
+ if global_options:
+ args += global_options
+ if no_user_config:
+ args += ["--no-user-cfg"]
+ return args
+
+
+def make_setuptools_bdist_wheel_args(
+ setup_py_path, # type: str
+ global_options, # type: Sequence[str]
+ build_options, # type: Sequence[str]
+ destination_dir, # type: str
+):
+ # type: (...) -> List[str]
+ # NOTE: Eventually, we'd want to also -S to the flags here, when we're
+ # isolating. Currently, it breaks Python in virtualenvs, because it
+ # relies on site.py to find parts of the standard library outside the
+ # virtualenv.
+ args = make_setuptools_shim_args(
+ setup_py_path,
+ global_options=global_options,
+ unbuffered_output=True
+ )
+ args += ["bdist_wheel", "-d", destination_dir]
+ args += build_options
+ return args
+
+
+def make_setuptools_clean_args(
+ setup_py_path, # type: str
+ global_options, # type: Sequence[str]
+):
+ # type: (...) -> List[str]
+ args = make_setuptools_shim_args(
+ setup_py_path,
+ global_options=global_options,
+ unbuffered_output=True
+ )
+ args += ["clean", "--all"]
+ return args
+
+
+def make_setuptools_develop_args(
+ setup_py_path, # type: str
+ global_options, # type: Sequence[str]
+ install_options, # type: Sequence[str]
+ no_user_config, # type: bool
+ prefix, # type: Optional[str]
+ home, # type: Optional[str]
+ use_user_site, # type: bool
+):
+ # type: (...) -> List[str]
+ assert not (use_user_site and prefix)
+
+ args = make_setuptools_shim_args(
+ setup_py_path,
+ global_options=global_options,
+ no_user_config=no_user_config,
+ )
+
+ args += ["develop", "--no-deps"]
+
+ args += install_options
+
+ if prefix:
+ args += ["--prefix", prefix]
+ if home is not None:
+ args += ["--home", home]
+
+ if use_user_site:
+ args += ["--user", "--prefix="]
+
+ return args
+
+
+def make_setuptools_egg_info_args(
+ setup_py_path, # type: str
+ egg_info_dir, # type: Optional[str]
+ no_user_config, # type: bool
+):
+ # type: (...) -> List[str]
+ args = make_setuptools_shim_args(setup_py_path)
+ if no_user_config:
+ args += ["--no-user-cfg"]
+
+ args += ["egg_info"]
+
+ if egg_info_dir:
+ args += ["--egg-base", egg_info_dir]
+
+ return args
+
+
+def make_setuptools_install_args(
+ setup_py_path, # type: str
+ global_options, # type: Sequence[str]
+ install_options, # type: Sequence[str]
+ record_filename, # type: str
+ root, # type: Optional[str]
+ prefix, # type: Optional[str]
+ header_dir, # type: Optional[str]
+ home, # type: Optional[str]
+ use_user_site, # type: bool
+ no_user_config, # type: bool
+ pycompile # type: bool
+):
+ # type: (...) -> List[str]
+ assert not (use_user_site and prefix)
+ assert not (use_user_site and root)
+
+ args = make_setuptools_shim_args(
+ setup_py_path,
+ global_options=global_options,
+ no_user_config=no_user_config,
+ unbuffered_output=True
+ )
+ args += ["install", "--record", record_filename]
+ args += ["--single-version-externally-managed"]
+
+ if root is not None:
+ args += ["--root", root]
+ if prefix is not None:
+ args += ["--prefix", prefix]
+ if home is not None:
+ args += ["--home", home]
+ if use_user_site:
+ args += ["--user", "--prefix="]
+
+ if pycompile:
+ args += ["--compile"]
+ else:
+ args += ["--no-compile"]
+
+ if header_dir:
+ args += ["--install-headers", header_dir]
+
+ args += install_options
+
+ return args
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/subprocess.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/subprocess.py
new file mode 100644
index 0000000000000000000000000000000000000000..ea0176d341ec037e72399b43709aaa837f9c4744
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/subprocess.py
@@ -0,0 +1,278 @@
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+from __future__ import absolute_import
+
+import logging
+import os
+import subprocess
+
+from pip._vendor.six.moves import shlex_quote
+
+from pip._internal.exceptions import InstallationError
+from pip._internal.utils.compat import console_to_str, str_to_display
+from pip._internal.utils.logging import subprocess_logger
+from pip._internal.utils.misc import HiddenText, path_to_display
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.ui import open_spinner
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Any, Callable, Iterable, List, Mapping, Optional, Text, Union,
+ )
+ from pip._internal.utils.ui import SpinnerInterface
+
+ CommandArgs = List[Union[str, HiddenText]]
+
+
+LOG_DIVIDER = '----------------------------------------'
+
+
+def make_command(*args):
+ # type: (Union[str, HiddenText, CommandArgs]) -> CommandArgs
+ """
+ Create a CommandArgs object.
+ """
+ command_args = [] # type: CommandArgs
+ for arg in args:
+ # Check for list instead of CommandArgs since CommandArgs is
+ # only known during type-checking.
+ if isinstance(arg, list):
+ command_args.extend(arg)
+ else:
+ # Otherwise, arg is str or HiddenText.
+ command_args.append(arg)
+
+ return command_args
+
+
+def format_command_args(args):
+ # type: (Union[List[str], CommandArgs]) -> str
+ """
+ Format command arguments for display.
+ """
+ # For HiddenText arguments, display the redacted form by calling str().
+ # Also, we don't apply str() to arguments that aren't HiddenText since
+ # this can trigger a UnicodeDecodeError in Python 2 if the argument
+ # has type unicode and includes a non-ascii character. (The type
+ # checker doesn't ensure the annotations are correct in all cases.)
+ return ' '.join(
+ shlex_quote(str(arg)) if isinstance(arg, HiddenText)
+ else shlex_quote(arg) for arg in args
+ )
+
+
+def reveal_command_args(args):
+ # type: (Union[List[str], CommandArgs]) -> List[str]
+ """
+ Return the arguments in their raw, unredacted form.
+ """
+ return [
+ arg.secret if isinstance(arg, HiddenText) else arg for arg in args
+ ]
+
+
+def make_subprocess_output_error(
+ cmd_args, # type: Union[List[str], CommandArgs]
+ cwd, # type: Optional[str]
+ lines, # type: List[Text]
+ exit_status, # type: int
+):
+ # type: (...) -> Text
+ """
+ Create and return the error message to use to log a subprocess error
+ with command output.
+
+ :param lines: A list of lines, each ending with a newline.
+ """
+ command = format_command_args(cmd_args)
+ # Convert `command` and `cwd` to text (unicode in Python 2) so we can use
+ # them as arguments in the unicode format string below. This avoids
+ # "UnicodeDecodeError: 'ascii' codec can't decode byte ..." in Python 2
+ # if either contains a non-ascii character.
+ command_display = str_to_display(command, desc='command bytes')
+ cwd_display = path_to_display(cwd)
+
+ # We know the joined output value ends in a newline.
+ output = ''.join(lines)
+ msg = (
+ # Use a unicode string to avoid "UnicodeEncodeError: 'ascii'
+ # codec can't encode character ..." in Python 2 when a format
+ # argument (e.g. `output`) has a non-ascii character.
+ u'Command errored out with exit status {exit_status}:\n'
+ ' command: {command_display}\n'
+ ' cwd: {cwd_display}\n'
+ 'Complete output ({line_count} lines):\n{output}{divider}'
+ ).format(
+ exit_status=exit_status,
+ command_display=command_display,
+ cwd_display=cwd_display,
+ line_count=len(lines),
+ output=output,
+ divider=LOG_DIVIDER,
+ )
+ return msg
+
+
+def call_subprocess(
+ cmd, # type: Union[List[str], CommandArgs]
+ show_stdout=False, # type: bool
+ cwd=None, # type: Optional[str]
+ on_returncode='raise', # type: str
+ extra_ok_returncodes=None, # type: Optional[Iterable[int]]
+ command_desc=None, # type: Optional[str]
+ extra_environ=None, # type: Optional[Mapping[str, Any]]
+ unset_environ=None, # type: Optional[Iterable[str]]
+ spinner=None, # type: Optional[SpinnerInterface]
+ log_failed_cmd=True # type: Optional[bool]
+):
+ # type: (...) -> Text
+ """
+ Args:
+ show_stdout: if true, use INFO to log the subprocess's stderr and
+ stdout streams. Otherwise, use DEBUG. Defaults to False.
+ extra_ok_returncodes: an iterable of integer return codes that are
+ acceptable, in addition to 0. Defaults to None, which means [].
+ unset_environ: an iterable of environment variable names to unset
+ prior to calling subprocess.Popen().
+ log_failed_cmd: if false, failed commands are not logged, only raised.
+ """
+ if extra_ok_returncodes is None:
+ extra_ok_returncodes = []
+ if unset_environ is None:
+ unset_environ = []
+ # Most places in pip use show_stdout=False. What this means is--
+ #
+ # - We connect the child's output (combined stderr and stdout) to a
+ # single pipe, which we read.
+ # - We log this output to stderr at DEBUG level as it is received.
+ # - If DEBUG logging isn't enabled (e.g. if --verbose logging wasn't
+ # requested), then we show a spinner so the user can still see the
+ # subprocess is in progress.
+ # - If the subprocess exits with an error, we log the output to stderr
+ # at ERROR level if it hasn't already been displayed to the console
+ # (e.g. if --verbose logging wasn't enabled). This way we don't log
+ # the output to the console twice.
+ #
+ # If show_stdout=True, then the above is still done, but with DEBUG
+ # replaced by INFO.
+ if show_stdout:
+ # Then log the subprocess output at INFO level.
+ log_subprocess = subprocess_logger.info
+ used_level = logging.INFO
+ else:
+ # Then log the subprocess output using DEBUG. This also ensures
+ # it will be logged to the log file (aka user_log), if enabled.
+ log_subprocess = subprocess_logger.debug
+ used_level = logging.DEBUG
+
+ # Whether the subprocess will be visible in the console.
+ showing_subprocess = subprocess_logger.getEffectiveLevel() <= used_level
+
+ # Only use the spinner if we're not showing the subprocess output
+ # and we have a spinner.
+ use_spinner = not showing_subprocess and spinner is not None
+
+ if command_desc is None:
+ command_desc = format_command_args(cmd)
+
+ log_subprocess("Running command %s", command_desc)
+ env = os.environ.copy()
+ if extra_environ:
+ env.update(extra_environ)
+ for name in unset_environ:
+ env.pop(name, None)
+ try:
+ proc = subprocess.Popen(
+ # Convert HiddenText objects to the underlying str.
+ reveal_command_args(cmd),
+ stderr=subprocess.STDOUT, stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE, cwd=cwd, env=env,
+ )
+ proc.stdin.close()
+ except Exception as exc:
+ if log_failed_cmd:
+ subprocess_logger.critical(
+ "Error %s while executing command %s", exc, command_desc,
+ )
+ raise
+ all_output = []
+ while True:
+ # The "line" value is a unicode string in Python 2.
+ line = console_to_str(proc.stdout.readline())
+ if not line:
+ break
+ line = line.rstrip()
+ all_output.append(line + '\n')
+
+ # Show the line immediately.
+ log_subprocess(line)
+ # Update the spinner.
+ if use_spinner:
+ spinner.spin()
+ try:
+ proc.wait()
+ finally:
+ if proc.stdout:
+ proc.stdout.close()
+ proc_had_error = (
+ proc.returncode and proc.returncode not in extra_ok_returncodes
+ )
+ if use_spinner:
+ if proc_had_error:
+ spinner.finish("error")
+ else:
+ spinner.finish("done")
+ if proc_had_error:
+ if on_returncode == 'raise':
+ if not showing_subprocess and log_failed_cmd:
+ # Then the subprocess streams haven't been logged to the
+ # console yet.
+ msg = make_subprocess_output_error(
+ cmd_args=cmd,
+ cwd=cwd,
+ lines=all_output,
+ exit_status=proc.returncode,
+ )
+ subprocess_logger.error(msg)
+ exc_msg = (
+ 'Command errored out with exit status {}: {} '
+ 'Check the logs for full command output.'
+ ).format(proc.returncode, command_desc)
+ raise InstallationError(exc_msg)
+ elif on_returncode == 'warn':
+ subprocess_logger.warning(
+ 'Command "%s" had error code %s in %s',
+ command_desc, proc.returncode, cwd,
+ )
+ elif on_returncode == 'ignore':
+ pass
+ else:
+ raise ValueError('Invalid value: on_returncode=%s' %
+ repr(on_returncode))
+ return ''.join(all_output)
+
+
+def runner_with_spinner_message(message):
+ # type: (str) -> Callable[..., None]
+ """Provide a subprocess_runner that shows a spinner message.
+
+ Intended for use with for pep517's Pep517HookCaller. Thus, the runner has
+ an API that matches what's expected by Pep517HookCaller.subprocess_runner.
+ """
+
+ def runner(
+ cmd, # type: List[str]
+ cwd=None, # type: Optional[str]
+ extra_environ=None # type: Optional[Mapping[str, Any]]
+ ):
+ # type: (...) -> None
+ with open_spinner(message) as spinner:
+ call_subprocess(
+ cmd,
+ cwd=cwd,
+ extra_environ=extra_environ,
+ spinner=spinner,
+ )
+
+ return runner
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/temp_dir.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/temp_dir.py
new file mode 100644
index 0000000000000000000000000000000000000000..65e41bc70e2d8184b8917b539726556c84f2c0df
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/temp_dir.py
@@ -0,0 +1,250 @@
+from __future__ import absolute_import
+
+import errno
+import itertools
+import logging
+import os.path
+import tempfile
+from contextlib import contextmanager
+
+from pip._vendor.contextlib2 import ExitStack
+
+from pip._internal.utils.misc import rmtree
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Any, Dict, Iterator, Optional, TypeVar
+
+ _T = TypeVar('_T', bound='TempDirectory')
+
+
+logger = logging.getLogger(__name__)
+
+
+_tempdir_manager = None # type: Optional[ExitStack]
+
+
+@contextmanager
+def global_tempdir_manager():
+ # type: () -> Iterator[None]
+ global _tempdir_manager
+ with ExitStack() as stack:
+ old_tempdir_manager, _tempdir_manager = _tempdir_manager, stack
+ try:
+ yield
+ finally:
+ _tempdir_manager = old_tempdir_manager
+
+
+class TempDirectoryTypeRegistry(object):
+ """Manages temp directory behavior
+ """
+
+ def __init__(self):
+ # type: () -> None
+ self._should_delete = {} # type: Dict[str, bool]
+
+ def set_delete(self, kind, value):
+ # type: (str, bool) -> None
+ """Indicate whether a TempDirectory of the given kind should be
+ auto-deleted.
+ """
+ self._should_delete[kind] = value
+
+ def get_delete(self, kind):
+ # type: (str) -> bool
+ """Get configured auto-delete flag for a given TempDirectory type,
+ default True.
+ """
+ return self._should_delete.get(kind, True)
+
+
+_tempdir_registry = None # type: Optional[TempDirectoryTypeRegistry]
+
+
+@contextmanager
+def tempdir_registry():
+ # type: () -> Iterator[TempDirectoryTypeRegistry]
+ """Provides a scoped global tempdir registry that can be used to dictate
+ whether directories should be deleted.
+ """
+ global _tempdir_registry
+ old_tempdir_registry = _tempdir_registry
+ _tempdir_registry = TempDirectoryTypeRegistry()
+ try:
+ yield _tempdir_registry
+ finally:
+ _tempdir_registry = old_tempdir_registry
+
+
+class TempDirectory(object):
+ """Helper class that owns and cleans up a temporary directory.
+
+ This class can be used as a context manager or as an OO representation of a
+ temporary directory.
+
+ Attributes:
+ path
+ Location to the created temporary directory
+ delete
+ Whether the directory should be deleted when exiting
+ (when used as a contextmanager)
+
+ Methods:
+ cleanup()
+ Deletes the temporary directory
+
+ When used as a context manager, if the delete attribute is True, on
+ exiting the context the temporary directory is deleted.
+ """
+
+ def __init__(
+ self,
+ path=None, # type: Optional[str]
+ delete=None, # type: Optional[bool]
+ kind="temp", # type: str
+ globally_managed=False, # type: bool
+ ):
+ super(TempDirectory, self).__init__()
+
+ # If we were given an explicit directory, resolve delete option now.
+ # Otherwise we wait until cleanup and see what tempdir_registry says.
+ if path is not None and delete is None:
+ delete = False
+
+ if path is None:
+ path = self._create(kind)
+
+ self._path = path
+ self._deleted = False
+ self.delete = delete
+ self.kind = kind
+
+ if globally_managed:
+ assert _tempdir_manager is not None
+ _tempdir_manager.enter_context(self)
+
+ @property
+ def path(self):
+ # type: () -> str
+ assert not self._deleted, (
+ "Attempted to access deleted path: {}".format(self._path)
+ )
+ return self._path
+
+ def __repr__(self):
+ # type: () -> str
+ return "<{} {!r}>".format(self.__class__.__name__, self.path)
+
+ def __enter__(self):
+ # type: (_T) -> _T
+ return self
+
+ def __exit__(self, exc, value, tb):
+ # type: (Any, Any, Any) -> None
+ if self.delete is not None:
+ delete = self.delete
+ elif _tempdir_registry:
+ delete = _tempdir_registry.get_delete(self.kind)
+ else:
+ delete = True
+
+ if delete:
+ self.cleanup()
+
+ def _create(self, kind):
+ # type: (str) -> str
+ """Create a temporary directory and store its path in self.path
+ """
+ # We realpath here because some systems have their default tmpdir
+ # symlinked to another directory. This tends to confuse build
+ # scripts, so we canonicalize the path by traversing potential
+ # symlinks here.
+ path = os.path.realpath(
+ tempfile.mkdtemp(prefix="pip-{}-".format(kind))
+ )
+ logger.debug("Created temporary directory: {}".format(path))
+ return path
+
+ def cleanup(self):
+ # type: () -> None
+ """Remove the temporary directory created and reset state
+ """
+ self._deleted = True
+ if os.path.exists(self._path):
+ rmtree(self._path)
+
+
+class AdjacentTempDirectory(TempDirectory):
+ """Helper class that creates a temporary directory adjacent to a real one.
+
+ Attributes:
+ original
+ The original directory to create a temp directory for.
+ path
+ After calling create() or entering, contains the full
+ path to the temporary directory.
+ delete
+ Whether the directory should be deleted when exiting
+ (when used as a contextmanager)
+
+ """
+ # The characters that may be used to name the temp directory
+ # We always prepend a ~ and then rotate through these until
+ # a usable name is found.
+ # pkg_resources raises a different error for .dist-info folder
+ # with leading '-' and invalid metadata
+ LEADING_CHARS = "-~.=%0123456789"
+
+ def __init__(self, original, delete=None):
+ # type: (str, Optional[bool]) -> None
+ self.original = original.rstrip('/\\')
+ super(AdjacentTempDirectory, self).__init__(delete=delete)
+
+ @classmethod
+ def _generate_names(cls, name):
+ # type: (str) -> Iterator[str]
+ """Generates a series of temporary names.
+
+ The algorithm replaces the leading characters in the name
+ with ones that are valid filesystem characters, but are not
+ valid package names (for both Python and pip definitions of
+ package).
+ """
+ for i in range(1, len(name)):
+ for candidate in itertools.combinations_with_replacement(
+ cls.LEADING_CHARS, i - 1):
+ new_name = '~' + ''.join(candidate) + name[i:]
+ if new_name != name:
+ yield new_name
+
+ # If we make it this far, we will have to make a longer name
+ for i in range(len(cls.LEADING_CHARS)):
+ for candidate in itertools.combinations_with_replacement(
+ cls.LEADING_CHARS, i):
+ new_name = '~' + ''.join(candidate) + name
+ if new_name != name:
+ yield new_name
+
+ def _create(self, kind):
+ # type: (str) -> str
+ root, name = os.path.split(self.original)
+ for candidate in self._generate_names(name):
+ path = os.path.join(root, candidate)
+ try:
+ os.mkdir(path)
+ except OSError as ex:
+ # Continue if the name exists already
+ if ex.errno != errno.EEXIST:
+ raise
+ else:
+ path = os.path.realpath(path)
+ break
+ else:
+ # Final fallback on the default behavior.
+ path = os.path.realpath(
+ tempfile.mkdtemp(prefix="pip-{}-".format(kind))
+ )
+
+ logger.debug("Created temporary directory: {}".format(path))
+ return path
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/typing.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/typing.py
new file mode 100644
index 0000000000000000000000000000000000000000..8505a29b15d5f8a3565a52796c4e39cc6b826ffc
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/typing.py
@@ -0,0 +1,38 @@
+"""For neatly implementing static typing in pip.
+
+`mypy` - the static type analysis tool we use - uses the `typing` module, which
+provides core functionality fundamental to mypy's functioning.
+
+Generally, `typing` would be imported at runtime and used in that fashion -
+it acts as a no-op at runtime and does not have any run-time overhead by
+design.
+
+As it turns out, `typing` is not vendorable - it uses separate sources for
+Python 2/Python 3. Thus, this codebase can not expect it to be present.
+To work around this, mypy allows the typing import to be behind a False-y
+optional to prevent it from running at runtime and type-comments can be used
+to remove the need for the types to be accessible directly during runtime.
+
+This module provides the False-y guard in a nicely named fashion so that a
+curious maintainer can reach here to read this.
+
+In pip, all static-typing related imports should be guarded as follows:
+
+ from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+ if MYPY_CHECK_RUNNING:
+ from typing import ...
+
+Ref: https://github.com/python/mypy/issues/3216
+"""
+
+MYPY_CHECK_RUNNING = False
+
+
+if MYPY_CHECK_RUNNING:
+ from typing import cast
+else:
+ # typing's cast() is needed at runtime, but we don't want to import typing.
+ # Thus, we use a dummy no-op version, which we tell mypy to ignore.
+ def cast(type_, value): # type: ignore
+ return value
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/ui.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/ui.py
new file mode 100644
index 0000000000000000000000000000000000000000..87782aa641d5dfe4845f751c8fcc05658da91501
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/ui.py
@@ -0,0 +1,428 @@
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+# mypy: disallow-untyped-defs=False
+
+from __future__ import absolute_import, division
+
+import contextlib
+import itertools
+import logging
+import sys
+import time
+from signal import SIGINT, default_int_handler, signal
+
+from pip._vendor import six
+from pip._vendor.progress import HIDE_CURSOR, SHOW_CURSOR
+from pip._vendor.progress.bar import Bar, FillingCirclesBar, IncrementalBar
+from pip._vendor.progress.spinner import Spinner
+
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.logging import get_indentation
+from pip._internal.utils.misc import format_size
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Any, Iterator, IO
+
+try:
+ from pip._vendor import colorama
+# Lots of different errors can come from this, including SystemError and
+# ImportError.
+except Exception:
+ colorama = None
+
+logger = logging.getLogger(__name__)
+
+
+def _select_progress_class(preferred, fallback):
+ encoding = getattr(preferred.file, "encoding", None)
+
+ # If we don't know what encoding this file is in, then we'll just assume
+ # that it doesn't support unicode and use the ASCII bar.
+ if not encoding:
+ return fallback
+
+ # Collect all of the possible characters we want to use with the preferred
+ # bar.
+ characters = [
+ getattr(preferred, "empty_fill", six.text_type()),
+ getattr(preferred, "fill", six.text_type()),
+ ]
+ characters += list(getattr(preferred, "phases", []))
+
+ # Try to decode the characters we're using for the bar using the encoding
+ # of the given file, if this works then we'll assume that we can use the
+ # fancier bar and if not we'll fall back to the plaintext bar.
+ try:
+ six.text_type().join(characters).encode(encoding)
+ except UnicodeEncodeError:
+ return fallback
+ else:
+ return preferred
+
+
+_BaseBar = _select_progress_class(IncrementalBar, Bar) # type: Any
+
+
+class InterruptibleMixin(object):
+ """
+ Helper to ensure that self.finish() gets called on keyboard interrupt.
+
+ This allows downloads to be interrupted without leaving temporary state
+ (like hidden cursors) behind.
+
+ This class is similar to the progress library's existing SigIntMixin
+ helper, but as of version 1.2, that helper has the following problems:
+
+ 1. It calls sys.exit().
+ 2. It discards the existing SIGINT handler completely.
+ 3. It leaves its own handler in place even after an uninterrupted finish,
+ which will have unexpected delayed effects if the user triggers an
+ unrelated keyboard interrupt some time after a progress-displaying
+ download has already completed, for example.
+ """
+
+ def __init__(self, *args, **kwargs):
+ """
+ Save the original SIGINT handler for later.
+ """
+ super(InterruptibleMixin, self).__init__(*args, **kwargs)
+
+ self.original_handler = signal(SIGINT, self.handle_sigint)
+
+ # If signal() returns None, the previous handler was not installed from
+ # Python, and we cannot restore it. This probably should not happen,
+ # but if it does, we must restore something sensible instead, at least.
+ # The least bad option should be Python's default SIGINT handler, which
+ # just raises KeyboardInterrupt.
+ if self.original_handler is None:
+ self.original_handler = default_int_handler
+
+ def finish(self):
+ """
+ Restore the original SIGINT handler after finishing.
+
+ This should happen regardless of whether the progress display finishes
+ normally, or gets interrupted.
+ """
+ super(InterruptibleMixin, self).finish()
+ signal(SIGINT, self.original_handler)
+
+ def handle_sigint(self, signum, frame):
+ """
+ Call self.finish() before delegating to the original SIGINT handler.
+
+ This handler should only be in place while the progress display is
+ active.
+ """
+ self.finish()
+ self.original_handler(signum, frame)
+
+
+class SilentBar(Bar):
+
+ def update(self):
+ pass
+
+
+class BlueEmojiBar(IncrementalBar):
+
+ suffix = "%(percent)d%%"
+ bar_prefix = " "
+ bar_suffix = " "
+ phases = (u"\U0001F539", u"\U0001F537", u"\U0001F535") # type: Any
+
+
+class DownloadProgressMixin(object):
+
+ def __init__(self, *args, **kwargs):
+ super(DownloadProgressMixin, self).__init__(*args, **kwargs)
+ self.message = (" " * (get_indentation() + 2)) + self.message
+
+ @property
+ def downloaded(self):
+ return format_size(self.index)
+
+ @property
+ def download_speed(self):
+ # Avoid zero division errors...
+ if self.avg == 0.0:
+ return "..."
+ return format_size(1 / self.avg) + "/s"
+
+ @property
+ def pretty_eta(self):
+ if self.eta:
+ return "eta %s" % self.eta_td
+ return ""
+
+ def iter(self, it):
+ for x in it:
+ yield x
+ self.next(len(x))
+ self.finish()
+
+
+class WindowsMixin(object):
+
+ def __init__(self, *args, **kwargs):
+ # The Windows terminal does not support the hide/show cursor ANSI codes
+ # even with colorama. So we'll ensure that hide_cursor is False on
+ # Windows.
+ # This call needs to go before the super() call, so that hide_cursor
+ # is set in time. The base progress bar class writes the "hide cursor"
+ # code to the terminal in its init, so if we don't set this soon
+ # enough, we get a "hide" with no corresponding "show"...
+ if WINDOWS and self.hide_cursor:
+ self.hide_cursor = False
+
+ super(WindowsMixin, self).__init__(*args, **kwargs)
+
+ # Check if we are running on Windows and we have the colorama module,
+ # if we do then wrap our file with it.
+ if WINDOWS and colorama:
+ self.file = colorama.AnsiToWin32(self.file)
+ # The progress code expects to be able to call self.file.isatty()
+ # but the colorama.AnsiToWin32() object doesn't have that, so we'll
+ # add it.
+ self.file.isatty = lambda: self.file.wrapped.isatty()
+ # The progress code expects to be able to call self.file.flush()
+ # but the colorama.AnsiToWin32() object doesn't have that, so we'll
+ # add it.
+ self.file.flush = lambda: self.file.wrapped.flush()
+
+
+class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin,
+ DownloadProgressMixin):
+
+ file = sys.stdout
+ message = "%(percent)d%%"
+ suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"
+
+# NOTE: The "type: ignore" comments on the following classes are there to
+# work around https://github.com/python/typing/issues/241
+
+
+class DefaultDownloadProgressBar(BaseDownloadProgressBar,
+ _BaseBar):
+ pass
+
+
+class DownloadSilentBar(BaseDownloadProgressBar, SilentBar): # type: ignore
+ pass
+
+
+class DownloadBar(BaseDownloadProgressBar, # type: ignore
+ Bar):
+ pass
+
+
+class DownloadFillingCirclesBar(BaseDownloadProgressBar, # type: ignore
+ FillingCirclesBar):
+ pass
+
+
+class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar, # type: ignore
+ BlueEmojiBar):
+ pass
+
+
+class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin,
+ DownloadProgressMixin, Spinner):
+
+ file = sys.stdout
+ suffix = "%(downloaded)s %(download_speed)s"
+
+ def next_phase(self):
+ if not hasattr(self, "_phaser"):
+ self._phaser = itertools.cycle(self.phases)
+ return next(self._phaser)
+
+ def update(self):
+ message = self.message % self
+ phase = self.next_phase()
+ suffix = self.suffix % self
+ line = ''.join([
+ message,
+ " " if message else "",
+ phase,
+ " " if suffix else "",
+ suffix,
+ ])
+
+ self.writeln(line)
+
+
+BAR_TYPES = {
+ "off": (DownloadSilentBar, DownloadSilentBar),
+ "on": (DefaultDownloadProgressBar, DownloadProgressSpinner),
+ "ascii": (DownloadBar, DownloadProgressSpinner),
+ "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner),
+ "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner)
+}
+
+
+def DownloadProgressProvider(progress_bar, max=None):
+ if max is None or max == 0:
+ return BAR_TYPES[progress_bar][1]().iter
+ else:
+ return BAR_TYPES[progress_bar][0](max=max).iter
+
+
+################################################################
+# Generic "something is happening" spinners
+#
+# We don't even try using progress.spinner.Spinner here because it's actually
+# simpler to reimplement from scratch than to coerce their code into doing
+# what we need.
+################################################################
+
+@contextlib.contextmanager
+def hidden_cursor(file):
+ # type: (IO[Any]) -> Iterator[None]
+ # The Windows terminal does not support the hide/show cursor ANSI codes,
+ # even via colorama. So don't even try.
+ if WINDOWS:
+ yield
+ # We don't want to clutter the output with control characters if we're
+ # writing to a file, or if the user is running with --quiet.
+ # See https://github.com/pypa/pip/issues/3418
+ elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO:
+ yield
+ else:
+ file.write(HIDE_CURSOR)
+ try:
+ yield
+ finally:
+ file.write(SHOW_CURSOR)
+
+
+class RateLimiter(object):
+ def __init__(self, min_update_interval_seconds):
+ # type: (float) -> None
+ self._min_update_interval_seconds = min_update_interval_seconds
+ self._last_update = 0 # type: float
+
+ def ready(self):
+ # type: () -> bool
+ now = time.time()
+ delta = now - self._last_update
+ return delta >= self._min_update_interval_seconds
+
+ def reset(self):
+ # type: () -> None
+ self._last_update = time.time()
+
+
+class SpinnerInterface(object):
+ def spin(self):
+ # type: () -> None
+ raise NotImplementedError()
+
+ def finish(self, final_status):
+ # type: (str) -> None
+ raise NotImplementedError()
+
+
+class InteractiveSpinner(SpinnerInterface):
+ def __init__(self, message, file=None, spin_chars="-\\|/",
+ # Empirically, 8 updates/second looks nice
+ min_update_interval_seconds=0.125):
+ self._message = message
+ if file is None:
+ file = sys.stdout
+ self._file = file
+ self._rate_limiter = RateLimiter(min_update_interval_seconds)
+ self._finished = False
+
+ self._spin_cycle = itertools.cycle(spin_chars)
+
+ self._file.write(" " * get_indentation() + self._message + " ... ")
+ self._width = 0
+
+ def _write(self, status):
+ assert not self._finished
+ # Erase what we wrote before by backspacing to the beginning, writing
+ # spaces to overwrite the old text, and then backspacing again
+ backup = "\b" * self._width
+ self._file.write(backup + " " * self._width + backup)
+ # Now we have a blank slate to add our status
+ self._file.write(status)
+ self._width = len(status)
+ self._file.flush()
+ self._rate_limiter.reset()
+
+ def spin(self):
+ # type: () -> None
+ if self._finished:
+ return
+ if not self._rate_limiter.ready():
+ return
+ self._write(next(self._spin_cycle))
+
+ def finish(self, final_status):
+ # type: (str) -> None
+ if self._finished:
+ return
+ self._write(final_status)
+ self._file.write("\n")
+ self._file.flush()
+ self._finished = True
+
+
+# Used for dumb terminals, non-interactive installs (no tty), etc.
+# We still print updates occasionally (once every 60 seconds by default) to
+# act as a keep-alive for systems like Travis-CI that take lack-of-output as
+# an indication that a task has frozen.
+class NonInteractiveSpinner(SpinnerInterface):
+ def __init__(self, message, min_update_interval_seconds=60):
+ # type: (str, float) -> None
+ self._message = message
+ self._finished = False
+ self._rate_limiter = RateLimiter(min_update_interval_seconds)
+ self._update("started")
+
+ def _update(self, status):
+ assert not self._finished
+ self._rate_limiter.reset()
+ logger.info("%s: %s", self._message, status)
+
+ def spin(self):
+ # type: () -> None
+ if self._finished:
+ return
+ if not self._rate_limiter.ready():
+ return
+ self._update("still running...")
+
+ def finish(self, final_status):
+ # type: (str) -> None
+ if self._finished:
+ return
+ self._update("finished with status '%s'" % (final_status,))
+ self._finished = True
+
+
+@contextlib.contextmanager
+def open_spinner(message):
+ # type: (str) -> Iterator[SpinnerInterface]
+ # Interactive spinner goes directly to sys.stdout rather than being routed
+ # through the logging system, but it acts like it has level INFO,
+ # i.e. it's only displayed if we're at level INFO or better.
+ # Non-interactive spinner goes through the logging system, so it is always
+ # in sync with logging configuration.
+ if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
+ spinner = InteractiveSpinner(message) # type: SpinnerInterface
+ else:
+ spinner = NonInteractiveSpinner(message)
+ try:
+ with hidden_cursor(sys.stdout):
+ yield spinner
+ except KeyboardInterrupt:
+ spinner.finish("canceled")
+ raise
+ except Exception:
+ spinner.finish("error")
+ raise
+ else:
+ spinner.finish("done")
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/unpacking.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/unpacking.py
new file mode 100644
index 0000000000000000000000000000000000000000..7252dc217bfaece6fedbaf835cecbb2a06cdcbb0
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/unpacking.py
@@ -0,0 +1,272 @@
+"""Utilities related archives.
+"""
+
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+# mypy: disallow-untyped-defs=False
+
+from __future__ import absolute_import
+
+import logging
+import os
+import shutil
+import stat
+import tarfile
+import zipfile
+
+from pip._internal.exceptions import InstallationError
+from pip._internal.utils.filetypes import (
+ BZ2_EXTENSIONS,
+ TAR_EXTENSIONS,
+ XZ_EXTENSIONS,
+ ZIP_EXTENSIONS,
+)
+from pip._internal.utils.misc import ensure_dir
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Iterable, List, Optional, Text, Union
+
+
+logger = logging.getLogger(__name__)
+
+
+SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS
+
+try:
+ import bz2 # noqa
+ SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS
+except ImportError:
+ logger.debug('bz2 module is not available')
+
+try:
+ # Only for Python 3.3+
+ import lzma # noqa
+ SUPPORTED_EXTENSIONS += XZ_EXTENSIONS
+except ImportError:
+ logger.debug('lzma module is not available')
+
+
+def current_umask():
+ """Get the current umask which involves having to set it temporarily."""
+ mask = os.umask(0)
+ os.umask(mask)
+ return mask
+
+
+def split_leading_dir(path):
+ # type: (Union[str, Text]) -> List[Union[str, Text]]
+ path = path.lstrip('/').lstrip('\\')
+ if (
+ '/' in path and (
+ ('\\' in path and path.find('/') < path.find('\\')) or
+ '\\' not in path
+ )
+ ):
+ return path.split('/', 1)
+ elif '\\' in path:
+ return path.split('\\', 1)
+ else:
+ return [path, '']
+
+
+def has_leading_dir(paths):
+ # type: (Iterable[Union[str, Text]]) -> bool
+ """Returns true if all the paths have the same leading path name
+ (i.e., everything is in one subdirectory in an archive)"""
+ common_prefix = None
+ for path in paths:
+ prefix, rest = split_leading_dir(path)
+ if not prefix:
+ return False
+ elif common_prefix is None:
+ common_prefix = prefix
+ elif prefix != common_prefix:
+ return False
+ return True
+
+
+def is_within_directory(directory, target):
+ # type: ((Union[str, Text]), (Union[str, Text])) -> bool
+ """
+ Return true if the absolute path of target is within the directory
+ """
+ abs_directory = os.path.abspath(directory)
+ abs_target = os.path.abspath(target)
+
+ prefix = os.path.commonprefix([abs_directory, abs_target])
+ return prefix == abs_directory
+
+
+def unzip_file(filename, location, flatten=True):
+ # type: (str, str, bool) -> None
+ """
+ Unzip the file (with path `filename`) to the destination `location`. All
+ files are written based on system defaults and umask (i.e. permissions are
+ not preserved), except that regular file members with any execute
+ permissions (user, group, or world) have "chmod +x" applied after being
+ written. Note that for windows, any execute changes using os.chmod are
+ no-ops per the python docs.
+ """
+ ensure_dir(location)
+ zipfp = open(filename, 'rb')
+ try:
+ zip = zipfile.ZipFile(zipfp, allowZip64=True)
+ leading = has_leading_dir(zip.namelist()) and flatten
+ for info in zip.infolist():
+ name = info.filename
+ fn = name
+ if leading:
+ fn = split_leading_dir(name)[1]
+ fn = os.path.join(location, fn)
+ dir = os.path.dirname(fn)
+ if not is_within_directory(location, fn):
+ message = (
+ 'The zip file ({}) has a file ({}) trying to install '
+ 'outside target directory ({})'
+ )
+ raise InstallationError(message.format(filename, fn, location))
+ if fn.endswith('/') or fn.endswith('\\'):
+ # A directory
+ ensure_dir(fn)
+ else:
+ ensure_dir(dir)
+ # Don't use read() to avoid allocating an arbitrarily large
+ # chunk of memory for the file's content
+ fp = zip.open(name)
+ try:
+ with open(fn, 'wb') as destfp:
+ shutil.copyfileobj(fp, destfp)
+ finally:
+ fp.close()
+ mode = info.external_attr >> 16
+ # if mode and regular file and any execute permissions for
+ # user/group/world?
+ if mode and stat.S_ISREG(mode) and mode & 0o111:
+ # make dest file have execute for user/group/world
+ # (chmod +x) no-op on windows per python docs
+ os.chmod(fn, (0o777 - current_umask() | 0o111))
+ finally:
+ zipfp.close()
+
+
+def untar_file(filename, location):
+ # type: (str, str) -> None
+ """
+ Untar the file (with path `filename`) to the destination `location`.
+ All files are written based on system defaults and umask (i.e. permissions
+ are not preserved), except that regular file members with any execute
+ permissions (user, group, or world) have "chmod +x" applied after being
+ written. Note that for windows, any execute changes using os.chmod are
+ no-ops per the python docs.
+ """
+ ensure_dir(location)
+ if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
+ mode = 'r:gz'
+ elif filename.lower().endswith(BZ2_EXTENSIONS):
+ mode = 'r:bz2'
+ elif filename.lower().endswith(XZ_EXTENSIONS):
+ mode = 'r:xz'
+ elif filename.lower().endswith('.tar'):
+ mode = 'r'
+ else:
+ logger.warning(
+ 'Cannot determine compression type for file %s', filename,
+ )
+ mode = 'r:*'
+ tar = tarfile.open(filename, mode)
+ try:
+ leading = has_leading_dir([
+ member.name for member in tar.getmembers()
+ ])
+ for member in tar.getmembers():
+ fn = member.name
+ if leading:
+ # https://github.com/python/mypy/issues/1174
+ fn = split_leading_dir(fn)[1] # type: ignore
+ path = os.path.join(location, fn)
+ if not is_within_directory(location, path):
+ message = (
+ 'The tar file ({}) has a file ({}) trying to install '
+ 'outside target directory ({})'
+ )
+ raise InstallationError(
+ message.format(filename, path, location)
+ )
+ if member.isdir():
+ ensure_dir(path)
+ elif member.issym():
+ try:
+ # https://github.com/python/typeshed/issues/2673
+ tar._extract_member(member, path) # type: ignore
+ except Exception as exc:
+ # Some corrupt tar files seem to produce this
+ # (specifically bad symlinks)
+ logger.warning(
+ 'In the tar file %s the member %s is invalid: %s',
+ filename, member.name, exc,
+ )
+ continue
+ else:
+ try:
+ fp = tar.extractfile(member)
+ except (KeyError, AttributeError) as exc:
+ # Some corrupt tar files seem to produce this
+ # (specifically bad symlinks)
+ logger.warning(
+ 'In the tar file %s the member %s is invalid: %s',
+ filename, member.name, exc,
+ )
+ continue
+ ensure_dir(os.path.dirname(path))
+ with open(path, 'wb') as destfp:
+ shutil.copyfileobj(fp, destfp)
+ fp.close()
+ # Update the timestamp (useful for cython compiled files)
+ # https://github.com/python/typeshed/issues/2673
+ tar.utime(member, path) # type: ignore
+ # member have any execute permissions for user/group/world?
+ if member.mode & 0o111:
+ # make dest file have execute for user/group/world
+ # no-op on windows per python docs
+ os.chmod(path, (0o777 - current_umask() | 0o111))
+ finally:
+ tar.close()
+
+
+def unpack_file(
+ filename, # type: str
+ location, # type: str
+ content_type=None, # type: Optional[str]
+):
+ # type: (...) -> None
+ filename = os.path.realpath(filename)
+ if (
+ content_type == 'application/zip' or
+ filename.lower().endswith(ZIP_EXTENSIONS) or
+ zipfile.is_zipfile(filename)
+ ):
+ unzip_file(
+ filename,
+ location,
+ flatten=not filename.endswith('.whl')
+ )
+ elif (
+ content_type == 'application/x-gzip' or
+ tarfile.is_tarfile(filename) or
+ filename.lower().endswith(
+ TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS
+ )
+ ):
+ untar_file(filename, location)
+ else:
+ # FIXME: handle?
+ # FIXME: magic signatures?
+ logger.critical(
+ 'Cannot unpack file %s (downloaded from %s, content-type: %s); '
+ 'cannot detect archive format',
+ filename, location, content_type,
+ )
+ raise InstallationError(
+ 'Cannot determine archive format of {}'.format(location)
+ )
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/urls.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/urls.py
new file mode 100644
index 0000000000000000000000000000000000000000..9ad40feb345423ea76d86cc0e4541e3de84bae34
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/urls.py
@@ -0,0 +1,54 @@
+import os
+import sys
+
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+from pip._vendor.six.moves.urllib import request as urllib_request
+
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional, Text, Union
+
+
+def get_url_scheme(url):
+ # type: (Union[str, Text]) -> Optional[Text]
+ if ':' not in url:
+ return None
+ return url.split(':', 1)[0].lower()
+
+
+def path_to_url(path):
+ # type: (Union[str, Text]) -> str
+ """
+ Convert a path to a file: URL. The path will be made absolute and have
+ quoted path parts.
+ """
+ path = os.path.normpath(os.path.abspath(path))
+ url = urllib_parse.urljoin('file:', urllib_request.pathname2url(path))
+ return url
+
+
+def url_to_path(url):
+ # type: (str) -> str
+ """
+ Convert a file: URL to a path.
+ """
+ assert url.startswith('file:'), (
+ "You can only turn file: urls into filenames (not %r)" % url)
+
+ _, netloc, path, _, _ = urllib_parse.urlsplit(url)
+
+ if not netloc or netloc == 'localhost':
+ # According to RFC 8089, same as empty authority.
+ netloc = ''
+ elif sys.platform == 'win32':
+ # If we have a UNC path, prepend UNC share notation.
+ netloc = '\\\\' + netloc
+ else:
+ raise ValueError(
+ 'non-local file URIs are not supported on this platform: %r'
+ % url
+ )
+
+ path = urllib_request.url2pathname(netloc + path)
+ return path
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/virtualenv.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/virtualenv.py
new file mode 100644
index 0000000000000000000000000000000000000000..d81e6ac54bb13a898295923126a934b9ea76f641
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/virtualenv.py
@@ -0,0 +1,115 @@
+from __future__ import absolute_import
+
+import logging
+import os
+import re
+import site
+import sys
+
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from typing import List, Optional
+
+logger = logging.getLogger(__name__)
+_INCLUDE_SYSTEM_SITE_PACKAGES_REGEX = re.compile(
+ r"include-system-site-packages\s*=\s*(?P<value>true|false)"
+)
+
+
+def _running_under_venv():
+ # type: () -> bool
+ """Checks if sys.base_prefix and sys.prefix match.
+
+ This handles PEP 405 compliant virtual environments.
+ """
+ return sys.prefix != getattr(sys, "base_prefix", sys.prefix)
+
+
+def _running_under_regular_virtualenv():
+ # type: () -> bool
+ """Checks if sys.real_prefix is set.
+
+ This handles virtual environments created with pypa's virtualenv.
+ """
+ # pypa/virtualenv case
+ return hasattr(sys, 'real_prefix')
+
+
+def running_under_virtualenv():
+ # type: () -> bool
+ """Return True if we're running inside a virtualenv, False otherwise.
+ """
+ return _running_under_venv() or _running_under_regular_virtualenv()
+
+
+def _get_pyvenv_cfg_lines():
+ # type: () -> Optional[List[str]]
+ """Reads {sys.prefix}/pyvenv.cfg and returns its contents as list of lines
+
+ Returns None, if it could not read/access the file.
+ """
+ pyvenv_cfg_file = os.path.join(sys.prefix, 'pyvenv.cfg')
+ try:
+ with open(pyvenv_cfg_file) as f:
+ return f.read().splitlines() # avoids trailing newlines
+ except IOError:
+ return None
+
+
+def _no_global_under_venv():
+ # type: () -> bool
+ """Check `{sys.prefix}/pyvenv.cfg` for system site-packages inclusion
+
+ PEP 405 specifies that when system site-packages are not supposed to be
+ visible from a virtual environment, `pyvenv.cfg` must contain the following
+ line:
+
+ include-system-site-packages = false
+
+ Additionally, log a warning if accessing the file fails.
+ """
+ cfg_lines = _get_pyvenv_cfg_lines()
+ if cfg_lines is None:
+ # We're not in a "sane" venv, so assume there is no system
+ # site-packages access (since that's PEP 405's default state).
+ logger.warning(
+ "Could not access 'pyvenv.cfg' despite a virtual environment "
+ "being active. Assuming global site-packages is not accessible "
+ "in this environment."
+ )
+ return True
+
+ for line in cfg_lines:
+ match = _INCLUDE_SYSTEM_SITE_PACKAGES_REGEX.match(line)
+ if match is not None and match.group('value') == 'false':
+ return True
+ return False
+
+
+def _no_global_under_regular_virtualenv():
+ # type: () -> bool
+ """Check if "no-global-site-packages.txt" exists beside site.py
+
+ This mirrors logic in pypa/virtualenv for determining whether system
+ site-packages are visible in the virtual environment.
+ """
+ site_mod_dir = os.path.dirname(os.path.abspath(site.__file__))
+ no_global_site_packages_file = os.path.join(
+ site_mod_dir, 'no-global-site-packages.txt',
+ )
+ return os.path.exists(no_global_site_packages_file)
+
+
+def virtualenv_no_global():
+ # type: () -> bool
+ """Returns a boolean, whether running in venv with no system site-packages.
+ """
+
+ if _running_under_regular_virtualenv():
+ return _no_global_under_regular_virtualenv()
+
+ if _running_under_venv():
+ return _no_global_under_venv()
+
+ return False
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/wheel.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/wheel.py
new file mode 100644
index 0000000000000000000000000000000000000000..837e0afd7e5ca32666ffd0acdc33549d03626bcd
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/utils/wheel.py
@@ -0,0 +1,225 @@
+"""Support functions for working with wheel files.
+"""
+
+from __future__ import absolute_import
+
+import logging
+from email.parser import Parser
+from zipfile import ZipFile
+
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.pkg_resources import DistInfoDistribution
+from pip._vendor.six import PY2, ensure_str
+
+from pip._internal.exceptions import UnsupportedWheel
+from pip._internal.utils.pkg_resources import DictMetadata
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+ from email.message import Message
+ from typing import Dict, Tuple
+
+ from pip._vendor.pkg_resources import Distribution
+
+if PY2:
+ from zipfile import BadZipfile as BadZipFile
+else:
+ from zipfile import BadZipFile
+
+
+VERSION_COMPATIBLE = (1, 0)
+
+
+logger = logging.getLogger(__name__)
+
+
+class WheelMetadata(DictMetadata):
+ """Metadata provider that maps metadata decoding exceptions to our
+ internal exception type.
+ """
+ def __init__(self, metadata, wheel_name):
+ # type: (Dict[str, bytes], str) -> None
+ super(WheelMetadata, self).__init__(metadata)
+ self._wheel_name = wheel_name
+
+ def get_metadata(self, name):
+ # type: (str) -> str
+ try:
+ return super(WheelMetadata, self).get_metadata(name)
+ except UnicodeDecodeError as e:
+ # Augment the default error with the origin of the file.
+ raise UnsupportedWheel(
+ "Error decoding metadata for {}: {}".format(
+ self._wheel_name, e
+ )
+ )
+
+
+def pkg_resources_distribution_for_wheel(wheel_zip, name, location):
+ # type: (ZipFile, str, str) -> Distribution
+ """Get a pkg_resources distribution given a wheel.
+
+ :raises UnsupportedWheel: on any errors
+ """
+ info_dir, _ = parse_wheel(wheel_zip, name)
+
+ metadata_files = [
+ p for p in wheel_zip.namelist() if p.startswith("{}/".format(info_dir))
+ ]
+
+ metadata_text = {} # type: Dict[str, bytes]
+ for path in metadata_files:
+ # If a flag is set, namelist entries may be unicode in Python 2.
+ # We coerce them to native str type to match the types used in the rest
+ # of the code. This cannot fail because unicode can always be encoded
+ # with UTF-8.
+ full_path = ensure_str(path)
+ _, metadata_name = full_path.split("/", 1)
+
+ try:
+ metadata_text[metadata_name] = read_wheel_metadata_file(
+ wheel_zip, full_path
+ )
+ except UnsupportedWheel as e:
+ raise UnsupportedWheel(
+ "{} has an invalid wheel, {}".format(name, str(e))
+ )
+
+ metadata = WheelMetadata(metadata_text, location)
+
+ return DistInfoDistribution(
+ location=location, metadata=metadata, project_name=name
+ )
+
+
+def parse_wheel(wheel_zip, name):
+ # type: (ZipFile, str) -> Tuple[str, Message]
+ """Extract information from the provided wheel, ensuring it meets basic
+ standards.
+
+ Returns the name of the .dist-info directory and the parsed WHEEL metadata.
+ """
+ try:
+ info_dir = wheel_dist_info_dir(wheel_zip, name)
+ metadata = wheel_metadata(wheel_zip, info_dir)
+ version = wheel_version(metadata)
+ except UnsupportedWheel as e:
+ raise UnsupportedWheel(
+ "{} has an invalid wheel, {}".format(name, str(e))
+ )
+
+ check_compatibility(version, name)
+
+ return info_dir, metadata
+
+
+def wheel_dist_info_dir(source, name):
+ # type: (ZipFile, str) -> str
+ """Returns the name of the contained .dist-info directory.
+
+ Raises AssertionError or UnsupportedWheel if not found, >1 found, or
+ it doesn't match the provided name.
+ """
+ # Zip file path separators must be /
+ subdirs = list(set(p.split("/")[0] for p in source.namelist()))
+
+ info_dirs = [s for s in subdirs if s.endswith('.dist-info')]
+
+ if not info_dirs:
+ raise UnsupportedWheel(".dist-info directory not found")
+
+ if len(info_dirs) > 1:
+ raise UnsupportedWheel(
+ "multiple .dist-info directories found: {}".format(
+ ", ".join(info_dirs)
+ )
+ )
+
+ info_dir = info_dirs[0]
+
+ info_dir_name = canonicalize_name(info_dir)
+ canonical_name = canonicalize_name(name)
+ if not info_dir_name.startswith(canonical_name):
+ raise UnsupportedWheel(
+ ".dist-info directory {!r} does not start with {!r}".format(
+ info_dir, canonical_name
+ )
+ )
+
+ # Zip file paths can be unicode or str depending on the zip entry flags,
+ # so normalize it.
+ return ensure_str(info_dir)
+
+
+def read_wheel_metadata_file(source, path):
+ # type: (ZipFile, str) -> bytes
+ try:
+ return source.read(path)
+ # BadZipFile for general corruption, KeyError for missing entry,
+ # and RuntimeError for password-protected files
+ except (BadZipFile, KeyError, RuntimeError) as e:
+ raise UnsupportedWheel(
+ "could not read {!r} file: {!r}".format(path, e)
+ )
+
+
+def wheel_metadata(source, dist_info_dir):
+ # type: (ZipFile, str) -> Message
+ """Return the WHEEL metadata of an extracted wheel, if possible.
+ Otherwise, raise UnsupportedWheel.
+ """
+ path = "{}/WHEEL".format(dist_info_dir)
+ # Zip file path separators must be /
+ wheel_contents = read_wheel_metadata_file(source, path)
+
+ try:
+ wheel_text = ensure_str(wheel_contents)
+ except UnicodeDecodeError as e:
+ raise UnsupportedWheel("error decoding {!r}: {!r}".format(path, e))
+
+ # FeedParser (used by Parser) does not raise any exceptions. The returned
+ # message may have .defects populated, but for backwards-compatibility we
+ # currently ignore them.
+ return Parser().parsestr(wheel_text)
+
+
+def wheel_version(wheel_data):
+ # type: (Message) -> Tuple[int, ...]
+ """Given WHEEL metadata, return the parsed Wheel-Version.
+ Otherwise, raise UnsupportedWheel.
+ """
+ version_text = wheel_data["Wheel-Version"]
+ if version_text is None:
+ raise UnsupportedWheel("WHEEL is missing Wheel-Version")
+
+ version = version_text.strip()
+
+ try:
+ return tuple(map(int, version.split('.')))
+ except ValueError:
+ raise UnsupportedWheel("invalid Wheel-Version: {!r}".format(version))
+
+
+def check_compatibility(version, name):
+ # type: (Tuple[int, ...], str) -> None
+ """Raises errors or warns if called with an incompatible Wheel-Version.
+
+ Pip should refuse to install a Wheel-Version that's a major series
+ ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when
+ installing a version only minor version ahead (e.g 1.2 > 1.1).
+
+ version: a 2-tuple representing a Wheel-Version (Major, Minor)
+ name: name of wheel or package to raise exception about
+
+ :raises UnsupportedWheel: when an incompatible Wheel-Version is given
+ """
+ if version[0] > VERSION_COMPATIBLE[0]:
+ raise UnsupportedWheel(
+ "%s's Wheel-Version (%s) is not compatible with this version "
+ "of pip" % (name, '.'.join(map(str, version)))
+ )
+ elif version > VERSION_COMPATIBLE:
+ logger.warning(
+ 'Installing from a newer Wheel-Version (%s)',
+ '.'.join(map(str, version)),
+ )
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/__init__.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..2a4eb1375763fa3287d171a2a1b0766d1d9d1224
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/__init__.py
@@ -0,0 +1,15 @@
+# Expose a limited set of classes and functions so callers outside of
+# the vcs package don't need to import deeper than `pip._internal.vcs`.
+# (The test directory and imports protected by MYPY_CHECK_RUNNING may
+# still need to import from a vcs sub-package.)
+# Import all vcs modules to register each VCS in the VcsSupport object.
+import pip._internal.vcs.bazaar
+import pip._internal.vcs.git
+import pip._internal.vcs.mercurial
+import pip._internal.vcs.subversion # noqa: F401
+from pip._internal.vcs.versioncontrol import ( # noqa: F401
+ RemoteNotFoundError,
+ is_url,
+ make_vcs_requirement_url,
+ vcs,
+)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..fdee7f4094fd04f9bdc67f7e3f0d62a53e203fcf
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2e2c6e1b7ba05d83c5819c6eb7fcdbe8defe8dfa
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/git.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/git.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..949f5098e2a26b6ac1aca95e7ef8c44ee5ea0b32
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/git.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d70f5b8ed0199768cb1aa43c81f10a98be92e310
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..cde25e588da4c44a24ae70a0cddfd9db4d6d3732
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6dbb9eefeb6b67ab7a5abf22725a97c3647749ca
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/bazaar.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/bazaar.py
new file mode 100644
index 0000000000000000000000000000000000000000..347c06f9dc7c882299bf1a829049849a06328fe5
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/bazaar.py
@@ -0,0 +1,120 @@
+# The following comment should be removed at some point in the future.
+# mypy: disallow-untyped-defs=False
+
+from __future__ import absolute_import
+
+import logging
+import os
+
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+
+from pip._internal.utils.misc import display_path, rmtree
+from pip._internal.utils.subprocess import make_command
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.urls import path_to_url
+from pip._internal.vcs.versioncontrol import VersionControl, vcs
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional, Tuple
+ from pip._internal.utils.misc import HiddenText
+ from pip._internal.vcs.versioncontrol import AuthInfo, RevOptions
+
+
+logger = logging.getLogger(__name__)
+
+
+class Bazaar(VersionControl):
+ name = 'bzr'
+ dirname = '.bzr'
+ repo_name = 'branch'
+ schemes = (
+ 'bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp',
+ 'bzr+lp',
+ )
+
+ def __init__(self, *args, **kwargs):
+ super(Bazaar, self).__init__(*args, **kwargs)
+ # This is only needed for python <2.7.5
+ # Register lp but do not expose as a scheme to support bzr+lp.
+ if getattr(urllib_parse, 'uses_fragment', None):
+ urllib_parse.uses_fragment.extend(['lp'])
+
+ @staticmethod
+ def get_base_rev_args(rev):
+ return ['-r', rev]
+
+ def export(self, location, url):
+ # type: (str, HiddenText) -> None
+ """
+ Export the Bazaar repository at the url to the destination location
+ """
+ # Remove the location to make sure Bazaar can export it correctly
+ if os.path.exists(location):
+ rmtree(location)
+
+ url, rev_options = self.get_url_rev_options(url)
+ self.run_command(
+ make_command('export', location, url, rev_options.to_args()),
+ show_stdout=False,
+ )
+
+ def fetch_new(self, dest, url, rev_options):
+ # type: (str, HiddenText, RevOptions) -> None
+ rev_display = rev_options.to_display()
+ logger.info(
+ 'Checking out %s%s to %s',
+ url,
+ rev_display,
+ display_path(dest),
+ )
+ cmd_args = (
+ make_command('branch', '-q', rev_options.to_args(), url, dest)
+ )
+ self.run_command(cmd_args)
+
+ def switch(self, dest, url, rev_options):
+ # type: (str, HiddenText, RevOptions) -> None
+ self.run_command(make_command('switch', url), cwd=dest)
+
+ def update(self, dest, url, rev_options):
+ # type: (str, HiddenText, RevOptions) -> None
+ cmd_args = make_command('pull', '-q', rev_options.to_args())
+ self.run_command(cmd_args, cwd=dest)
+
+ @classmethod
+ def get_url_rev_and_auth(cls, url):
+ # type: (str) -> Tuple[str, Optional[str], AuthInfo]
+ # hotfix the URL scheme after removing bzr+ from bzr+ssh:// readd it
+ url, rev, user_pass = super(Bazaar, cls).get_url_rev_and_auth(url)
+ if url.startswith('ssh://'):
+ url = 'bzr+' + url
+ return url, rev, user_pass
+
+ @classmethod
+ def get_remote_url(cls, location):
+ urls = cls.run_command(['info'], show_stdout=False, cwd=location)
+ for line in urls.splitlines():
+ line = line.strip()
+ for x in ('checkout of branch: ',
+ 'parent branch: '):
+ if line.startswith(x):
+ repo = line.split(x)[1]
+ if cls._is_local_repository(repo):
+ return path_to_url(repo)
+ return repo
+ return None
+
+ @classmethod
+ def get_revision(cls, location):
+ revision = cls.run_command(
+ ['revno'], show_stdout=False, cwd=location,
+ )
+ return revision.splitlines()[-1]
+
+ @classmethod
+ def is_commit_id_equal(cls, dest, name):
+ """Always assume the versions don't match"""
+ return False
+
+
+vcs.register(Bazaar)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/git.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/git.py
new file mode 100644
index 0000000000000000000000000000000000000000..d706064e75b6639338e197124e75dadda5811332
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/git.py
@@ -0,0 +1,395 @@
+# The following comment should be removed at some point in the future.
+# mypy: disallow-untyped-defs=False
+
+from __future__ import absolute_import
+
+import logging
+import os.path
+import re
+
+from pip._vendor.packaging.version import parse as parse_version
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+from pip._vendor.six.moves.urllib import request as urllib_request
+
+from pip._internal.exceptions import BadCommand
+from pip._internal.utils.misc import display_path, hide_url
+from pip._internal.utils.subprocess import make_command
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.vcs.versioncontrol import (
+ RemoteNotFoundError,
+ VersionControl,
+ find_path_to_setup_from_repo_root,
+ vcs,
+)
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional, Tuple
+ from pip._internal.utils.misc import HiddenText
+ from pip._internal.vcs.versioncontrol import AuthInfo, RevOptions
+
+
+urlsplit = urllib_parse.urlsplit
+urlunsplit = urllib_parse.urlunsplit
+
+
+logger = logging.getLogger(__name__)
+
+
+HASH_REGEX = re.compile('^[a-fA-F0-9]{40}$')
+
+
+def looks_like_hash(sha):
+ return bool(HASH_REGEX.match(sha))
+
+
+class Git(VersionControl):
+ name = 'git'
+ dirname = '.git'
+ repo_name = 'clone'
+ schemes = (
+ 'git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file',
+ )
+ # Prevent the user's environment variables from interfering with pip:
+ # https://github.com/pypa/pip/issues/1130
+ unset_environ = ('GIT_DIR', 'GIT_WORK_TREE')
+ default_arg_rev = 'HEAD'
+
+ @staticmethod
+ def get_base_rev_args(rev):
+ return [rev]
+
+ def is_immutable_rev_checkout(self, url, dest):
+ # type: (str, str) -> bool
+ _, rev_options = self.get_url_rev_options(hide_url(url))
+ if not rev_options.rev:
+ return False
+ if not self.is_commit_id_equal(dest, rev_options.rev):
+ # the current commit is different from rev,
+ # which means rev was something else than a commit hash
+ return False
+ # return False in the rare case rev is both a commit hash
+ # and a tag or a branch; we don't want to cache in that case
+ # because that branch/tag could point to something else in the future
+ is_tag_or_branch = bool(
+ self.get_revision_sha(dest, rev_options.rev)[0]
+ )
+ return not is_tag_or_branch
+
+ def get_git_version(self):
+ VERSION_PFX = 'git version '
+ version = self.run_command(['version'], show_stdout=False)
+ if version.startswith(VERSION_PFX):
+ version = version[len(VERSION_PFX):].split()[0]
+ else:
+ version = ''
+ # get first 3 positions of the git version because
+ # on windows it is x.y.z.windows.t, and this parses as
+ # LegacyVersion which always smaller than a Version.
+ version = '.'.join(version.split('.')[:3])
+ return parse_version(version)
+
+ @classmethod
+ def get_current_branch(cls, location):
+ """
+ Return the current branch, or None if HEAD isn't at a branch
+ (e.g. detached HEAD).
+ """
+ # git-symbolic-ref exits with empty stdout if "HEAD" is a detached
+ # HEAD rather than a symbolic ref. In addition, the -q causes the
+ # command to exit with status code 1 instead of 128 in this case
+ # and to suppress the message to stderr.
+ args = ['symbolic-ref', '-q', 'HEAD']
+ output = cls.run_command(
+ args, extra_ok_returncodes=(1, ), show_stdout=False, cwd=location,
+ )
+ ref = output.strip()
+
+ if ref.startswith('refs/heads/'):
+ return ref[len('refs/heads/'):]
+
+ return None
+
+ def export(self, location, url):
+ # type: (str, HiddenText) -> None
+ """Export the Git repository at the url to the destination location"""
+ if not location.endswith('/'):
+ location = location + '/'
+
+ with TempDirectory(kind="export") as temp_dir:
+ self.unpack(temp_dir.path, url=url)
+ self.run_command(
+ ['checkout-index', '-a', '-f', '--prefix', location],
+ show_stdout=False, cwd=temp_dir.path
+ )
+
+ @classmethod
+ def get_revision_sha(cls, dest, rev):
+ """
+ Return (sha_or_none, is_branch), where sha_or_none is a commit hash
+ if the revision names a remote branch or tag, otherwise None.
+
+ Args:
+ dest: the repository directory.
+ rev: the revision name.
+ """
+ # Pass rev to pre-filter the list.
+ output = cls.run_command(['show-ref', rev], cwd=dest,
+ show_stdout=False, on_returncode='ignore')
+ refs = {}
+ # NOTE: We do not use splitlines here since that would split on other
+ # unicode separators, which can be maliciously used to install a
+ # different revision.
+ for line in output.strip().split("\n"):
+ line = line.rstrip("\r")
+ if not line:
+ continue
+ try:
+ sha, ref = line.split(" ", maxsplit=2)
+ except ValueError:
+ # Include the offending line to simplify troubleshooting if
+ # this error ever occurs.
+ raise ValueError('unexpected show-ref line: {!r}'.format(line))
+
+ refs[ref] = sha
+
+ branch_ref = 'refs/remotes/origin/{}'.format(rev)
+ tag_ref = 'refs/tags/{}'.format(rev)
+
+ sha = refs.get(branch_ref)
+ if sha is not None:
+ return (sha, True)
+
+ sha = refs.get(tag_ref)
+
+ return (sha, False)
+
+ @classmethod
+ def resolve_revision(cls, dest, url, rev_options):
+ # type: (str, HiddenText, RevOptions) -> RevOptions
+ """
+ Resolve a revision to a new RevOptions object with the SHA1 of the
+ branch, tag, or ref if found.
+
+ Args:
+ rev_options: a RevOptions object.
+ """
+ rev = rev_options.arg_rev
+ # The arg_rev property's implementation for Git ensures that the
+ # rev return value is always non-None.
+ assert rev is not None
+
+ sha, is_branch = cls.get_revision_sha(dest, rev)
+
+ if sha is not None:
+ rev_options = rev_options.make_new(sha)
+ rev_options.branch_name = rev if is_branch else None
+
+ return rev_options
+
+ # Do not show a warning for the common case of something that has
+ # the form of a Git commit hash.
+ if not looks_like_hash(rev):
+ logger.warning(
+ "Did not find branch or tag '%s', assuming revision or ref.",
+ rev,
+ )
+
+ if not rev.startswith('refs/'):
+ return rev_options
+
+ # If it looks like a ref, we have to fetch it explicitly.
+ cls.run_command(
+ make_command('fetch', '-q', url, rev_options.to_args()),
+ cwd=dest,
+ )
+ # Change the revision to the SHA of the ref we fetched
+ sha = cls.get_revision(dest, rev='FETCH_HEAD')
+ rev_options = rev_options.make_new(sha)
+
+ return rev_options
+
+ @classmethod
+ def is_commit_id_equal(cls, dest, name):
+ """
+ Return whether the current commit hash equals the given name.
+
+ Args:
+ dest: the repository directory.
+ name: a string name.
+ """
+ if not name:
+ # Then avoid an unnecessary subprocess call.
+ return False
+
+ return cls.get_revision(dest) == name
+
+ def fetch_new(self, dest, url, rev_options):
+ # type: (str, HiddenText, RevOptions) -> None
+ rev_display = rev_options.to_display()
+ logger.info('Cloning %s%s to %s', url, rev_display, display_path(dest))
+ self.run_command(make_command('clone', '-q', url, dest))
+
+ if rev_options.rev:
+ # Then a specific revision was requested.
+ rev_options = self.resolve_revision(dest, url, rev_options)
+ branch_name = getattr(rev_options, 'branch_name', None)
+ if branch_name is None:
+ # Only do a checkout if the current commit id doesn't match
+ # the requested revision.
+ if not self.is_commit_id_equal(dest, rev_options.rev):
+ cmd_args = make_command(
+ 'checkout', '-q', rev_options.to_args(),
+ )
+ self.run_command(cmd_args, cwd=dest)
+ elif self.get_current_branch(dest) != branch_name:
+ # Then a specific branch was requested, and that branch
+ # is not yet checked out.
+ track_branch = 'origin/{}'.format(branch_name)
+ cmd_args = [
+ 'checkout', '-b', branch_name, '--track', track_branch,
+ ]
+ self.run_command(cmd_args, cwd=dest)
+
+ #: repo may contain submodules
+ self.update_submodules(dest)
+
+ def switch(self, dest, url, rev_options):
+ # type: (str, HiddenText, RevOptions) -> None
+ self.run_command(
+ make_command('config', 'remote.origin.url', url),
+ cwd=dest,
+ )
+ cmd_args = make_command('checkout', '-q', rev_options.to_args())
+ self.run_command(cmd_args, cwd=dest)
+
+ self.update_submodules(dest)
+
+ def update(self, dest, url, rev_options):
+ # type: (str, HiddenText, RevOptions) -> None
+ # First fetch changes from the default remote
+ if self.get_git_version() >= parse_version('1.9.0'):
+ # fetch tags in addition to everything else
+ self.run_command(['fetch', '-q', '--tags'], cwd=dest)
+ else:
+ self.run_command(['fetch', '-q'], cwd=dest)
+ # Then reset to wanted revision (maybe even origin/master)
+ rev_options = self.resolve_revision(dest, url, rev_options)
+ cmd_args = make_command('reset', '--hard', '-q', rev_options.to_args())
+ self.run_command(cmd_args, cwd=dest)
+ #: update submodules
+ self.update_submodules(dest)
+
+ @classmethod
+ def get_remote_url(cls, location):
+ """
+ Return URL of the first remote encountered.
+
+ Raises RemoteNotFoundError if the repository does not have a remote
+ url configured.
+ """
+ # We need to pass 1 for extra_ok_returncodes since the command
+ # exits with return code 1 if there are no matching lines.
+ stdout = cls.run_command(
+ ['config', '--get-regexp', r'remote\..*\.url'],
+ extra_ok_returncodes=(1, ), show_stdout=False, cwd=location,
+ )
+ remotes = stdout.splitlines()
+ try:
+ found_remote = remotes[0]
+ except IndexError:
+ raise RemoteNotFoundError
+
+ for remote in remotes:
+ if remote.startswith('remote.origin.url '):
+ found_remote = remote
+ break
+ url = found_remote.split(' ')[1]
+ return url.strip()
+
+ @classmethod
+ def get_revision(cls, location, rev=None):
+ if rev is None:
+ rev = 'HEAD'
+ current_rev = cls.run_command(
+ ['rev-parse', rev], show_stdout=False, cwd=location,
+ )
+ return current_rev.strip()
+
+ @classmethod
+ def get_subdirectory(cls, location):
+ """
+ Return the path to setup.py, relative to the repo root.
+ Return None if setup.py is in the repo root.
+ """
+ # find the repo root
+ git_dir = cls.run_command(
+ ['rev-parse', '--git-dir'],
+ show_stdout=False, cwd=location).strip()
+ if not os.path.isabs(git_dir):
+ git_dir = os.path.join(location, git_dir)
+ repo_root = os.path.abspath(os.path.join(git_dir, '..'))
+ return find_path_to_setup_from_repo_root(location, repo_root)
+
+ @classmethod
+ def get_url_rev_and_auth(cls, url):
+ # type: (str) -> Tuple[str, Optional[str], AuthInfo]
+ """
+ Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
+ That's required because although they use SSH they sometimes don't
+ work with a ssh:// scheme (e.g. GitHub). But we need a scheme for
+ parsing. Hence we remove it again afterwards and return it as a stub.
+ """
+ # Works around an apparent Git bug
+ # (see https://article.gmane.org/gmane.comp.version-control.git/146500)
+ scheme, netloc, path, query, fragment = urlsplit(url)
+ if scheme.endswith('file'):
+ initial_slashes = path[:-len(path.lstrip('/'))]
+ newpath = (
+ initial_slashes +
+ urllib_request.url2pathname(path)
+ .replace('\\', '/').lstrip('/')
+ )
+ url = urlunsplit((scheme, netloc, newpath, query, fragment))
+ after_plus = scheme.find('+') + 1
+ url = scheme[:after_plus] + urlunsplit(
+ (scheme[after_plus:], netloc, newpath, query, fragment),
+ )
+
+ if '://' not in url:
+ assert 'file:' not in url
+ url = url.replace('git+', 'git+ssh://')
+ url, rev, user_pass = super(Git, cls).get_url_rev_and_auth(url)
+ url = url.replace('ssh://', '')
+ else:
+ url, rev, user_pass = super(Git, cls).get_url_rev_and_auth(url)
+
+ return url, rev, user_pass
+
+ @classmethod
+ def update_submodules(cls, location):
+ if not os.path.exists(os.path.join(location, '.gitmodules')):
+ return
+ cls.run_command(
+ ['submodule', 'update', '--init', '--recursive', '-q'],
+ cwd=location,
+ )
+
+ @classmethod
+ def controls_location(cls, location):
+ if super(Git, cls).controls_location(location):
+ return True
+ try:
+ r = cls.run_command(['rev-parse'],
+ cwd=location,
+ show_stdout=False,
+ on_returncode='ignore',
+ log_failed_cmd=False)
+ return not r
+ except BadCommand:
+ logger.debug("could not determine if %s is under git control "
+ "because git is not available", location)
+ return False
+
+
+vcs.register(Git)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/mercurial.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/mercurial.py
new file mode 100644
index 0000000000000000000000000000000000000000..d9b58cfe9a4b7a437e0899945243f7e9be5215e9
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/mercurial.py
@@ -0,0 +1,155 @@
+# The following comment should be removed at some point in the future.
+# mypy: disallow-untyped-defs=False
+
+from __future__ import absolute_import
+
+import logging
+import os
+
+from pip._vendor.six.moves import configparser
+
+from pip._internal.exceptions import BadCommand, InstallationError
+from pip._internal.utils.misc import display_path
+from pip._internal.utils.subprocess import make_command
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.urls import path_to_url
+from pip._internal.vcs.versioncontrol import (
+ VersionControl,
+ find_path_to_setup_from_repo_root,
+ vcs,
+)
+
+if MYPY_CHECK_RUNNING:
+ from pip._internal.utils.misc import HiddenText
+ from pip._internal.vcs.versioncontrol import RevOptions
+
+
+logger = logging.getLogger(__name__)
+
+
+class Mercurial(VersionControl):
+ name = 'hg'
+ dirname = '.hg'
+ repo_name = 'clone'
+ schemes = (
+ 'hg', 'hg+file', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http',
+ )
+
+ @staticmethod
+ def get_base_rev_args(rev):
+ return [rev]
+
+ def export(self, location, url):
+ # type: (str, HiddenText) -> None
+ """Export the Hg repository at the url to the destination location"""
+ with TempDirectory(kind="export") as temp_dir:
+ self.unpack(temp_dir.path, url=url)
+
+ self.run_command(
+ ['archive', location], show_stdout=False, cwd=temp_dir.path
+ )
+
+ def fetch_new(self, dest, url, rev_options):
+ # type: (str, HiddenText, RevOptions) -> None
+ rev_display = rev_options.to_display()
+ logger.info(
+ 'Cloning hg %s%s to %s',
+ url,
+ rev_display,
+ display_path(dest),
+ )
+ self.run_command(make_command('clone', '--noupdate', '-q', url, dest))
+ self.run_command(
+ make_command('update', '-q', rev_options.to_args()),
+ cwd=dest,
+ )
+
+ def switch(self, dest, url, rev_options):
+ # type: (str, HiddenText, RevOptions) -> None
+ repo_config = os.path.join(dest, self.dirname, 'hgrc')
+ config = configparser.RawConfigParser()
+ try:
+ config.read(repo_config)
+ config.set('paths', 'default', url.secret)
+ with open(repo_config, 'w') as config_file:
+ config.write(config_file)
+ except (OSError, configparser.NoSectionError) as exc:
+ logger.warning(
+ 'Could not switch Mercurial repository to %s: %s', url, exc,
+ )
+ else:
+ cmd_args = make_command('update', '-q', rev_options.to_args())
+ self.run_command(cmd_args, cwd=dest)
+
+ def update(self, dest, url, rev_options):
+ # type: (str, HiddenText, RevOptions) -> None
+ self.run_command(['pull', '-q'], cwd=dest)
+ cmd_args = make_command('update', '-q', rev_options.to_args())
+ self.run_command(cmd_args, cwd=dest)
+
+ @classmethod
+ def get_remote_url(cls, location):
+ url = cls.run_command(
+ ['showconfig', 'paths.default'],
+ show_stdout=False, cwd=location).strip()
+ if cls._is_local_repository(url):
+ url = path_to_url(url)
+ return url.strip()
+
+ @classmethod
+ def get_revision(cls, location):
+ """
+ Return the repository-local changeset revision number, as an integer.
+ """
+ current_revision = cls.run_command(
+ ['parents', '--template={rev}'],
+ show_stdout=False, cwd=location).strip()
+ return current_revision
+
+ @classmethod
+ def get_requirement_revision(cls, location):
+ """
+ Return the changeset identification hash, as a 40-character
+ hexadecimal string
+ """
+ current_rev_hash = cls.run_command(
+ ['parents', '--template={node}'],
+ show_stdout=False, cwd=location).strip()
+ return current_rev_hash
+
+ @classmethod
+ def is_commit_id_equal(cls, dest, name):
+ """Always assume the versions don't match"""
+ return False
+
+ @classmethod
+ def get_subdirectory(cls, location):
+ """
+ Return the path to setup.py, relative to the repo root.
+ Return None if setup.py is in the repo root.
+ """
+ # find the repo root
+ repo_root = cls.run_command(
+ ['root'], show_stdout=False, cwd=location).strip()
+ if not os.path.isabs(repo_root):
+ repo_root = os.path.abspath(os.path.join(location, repo_root))
+ return find_path_to_setup_from_repo_root(location, repo_root)
+
+ @classmethod
+ def controls_location(cls, location):
+ if super(Mercurial, cls).controls_location(location):
+ return True
+ try:
+ cls.run_command(
+ ['identify'],
+ cwd=location,
+ show_stdout=False,
+ on_returncode='raise',
+ log_failed_cmd=False)
+ return True
+ except (BadCommand, InstallationError):
+ return False
+
+
+vcs.register(Mercurial)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/subversion.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/subversion.py
new file mode 100644
index 0000000000000000000000000000000000000000..6c76d1ad435ab4718c95a09aafa8a8e69a996452
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/subversion.py
@@ -0,0 +1,333 @@
+# The following comment should be removed at some point in the future.
+# mypy: disallow-untyped-defs=False
+
+from __future__ import absolute_import
+
+import logging
+import os
+import re
+
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import (
+ display_path,
+ is_console_interactive,
+ rmtree,
+ split_auth_from_netloc,
+)
+from pip._internal.utils.subprocess import make_command
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.vcs.versioncontrol import VersionControl, vcs
+
+_svn_xml_url_re = re.compile('url="([^"]+)"')
+_svn_rev_re = re.compile(r'committed-rev="(\d+)"')
+_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
+_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>')
+
+
+if MYPY_CHECK_RUNNING:
+ from typing import Optional, Tuple
+ from pip._internal.utils.subprocess import CommandArgs
+ from pip._internal.utils.misc import HiddenText
+ from pip._internal.vcs.versioncontrol import AuthInfo, RevOptions
+
+
+logger = logging.getLogger(__name__)
+
+
+class Subversion(VersionControl):
+ name = 'svn'
+ dirname = '.svn'
+ repo_name = 'checkout'
+ schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn')
+
+ @classmethod
+ def should_add_vcs_url_prefix(cls, remote_url):
+ return True
+
+ @staticmethod
+ def get_base_rev_args(rev):
+ return ['-r', rev]
+
+ @classmethod
+ def get_revision(cls, location):
+ """
+ Return the maximum revision for all files under a given location
+ """
+ # Note: taken from setuptools.command.egg_info
+ revision = 0
+
+ for base, dirs, files in os.walk(location):
+ if cls.dirname not in dirs:
+ dirs[:] = []
+ continue # no sense walking uncontrolled subdirs
+ dirs.remove(cls.dirname)
+ entries_fn = os.path.join(base, cls.dirname, 'entries')
+ if not os.path.exists(entries_fn):
+ # FIXME: should we warn?
+ continue
+
+ dirurl, localrev = cls._get_svn_url_rev(base)
+
+ if base == location:
+ base = dirurl + '/' # save the root url
+ elif not dirurl or not dirurl.startswith(base):
+ dirs[:] = []
+ continue # not part of the same svn tree, skip it
+ revision = max(revision, localrev)
+ return revision
+
+ @classmethod
+ def get_netloc_and_auth(cls, netloc, scheme):
+ """
+ This override allows the auth information to be passed to svn via the
+ --username and --password options instead of via the URL.
+ """
+ if scheme == 'ssh':
+ # The --username and --password options can't be used for
+ # svn+ssh URLs, so keep the auth information in the URL.
+ return super(Subversion, cls).get_netloc_and_auth(netloc, scheme)
+
+ return split_auth_from_netloc(netloc)
+
+ @classmethod
+ def get_url_rev_and_auth(cls, url):
+ # type: (str) -> Tuple[str, Optional[str], AuthInfo]
+ # hotfix the URL scheme after removing svn+ from svn+ssh:// readd it
+ url, rev, user_pass = super(Subversion, cls).get_url_rev_and_auth(url)
+ if url.startswith('ssh://'):
+ url = 'svn+' + url
+ return url, rev, user_pass
+
+ @staticmethod
+ def make_rev_args(username, password):
+ # type: (Optional[str], Optional[HiddenText]) -> CommandArgs
+ extra_args = [] # type: CommandArgs
+ if username:
+ extra_args += ['--username', username]
+ if password:
+ extra_args += ['--password', password]
+
+ return extra_args
+
+ @classmethod
+ def get_remote_url(cls, location):
+ # In cases where the source is in a subdirectory, not alongside
+ # setup.py we have to look up in the location until we find a real
+ # setup.py
+ orig_location = location
+ while not os.path.exists(os.path.join(location, 'setup.py')):
+ last_location = location
+ location = os.path.dirname(location)
+ if location == last_location:
+ # We've traversed up to the root of the filesystem without
+ # finding setup.py
+ logger.warning(
+ "Could not find setup.py for directory %s (tried all "
+ "parent directories)",
+ orig_location,
+ )
+ return None
+
+ return cls._get_svn_url_rev(location)[0]
+
+ @classmethod
+ def _get_svn_url_rev(cls, location):
+ from pip._internal.exceptions import InstallationError
+
+ entries_path = os.path.join(location, cls.dirname, 'entries')
+ if os.path.exists(entries_path):
+ with open(entries_path) as f:
+ data = f.read()
+ else: # subversion >= 1.7 does not have the 'entries' file
+ data = ''
+
+ if (data.startswith('8') or
+ data.startswith('9') or
+ data.startswith('10')):
+ data = list(map(str.splitlines, data.split('\n\x0c\n')))
+ del data[0][0] # get rid of the '8'
+ url = data[0][3]
+ revs = [int(d[9]) for d in data if len(d) > 9 and d[9]] + [0]
+ elif data.startswith('<?xml'):
+ match = _svn_xml_url_re.search(data)
+ if not match:
+ raise ValueError('Badly formatted data: %r' % data)
+ url = match.group(1) # get repository URL
+ revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0]
+ else:
+ try:
+ # subversion >= 1.7
+ # Note that using get_remote_call_options is not necessary here
+ # because `svn info` is being run against a local directory.
+ # We don't need to worry about making sure interactive mode
+ # is being used to prompt for passwords, because passwords
+ # are only potentially needed for remote server requests.
+ xml = cls.run_command(
+ ['info', '--xml', location],
+ show_stdout=False,
+ )
+ url = _svn_info_xml_url_re.search(xml).group(1)
+ revs = [
+ int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)
+ ]
+ except InstallationError:
+ url, revs = None, []
+
+ if revs:
+ rev = max(revs)
+ else:
+ rev = 0
+
+ return url, rev
+
+ @classmethod
+ def is_commit_id_equal(cls, dest, name):
+ """Always assume the versions don't match"""
+ return False
+
+ def __init__(self, use_interactive=None):
+ # type: (bool) -> None
+ if use_interactive is None:
+ use_interactive = is_console_interactive()
+ self.use_interactive = use_interactive
+
+ # This member is used to cache the fetched version of the current
+ # ``svn`` client.
+ # Special value definitions:
+ # None: Not evaluated yet.
+ # Empty tuple: Could not parse version.
+ self._vcs_version = None # type: Optional[Tuple[int, ...]]
+
+ super(Subversion, self).__init__()
+
+ def call_vcs_version(self):
+ # type: () -> Tuple[int, ...]
+ """Query the version of the currently installed Subversion client.
+
+ :return: A tuple containing the parts of the version information or
+ ``()`` if the version returned from ``svn`` could not be parsed.
+ :raises: BadCommand: If ``svn`` is not installed.
+ """
+ # Example versions:
+ # svn, version 1.10.3 (r1842928)
+ # compiled Feb 25 2019, 14:20:39 on x86_64-apple-darwin17.0.0
+ # svn, version 1.7.14 (r1542130)
+ # compiled Mar 28 2018, 08:49:13 on x86_64-pc-linux-gnu
+ version_prefix = 'svn, version '
+ version = self.run_command(['--version'], show_stdout=False)
+ if not version.startswith(version_prefix):
+ return ()
+
+ version = version[len(version_prefix):].split()[0]
+ version_list = version.split('.')
+ try:
+ parsed_version = tuple(map(int, version_list))
+ except ValueError:
+ return ()
+
+ return parsed_version
+
+ def get_vcs_version(self):
+ # type: () -> Tuple[int, ...]
+ """Return the version of the currently installed Subversion client.
+
+ If the version of the Subversion client has already been queried,
+ a cached value will be used.
+
+ :return: A tuple containing the parts of the version information or
+ ``()`` if the version returned from ``svn`` could not be parsed.
+ :raises: BadCommand: If ``svn`` is not installed.
+ """
+ if self._vcs_version is not None:
+ # Use cached version, if available.
+ # If parsing the version failed previously (empty tuple),
+ # do not attempt to parse it again.
+ return self._vcs_version
+
+ vcs_version = self.call_vcs_version()
+ self._vcs_version = vcs_version
+ return vcs_version
+
+ def get_remote_call_options(self):
+ # type: () -> CommandArgs
+ """Return options to be used on calls to Subversion that contact the server.
+
+ These options are applicable for the following ``svn`` subcommands used
+ in this class.
+
+ - checkout
+ - export
+ - switch
+ - update
+
+ :return: A list of command line arguments to pass to ``svn``.
+ """
+ if not self.use_interactive:
+ # --non-interactive switch is available since Subversion 0.14.4.
+ # Subversion < 1.8 runs in interactive mode by default.
+ return ['--non-interactive']
+
+ svn_version = self.get_vcs_version()
+ # By default, Subversion >= 1.8 runs in non-interactive mode if
+ # stdin is not a TTY. Since that is how pip invokes SVN, in
+ # call_subprocess(), pip must pass --force-interactive to ensure
+ # the user can be prompted for a password, if required.
+ # SVN added the --force-interactive option in SVN 1.8. Since
+ # e.g. RHEL/CentOS 7, which is supported until 2024, ships with
+ # SVN 1.7, pip should continue to support SVN 1.7. Therefore, pip
+ # can't safely add the option if the SVN version is < 1.8 (or unknown).
+ if svn_version >= (1, 8):
+ return ['--force-interactive']
+
+ return []
+
+ def export(self, location, url):
+ # type: (str, HiddenText) -> None
+ """Export the svn repository at the url to the destination location"""
+ url, rev_options = self.get_url_rev_options(url)
+
+ logger.info('Exporting svn repository %s to %s', url, location)
+ with indent_log():
+ if os.path.exists(location):
+ # Subversion doesn't like to check out over an existing
+ # directory --force fixes this, but was only added in svn 1.5
+ rmtree(location)
+ cmd_args = make_command(
+ 'export', self.get_remote_call_options(),
+ rev_options.to_args(), url, location,
+ )
+ self.run_command(cmd_args, show_stdout=False)
+
+ def fetch_new(self, dest, url, rev_options):
+ # type: (str, HiddenText, RevOptions) -> None
+ rev_display = rev_options.to_display()
+ logger.info(
+ 'Checking out %s%s to %s',
+ url,
+ rev_display,
+ display_path(dest),
+ )
+ cmd_args = make_command(
+ 'checkout', '-q', self.get_remote_call_options(),
+ rev_options.to_args(), url, dest,
+ )
+ self.run_command(cmd_args)
+
+ def switch(self, dest, url, rev_options):
+ # type: (str, HiddenText, RevOptions) -> None
+ cmd_args = make_command(
+ 'switch', self.get_remote_call_options(), rev_options.to_args(),
+ url, dest,
+ )
+ self.run_command(cmd_args)
+
+ def update(self, dest, url, rev_options):
+ # type: (str, HiddenText, RevOptions) -> None
+ cmd_args = make_command(
+ 'update', self.get_remote_call_options(), rev_options.to_args(),
+ dest,
+ )
+ self.run_command(cmd_args)
+
+
+vcs.register(Subversion)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/versioncontrol.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/versioncontrol.py
new file mode 100644
index 0000000000000000000000000000000000000000..7cfd568829f27f188cf7a3cec86c3c840b8463cb
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/vcs/versioncontrol.py
@@ -0,0 +1,700 @@
+"""Handles all VCS (version control) support"""
+
+from __future__ import absolute_import
+
+import errno
+import logging
+import os
+import shutil
+import sys
+
+from pip._vendor import pkg_resources
+from pip._vendor.six.moves.urllib import parse as urllib_parse
+
+from pip._internal.exceptions import BadCommand
+from pip._internal.utils.compat import samefile
+from pip._internal.utils.misc import (
+ ask_path_exists,
+ backup_dir,
+ display_path,
+ hide_url,
+ hide_value,
+ rmtree,
+)
+from pip._internal.utils.subprocess import call_subprocess, make_command
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.urls import get_url_scheme
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Any, Dict, Iterable, Iterator, List, Mapping, Optional, Text, Tuple,
+ Type, Union
+ )
+ from pip._internal.utils.ui import SpinnerInterface
+ from pip._internal.utils.misc import HiddenText
+ from pip._internal.utils.subprocess import CommandArgs
+
+ AuthInfo = Tuple[Optional[str], Optional[str]]
+
+
+__all__ = ['vcs']
+
+
+logger = logging.getLogger(__name__)
+
+
def is_url(name):
    # type: (Union[str, Text]) -> bool
    """Tell whether ``name`` looks like a URL (plain HTTP/FTP/file or any
    scheme claimed by a registered VCS backend)."""
    scheme = get_url_scheme(name)
    if scheme is None:
        return False
    recognized = ['http', 'https', 'file', 'ftp'] + vcs.all_schemes
    return scheme in recognized
+
+
def make_vcs_requirement_url(repo_url, rev, project_name, subdir=None):
    # type: (str, str, str, Optional[str]) -> str
    """
    Build the URL for a VCS requirement:
    ``{repo_url}@{rev}#egg={name}[&subdirectory={subdir}]``.

    Args:
      repo_url: the remote VCS url, with any needed VCS prefix (e.g. "git+").
      project_name: the (unescaped) project name.
    """
    egg_name = pkg_resources.to_filename(project_name)
    pieces = ['{}@{}#egg={}'.format(repo_url, rev, egg_name)]
    if subdir:
        pieces.append('&subdirectory={}'.format(subdir))
    return ''.join(pieces)
+
+
def find_path_to_setup_from_repo_root(location, repo_root):
    # type: (str, str) -> Optional[str]
    """
    Walk upward from ``location`` looking for a directory holding setup.py.

    Return that directory relative to ``repo_root``; return None when
    setup.py sits in ``repo_root`` itself or cannot be found at all.
    """
    start = location
    current = location
    while not os.path.exists(os.path.join(current, 'setup.py')):
        parent = os.path.dirname(current)
        if parent == current:
            # Reached the filesystem root without finding setup.py.
            logger.warning(
                "Could not find setup.py for directory %s (tried all "
                "parent directories)",
                start,
            )
            return None
        current = parent

    if samefile(repo_root, current):
        return None

    return os.path.relpath(current, repo_root)
+
+
# Raised by VersionControl.get_remote_url() when a checkout exists but has
# no remote URL configured.
class RemoteNotFoundError(Exception):
    pass
+
+
class RevOptions(object):

    """
    A VCS-specific revision to install, together with any extra
    command-line options needed to fetch it.

    Treat instances as immutable once constructed.
    """

    def __init__(
        self,
        vc_class,  # type: Type[VersionControl]
        rev=None,  # type: Optional[str]
        extra_args=None,  # type: Optional[CommandArgs]
    ):
        # type: (...) -> None
        """
        Args:
          vc_class: the VersionControl subclass these options apply to.
          rev: the name of the revision to install, if any.
          extra_args: additional command-line options.
        """
        self.extra_args = [] if extra_args is None else extra_args
        self.rev = rev
        self.vc_class = vc_class
        self.branch_name = None  # type: Optional[str]

    def __repr__(self):
        # type: () -> str
        return '<RevOptions {}: rev={!r}>'.format(self.vc_class.name, self.rev)

    @property
    def arg_rev(self):
        # type: () -> Optional[str]
        # Fall back to the backend's default revision when none was given.
        return self.vc_class.default_arg_rev if self.rev is None else self.rev

    def to_args(self):
        # type: () -> CommandArgs
        """Return the VCS-specific command-line arguments."""
        rev = self.arg_rev
        rev_args = [] if rev is None else self.vc_class.get_base_rev_args(rev)
        return rev_args + self.extra_args

    def to_display(self):
        # type: () -> str
        """Return a suffix for log messages, e.g. ' (to revision abc)'."""
        if not self.rev:
            return ''
        return ' (to revision {})'.format(self.rev)

    def make_new(self, rev):
        # type: (str) -> RevOptions
        """
        Return a copy of these options targeting ``rev`` instead.

        Args:
          rev: the name of the revision for the new object.
        """
        return self.vc_class.make_rev_options(rev, extra_args=self.extra_args)
+
+
class VcsSupport(object):
    # Registry of backend instances, keyed by backend name.  Class-level,
    # so every VcsSupport shares the same registrations.
    _registry = {}  # type: Dict[str, VersionControl]
    schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn']

    def __init__(self):
        # type: () -> None
        # Teach urlparse about the VCS schemes so netloc (and, where
        # supported, fragment) parsing works for e.g. 'svn+ssh://...'.
        urllib_parse.uses_netloc.extend(self.schemes)
        # Python >= 2.7.4, 3.3 doesn't have uses_fragment
        if getattr(urllib_parse, 'uses_fragment', None):
            urllib_parse.uses_fragment.extend(self.schemes)
        super(VcsSupport, self).__init__()

    def __iter__(self):
        # type: () -> Iterator[str]
        return iter(self._registry)

    @property
    def backends(self):
        # type: () -> List[VersionControl]
        return list(self._registry.values())

    @property
    def dirnames(self):
        # type: () -> List[str]
        return [backend.dirname for backend in self.backends]

    @property
    def all_schemes(self):
        # type: () -> List[str]
        collected = []  # type: List[str]
        for backend in self.backends:
            collected.extend(backend.schemes)
        return collected

    def register(self, cls):
        # type: (Type[VersionControl]) -> None
        """Instantiate and register a backend class, once per name."""
        if not hasattr(cls, 'name'):
            logger.warning('Cannot register VCS %s', cls.__name__)
            return
        if cls.name not in self._registry:
            self._registry[cls.name] = cls()
            logger.debug('Registered VCS backend: %s', cls.name)

    def unregister(self, name):
        # type: (str) -> None
        """Drop a backend by name; a no-op if it is not registered."""
        self._registry.pop(name, None)

    def get_backend_for_dir(self, location):
        # type: (str) -> Optional[VersionControl]
        """
        Return a VersionControl object if a repository of that type is found
        at the given directory.
        """
        for vcs_backend in self._registry.values():
            if vcs_backend.controls_location(location):
                logger.debug('Determine that %s uses VCS: %s',
                             location, vcs_backend.name)
                return vcs_backend
        return None

    def get_backend_for_scheme(self, scheme):
        # type: (str) -> Optional[VersionControl]
        """Return the backend claiming ``scheme``, or None."""
        for vcs_backend in self._registry.values():
            if scheme in vcs_backend.schemes:
                return vcs_backend
        return None

    def get_backend(self, name):
        # type: (str) -> Optional[VersionControl]
        """Look a backend up by (case-insensitive) name, or None."""
        return self._registry.get(name.lower())
+
+
+vcs = VcsSupport()
+
+
class VersionControl(object):
    """Abstract base class for a version control backend.

    Subclasses override the class attributes below and the methods that
    raise NotImplementedError, and an instance is registered with the
    module-level ``vcs`` registry.
    """

    name = ''  # the VCS executable name; also used as the URL prefix
    dirname = ''  # marker directory identifying a checkout of this VCS
    repo_name = ''  # human-readable noun used in log messages
    # List of supported schemes for this Version Control
    schemes = ()  # type: Tuple[str, ...]
    # Iterable of environment variable names to pass to call_subprocess().
    unset_environ = ()  # type: Tuple[str, ...]
    default_arg_rev = None  # type: Optional[str]

    @classmethod
    def should_add_vcs_url_prefix(cls, remote_url):
        # type: (str) -> bool
        """
        Return whether the vcs prefix (e.g. "git+") should be added to a
        repository's remote url when used in a requirement.
        """
        # Skip the prefix when the URL already starts with e.g. "git:".
        return not remote_url.lower().startswith('{}:'.format(cls.name))

    @classmethod
    def get_subdirectory(cls, location):
        # type: (str) -> Optional[str]
        """
        Return the path to setup.py, relative to the repo root.
        Return None if setup.py is in the repo root.
        """
        return None

    @classmethod
    def get_requirement_revision(cls, repo_dir):
        # type: (str) -> str
        """
        Return the revision string that should be used in a requirement.
        """
        return cls.get_revision(repo_dir)

    @classmethod
    def get_src_requirement(cls, repo_dir, project_name):
        # type: (str, str) -> Optional[str]
        """
        Return the requirement string to use to redownload the files
        currently at the given repository directory.

        Args:
          project_name: the (unescaped) project name.

        The return value has a form similar to the following:

            {repository_url}@{revision}#egg={project_name}
        """
        repo_url = cls.get_remote_url(repo_dir)
        if repo_url is None:
            return None

        if cls.should_add_vcs_url_prefix(repo_url):
            repo_url = '{}+{}'.format(cls.name, repo_url)

        revision = cls.get_requirement_revision(repo_dir)
        subdir = cls.get_subdirectory(repo_dir)
        req = make_vcs_requirement_url(repo_url, revision, project_name,
                                       subdir=subdir)

        return req

    @staticmethod
    def get_base_rev_args(rev):
        # type: (str) -> List[str]
        """
        Return the base revision arguments for a vcs command.

        Args:
          rev: the name of a revision to install. Cannot be None.
        """
        raise NotImplementedError

    def is_immutable_rev_checkout(self, url, dest):
        # type: (str, str) -> bool
        """
        Return true if the commit hash checked out at dest matches
        the revision in url.

        Always return False, if the VCS does not support immutable commit
        hashes.

        This method does not check if there are local uncommitted changes
        in dest after checkout, as pip currently has no use case for that.
        """
        return False

    @classmethod
    def make_rev_options(cls, rev=None, extra_args=None):
        # type: (Optional[str], Optional[CommandArgs]) -> RevOptions
        """
        Return a RevOptions object.

        Args:
          rev: the name of a revision to install.
          extra_args: a list of extra options.
        """
        return RevOptions(cls, rev, extra_args=extra_args)

    @classmethod
    def _is_local_repository(cls, repo):
        # type: (str) -> bool
        """
        posix absolute paths start with os.path.sep,
        win32 ones start with drive (like c:\\folder)
        """
        drive, tail = os.path.splitdrive(repo)
        return repo.startswith(os.path.sep) or bool(drive)

    def export(self, location, url):
        # type: (str, HiddenText) -> None
        """
        Export the repository at the url to the destination location
        i.e. only download the files, without vcs informations

        :param url: the repository URL starting with a vcs prefix.
        """
        raise NotImplementedError

    @classmethod
    def get_netloc_and_auth(cls, netloc, scheme):
        # type: (str, str) -> Tuple[str, Tuple[Optional[str], Optional[str]]]
        """
        Parse the repository URL's netloc, and return the new netloc to use
        along with auth information.

        Args:
          netloc: the original repository URL netloc.
          scheme: the repository URL's scheme without the vcs prefix.

        This is mainly for the Subversion class to override, so that auth
        information can be provided via the --username and --password options
        instead of through the URL.  For other subclasses like Git without
        such an option, auth information must stay in the URL.

        Returns: (netloc, (username, password)).
        """
        return netloc, (None, None)

    @classmethod
    def get_url_rev_and_auth(cls, url):
        # type: (str) -> Tuple[str, Optional[str], AuthInfo]
        """
        Parse the repository URL to use, and return the URL, revision,
        and auth info to use.

        Returns: (url, rev, (username, password)).
        """
        scheme, netloc, path, query, frag = urllib_parse.urlsplit(url)
        if '+' not in scheme:
            raise ValueError(
                "Sorry, {!r} is a malformed VCS url. "
                "The format is <vcs>+<protocol>://<url>, "
                "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp".format(url)
            )
        # Remove the vcs prefix.
        scheme = scheme.split('+', 1)[1]
        netloc, user_pass = cls.get_netloc_and_auth(netloc, scheme)
        rev = None
        if '@' in path:
            # rsplit: an '@' may also appear earlier in the path.
            path, rev = path.rsplit('@', 1)
        url = urllib_parse.urlunsplit((scheme, netloc, path, query, ''))
        return url, rev, user_pass

    @staticmethod
    def make_rev_args(username, password):
        # type: (Optional[str], Optional[HiddenText]) -> CommandArgs
        """
        Return the RevOptions "extra arguments" to use in obtain().
        """
        return []

    def get_url_rev_options(self, url):
        # type: (HiddenText) -> Tuple[HiddenText, RevOptions]
        """
        Return the URL and RevOptions object to use in obtain() and in
        some cases export(), as a tuple (url, rev_options).
        """
        secret_url, rev, user_pass = self.get_url_rev_and_auth(url.secret)
        username, secret_password = user_pass
        # Re-wrap the password so it stays hidden from logs.
        password = None  # type: Optional[HiddenText]
        if secret_password is not None:
            password = hide_value(secret_password)
        extra_args = self.make_rev_args(username, password)
        rev_options = self.make_rev_options(rev, extra_args=extra_args)

        return hide_url(secret_url), rev_options

    @staticmethod
    def normalize_url(url):
        # type: (str) -> str
        """
        Normalize a URL for comparison by unquoting it and removing any
        trailing slash.
        """
        return urllib_parse.unquote(url).rstrip('/')

    @classmethod
    def compare_urls(cls, url1, url2):
        # type: (str, str) -> bool
        """
        Compare two repo URLs for identity, ignoring incidental differences.
        """
        return (cls.normalize_url(url1) == cls.normalize_url(url2))

    def fetch_new(self, dest, url, rev_options):
        # type: (str, HiddenText, RevOptions) -> None
        """
        Fetch a revision from a repository, in the case that this is the
        first fetch from the repository.

        Args:
          dest: the directory to fetch the repository to.
          rev_options: a RevOptions object.
        """
        raise NotImplementedError

    def switch(self, dest, url, rev_options):
        # type: (str, HiddenText, RevOptions) -> None
        """
        Switch the repo at ``dest`` to point to ``URL``.

        Args:
          rev_options: a RevOptions object.
        """
        raise NotImplementedError

    def update(self, dest, url, rev_options):
        # type: (str, HiddenText, RevOptions) -> None
        """
        Update an already-existing repo to the given ``rev_options``.

        Args:
          rev_options: a RevOptions object.
        """
        raise NotImplementedError

    @classmethod
    def is_commit_id_equal(cls, dest, name):
        # type: (str, Optional[str]) -> bool
        """
        Return whether the id of the current commit equals the given name.

        Args:
          dest: the repository directory.
          name: a string name.
        """
        raise NotImplementedError

    def obtain(self, dest, url):
        # type: (str, HiddenText) -> None
        """
        Install or update in editable mode the package represented by this
        VersionControl object.

        :param dest: the repository directory in which to install or update.
        :param url: the repository URL starting with a vcs prefix.
        """
        url, rev_options = self.get_url_rev_options(url)

        if not os.path.exists(dest):
            # Nothing there yet: plain first-time fetch.
            self.fetch_new(dest, url, rev_options)
            return

        rev_display = rev_options.to_display()
        if self.is_repository_directory(dest):
            existing_url = self.get_remote_url(dest)
            if self.compare_urls(existing_url, url.secret):
                # Same repo already checked out: update in place when the
                # requested revision differs.
                logger.debug(
                    '%s in %s exists, and has correct URL (%s)',
                    self.repo_name.title(),
                    display_path(dest),
                    url,
                )
                if not self.is_commit_id_equal(dest, rev_options.rev):
                    logger.info(
                        'Updating %s %s%s',
                        display_path(dest),
                        self.repo_name,
                        rev_display,
                    )
                    self.update(dest, url, rev_options)
                else:
                    logger.info('Skipping because already up-to-date.')
                return

            # Same VCS, different remote URL: offer (s)witch too.
            logger.warning(
                '%s %s in %s exists with URL %s',
                self.name,
                self.repo_name,
                display_path(dest),
                existing_url,
            )
            prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
                      ('s', 'i', 'w', 'b'))
        else:
            # Destination exists but is not a checkout of this VCS.
            logger.warning(
                'Directory %s already exists, and is not a %s %s.',
                dest,
                self.name,
                self.repo_name,
            )
            # https://github.com/python/mypy/issues/1174
            prompt = ('(i)gnore, (w)ipe, (b)ackup ',  # type: ignore
                      ('i', 'w', 'b'))

        logger.warning(
            'The plan is to install the %s repository %s',
            self.name,
            url,
        )
        response = ask_path_exists('What to do?  %s' % prompt[0], prompt[1])

        # NOTE(review): 'a' is presumably an abort option offered by
        # ask_path_exists itself, since it is not in prompt[1] -- confirm
        # against ask_path_exists's implementation.
        if response == 'a':
            sys.exit(-1)

        if response == 'w':
            logger.warning('Deleting %s', display_path(dest))
            rmtree(dest)
            self.fetch_new(dest, url, rev_options)
            return

        if response == 'b':
            dest_dir = backup_dir(dest)
            logger.warning(
                'Backing up %s to %s', display_path(dest), dest_dir,
            )
            shutil.move(dest, dest_dir)
            self.fetch_new(dest, url, rev_options)
            return

        # Do nothing if the response is "i".
        if response == 's':
            logger.info(
                'Switching %s %s to %s%s',
                self.repo_name,
                display_path(dest),
                url,
                rev_display,
            )
            self.switch(dest, url, rev_options)

    def unpack(self, location, url):
        # type: (str, HiddenText) -> None
        """
        Clean up current location and download the url repository
        (and vcs infos) into location

        :param url: the repository URL starting with a vcs prefix.
        """
        if os.path.exists(location):
            rmtree(location)
        self.obtain(location, url=url)

    @classmethod
    def get_remote_url(cls, location):
        # type: (str) -> str
        """
        Return the url used at location

        Raises RemoteNotFoundError if the repository does not have a remote
        url configured.
        """
        raise NotImplementedError

    @classmethod
    def get_revision(cls, location):
        # type: (str) -> str
        """
        Return the current commit id of the files at the given location.
        """
        raise NotImplementedError

    @classmethod
    def run_command(
        cls,
        cmd,  # type: Union[List[str], CommandArgs]
        show_stdout=True,  # type: bool
        cwd=None,  # type: Optional[str]
        on_returncode='raise',  # type: str
        extra_ok_returncodes=None,  # type: Optional[Iterable[int]]
        command_desc=None,  # type: Optional[str]
        extra_environ=None,  # type: Optional[Mapping[str, Any]]
        spinner=None,  # type: Optional[SpinnerInterface]
        log_failed_cmd=True  # type: bool
    ):
        # type: (...) -> Text
        """
        Run a VCS subcommand
        This is simply a wrapper around call_subprocess that adds the VCS
        command name, and checks that the VCS is available
        """
        cmd = make_command(cls.name, *cmd)
        try:
            return call_subprocess(cmd, show_stdout, cwd,
                                   on_returncode=on_returncode,
                                   extra_ok_returncodes=extra_ok_returncodes,
                                   command_desc=command_desc,
                                   extra_environ=extra_environ,
                                   unset_environ=cls.unset_environ,
                                   spinner=spinner,
                                   log_failed_cmd=log_failed_cmd)
        except OSError as e:
            # errno.ENOENT = no such file or directory
            # In other words, the VCS executable isn't available
            if e.errno == errno.ENOENT:
                raise BadCommand(
                    'Cannot find command %r - do you have '
                    '%r installed and in your '
                    'PATH?' % (cls.name, cls.name))
            else:
                raise  # re-raise exception if a different error occurred

    @classmethod
    def is_repository_directory(cls, path):
        # type: (str) -> bool
        """
        Return whether a directory path is a repository directory.
        """
        logger.debug('Checking in %s for %s (%s)...',
                     path, cls.dirname, cls.name)
        return os.path.exists(os.path.join(path, cls.dirname))

    @classmethod
    def controls_location(cls, location):
        # type: (str) -> bool
        """
        Check if a location is controlled by the vcs.
        It is meant to be overridden to implement smarter detection
        mechanisms for specific vcs.

        This can do more than is_repository_directory() alone.  For example,
        the Git override checks that Git is actually available.
        """
        return cls.is_repository_directory(location)
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_internal/wheel_builder.py b/monEnvTP/lib/python3.8/site-packages/pip/_internal/wheel_builder.py
new file mode 100644
index 0000000000000000000000000000000000000000..7c7820d4f26560dff25801dd5034b355d2823795
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_internal/wheel_builder.py
@@ -0,0 +1,305 @@
+"""Orchestrator for building wheels from InstallRequirements.
+"""
+
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+import logging
+import os.path
+import re
+import shutil
+
+from pip._internal.models.link import Link
+from pip._internal.operations.build.wheel import build_wheel_pep517
+from pip._internal.operations.build.wheel_legacy import build_wheel_legacy
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import ensure_dir, hash_file, is_wheel_installed
+from pip._internal.utils.setuptools_build import make_setuptools_clean_args
+from pip._internal.utils.subprocess import call_subprocess
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.urls import path_to_url
+from pip._internal.vcs import vcs
+
+if MYPY_CHECK_RUNNING:
+ from typing import (
+ Any, Callable, Iterable, List, Optional, Pattern, Tuple,
+ )
+
+ from pip._internal.cache import WheelCache
+ from pip._internal.req.req_install import InstallRequirement
+
+ BinaryAllowedPredicate = Callable[[InstallRequirement], bool]
+ BuildResult = Tuple[List[InstallRequirement], List[InstallRequirement]]
+
+logger = logging.getLogger(__name__)
+
+
+def _contains_egg_info(
+ s, _egg_info_re=re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I)):
+ # type: (str, Pattern[str]) -> bool
+ """Determine whether the string looks like an egg_info.
+
+ :param s: The string to parse. E.g. foo-2.1
+ """
+ return bool(_egg_info_re.search(s))
+
+
def _should_build(
    req,  # type: InstallRequirement
    need_wheel,  # type: bool
    check_binary_allowed,  # type: BinaryAllowedPredicate
):
    # type: (...) -> bool
    """Return whether an InstallRequirement should be built into a wheel."""
    if req.constraint:
        # never build requirements that are merely constraints
        return False
    if req.is_wheel:
        if need_wheel:
            logger.info(
                'Skipping %s, due to already being wheel.', req.name,
            )
        return False

    if need_wheel:
        # i.e. pip wheel, not pip install
        return True

    # From this point, this concerns the pip install command only
    # (need_wheel=False).

    if not req.use_pep517 and not is_wheel_installed():
        # we don't build legacy requirements if wheel is not installed
        return False

    if req.editable or not req.source_dir:
        # Editable installs and requirements without an unpacked source
        # tree are installed directly, without an intermediate wheel.
        return False

    if not check_binary_allowed(req):
        logger.info(
            "Skipping wheel build for %s, due to binaries "
            "being disabled for it.", req.name,
        )
        return False

    return True
+
+
def should_build_for_wheel_command(
    req,  # type: InstallRequirement
):
    # type: (...) -> bool
    """'pip wheel' variant: a wheel is always wanted, binaries unrestricted."""
    return _should_build(
        req,
        need_wheel=True,
        check_binary_allowed=_always_true,
    )
+
+
def should_build_for_install_command(
    req,  # type: InstallRequirement
    check_binary_allowed,  # type: BinaryAllowedPredicate
):
    # type: (...) -> bool
    """'pip install' variant: build only when the caller's predicate allows."""
    return _should_build(
        req,
        need_wheel=False,
        check_binary_allowed=check_binary_allowed,
    )
+
+
def _should_cache(
    req,  # type: InstallRequirement
):
    # type: (...) -> Optional[bool]
    """
    Return whether a built InstallRequirement can be stored in the persistent
    wheel cache, assuming the wheel cache is available, and _should_build()
    has determined a wheel needs to be built.
    """
    if not should_build_for_install_command(
        req, check_binary_allowed=_always_true
    ):
        # never cache if pip install would not have built
        # (editable mode, etc)
        return False

    if req.link and req.link.is_vcs:
        # VCS checkout. Do not cache
        # unless it points to an immutable commit hash.
        assert not req.editable
        assert req.source_dir
        vcs_backend = vcs.get_backend_for_scheme(req.link.scheme)
        assert vcs_backend
        if vcs_backend.is_immutable_rev_checkout(req.link.url, req.source_dir):
            return True
        return False

    # NOTE(review): this assumes req.link is set for non-VCS requirements
    # reaching this point; a None link would raise AttributeError on
    # splitext() -- confirm callers guarantee a link here.
    base, ext = req.link.splitext()
    if _contains_egg_info(base):
        # Looks like a pinned name-version link (e.g. foo-2.1.tar.gz); the
        # same link always yields the same wheel, so caching is safe.
        return True

    # Otherwise, do not cache.
    return False
+
+
def _get_cache_dir(
    req,  # type: InstallRequirement
    wheel_cache,  # type: WheelCache
):
    # type: (...) -> str
    """Pick the directory where the built wheel for ``req`` will be stored.

    Cacheable wheels go to the persistent cache; everything else goes to
    an ephemeral (per-run) cache directory.
    """
    use_persistent = bool(wheel_cache.cache_dir) and _should_cache(req)
    if use_persistent:
        return wheel_cache.get_path_for_link(req.link)
    return wheel_cache.get_ephem_path_for_link(req.link)
+
+
+def _always_true(_):
+ # type: (Any) -> bool
+ return True
+
+
def _build_one(
    req,  # type: InstallRequirement
    output_dir,  # type: str
    build_options,  # type: List[str]
    global_options,  # type: List[str]
):
    # type: (...) -> Optional[str]
    """Build one wheel.

    :return: The filename of the built wheel, or None if the build failed.
    """
    # A failure to create the output directory counts as a build failure,
    # not a fatal error.
    try:
        ensure_dir(output_dir)
    except OSError as e:
        logger.warning(
            "Building wheel for %s failed: %s",
            req.name, e,
        )
        return None

    # Install build deps into temporary directory (PEP 518)
    with req.build_env:
        return _build_one_inside_env(
            req, output_dir, build_options, global_options
        )
+
+
def _build_one_inside_env(
    req,  # type: InstallRequirement
    output_dir,  # type: str
    build_options,  # type: List[str]
    global_options,  # type: List[str]
):
    # type: (...) -> Optional[str]
    """Build the wheel in a temporary directory, then move it to output_dir.

    Returns the final wheel path, or None when building (or moving the
    built wheel) failed.
    """
    with TempDirectory(kind="wheel") as temp_dir:
        # PEP 517 and legacy builds return the built wheel's path, or None.
        if req.use_pep517:
            wheel_path = build_wheel_pep517(
                name=req.name,
                backend=req.pep517_backend,
                metadata_directory=req.metadata_directory,
                build_options=build_options,
                tempd=temp_dir.path,
            )
        else:
            wheel_path = build_wheel_legacy(
                name=req.name,
                setup_py_path=req.setup_py_path,
                source_dir=req.unpacked_source_directory,
                global_options=global_options,
                build_options=build_options,
                tempd=temp_dir.path,
            )

        if wheel_path is not None:
            wheel_name = os.path.basename(wheel_path)
            dest_path = os.path.join(output_dir, wheel_name)
            try:
                # Hash before moving so the logged digest matches the file
                # that ends up in dest_path.
                wheel_hash, length = hash_file(wheel_path)
                shutil.move(wheel_path, dest_path)
                logger.info('Created wheel for %s: '
                            'filename=%s size=%d sha256=%s',
                            req.name, wheel_name, length,
                            wheel_hash.hexdigest())
                logger.info('Stored in directory: %s', output_dir)
                return dest_path
            except Exception as e:
                logger.warning(
                    "Building wheel for %s failed: %s",
                    req.name, e,
                )
        # Ignore return, we can't do anything else useful.
        if not req.use_pep517:
            _clean_one_legacy(req, global_options)
        return None
+
+
def _clean_one_legacy(req, global_options):
    # type: (InstallRequirement, List[str]) -> bool
    """Run ``setup.py clean`` for a legacy-built requirement.

    Returns True on success, False (after logging an error) on failure.
    """
    clean_args = make_setuptools_clean_args(
        req.setup_py_path,
        global_options=global_options,
    )

    logger.info('Running setup.py clean for %s', req.name)
    try:
        call_subprocess(clean_args, cwd=req.source_dir)
    except Exception:
        logger.error('Failed cleaning build dir for %s', req.name)
        return False
    return True
+
+
def build(
    requirements,  # type: Iterable[InstallRequirement]
    wheel_cache,  # type: WheelCache
    build_options,  # type: List[str]
    global_options,  # type: List[str]
):
    # type: (...) -> BuildResult
    """Build wheels.

    :return: The list of InstallRequirement that succeeded to build and
        the list of InstallRequirement that failed to build.
    """
    if not requirements:
        return [], []

    # NOTE(review): ``requirements`` is iterated twice (once for the log
    # message, once for the build loop), so callers must pass a
    # re-iterable, not a generator -- confirm at call sites.

    # Build the wheels.
    logger.info(
        'Building wheels for collected packages: %s',
        ', '.join(req.name for req in requirements),
    )

    with indent_log():
        build_successes, build_failures = [], []
        for req in requirements:
            cache_dir = _get_cache_dir(req, wheel_cache)
            wheel_file = _build_one(
                req, cache_dir, build_options, global_options
            )
            if wheel_file:
                # Update the link for this.
                req.link = Link(path_to_url(wheel_file))
                req.local_file_path = req.link.file_path
                assert req.link.is_wheel
                build_successes.append(req)
            else:
                build_failures.append(req)

    # notify success/failure
    if build_successes:
        logger.info(
            'Successfully built %s',
            ' '.join([req.name for req in build_successes]),
        )
    if build_failures:
        logger.info(
            'Failed to build %s',
            ' '.join([req.name for req in build_failures]),
        )
    # Return a list of requirements that failed to build
    return build_successes, build_failures
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_vendor/__init__.py b/monEnvTP/lib/python3.8/site-packages/pip/_vendor/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e02eaef6d8ab18106bf6600e87c269b41d923fa2
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pip/_vendor/__init__.py
@@ -0,0 +1,119 @@
+"""
+pip._vendor is for vendoring dependencies of pip to prevent needing pip to
+depend on something external.
+
+Files inside of pip._vendor should be considered immutable and should only be
+updated to versions from upstream.
+"""
+from __future__ import absolute_import
+
+import glob
+import os.path
+import sys
+
# Downstream redistributors which have debundled our dependencies should also
# patch this value to be true. This will trigger the additional patching
# to cause things like "six" to be available as pip.
# NOTE(review): upstream pip ships this as False; True here looks like a
# distro (e.g. Debian/Ubuntu) patch -- confirm before syncing with upstream.
DEBUNDLED = True

# By default, look in this directory for a bunch of .whl files which we will
# add to the beginning of sys.path before attempting to import anything. This
# is done to support downstream re-distributors like Debian and Fedora who
# wish to create their own Wheels for our dependencies to aid in debundling.
prefix = getattr(sys, "base_prefix", sys.prefix)
if prefix.startswith('/usr/lib/pypy'):
    # Presumably PyPy's prefix layout differs and its wheels live under
    # /usr/share/python-wheels -- TODO confirm.
    prefix = '/usr'
WHEEL_DIR = os.path.abspath(os.path.join(prefix, 'share', 'python-wheels'))
+
+
+# Define a small helper function to alias our vendored modules to the real ones
+# if the vendored ones do not exist. This idea of this was taken from
+# https://github.com/kennethreitz/requests/pull/2567.
def vendored(modulename):
    """Alias the real top-level ``modulename`` under ``pip._vendor`` when the
    vendored copy is absent (debundled installs)."""
    vendored_name = "{0}.{1}".format(__name__, modulename)

    try:
        __import__(modulename, globals(), locals(), level=0)
    except ImportError:
        # Silently tolerate the failure: if neither the vendored copy nor
        # the real module exists, the eventual `import pip._vendor.whatever`
        # at the point of use will raise a clearer ImportError than we
        # could produce here.
        pass
    else:
        real_module = sys.modules[modulename]
        sys.modules[vendored_name] = real_module
        base, head = vendored_name.rsplit(".", 1)
        setattr(sys.modules[base], head, real_module)
+
+
# If we're operating in a debundled setup, then we want to go ahead and trigger
# the aliasing of our vendored libraries as well as looking for wheels to add
# to our sys.path. This will cause all of this code to be a no-op typically
# however downstream redistributors can enable it in a consistent way across
# all platforms.
if DEBUNDLED:
    # Actually look inside of WHEEL_DIR to find .whl files and add them to the
    # front of our sys.path.
    sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path

    # Actually alias all of our vendored dependencies.
    vendored("appdirs")
    vendored("cachecontrol")
    vendored("colorama")
    vendored("contextlib2")
    vendored("distlib")
    vendored("distro")
    vendored("html5lib")
    vendored("six")
    vendored("six.moves")
    vendored("six.moves.urllib")
    vendored("six.moves.urllib.parse")
    vendored("packaging")
    vendored("packaging.version")
    vendored("packaging.specifiers")
    vendored("pep517")
    vendored("pkg_resources")
    vendored("progress")
    vendored("retrying")
    vendored("requests")
    vendored("requests.exceptions")
    vendored("requests.packages")
    vendored("requests.packages.urllib3")
    vendored("requests.packages.urllib3._collections")
    vendored("requests.packages.urllib3.connection")
    vendored("requests.packages.urllib3.connectionpool")
    vendored("requests.packages.urllib3.contrib")
    vendored("requests.packages.urllib3.contrib.ntlmpool")
    vendored("requests.packages.urllib3.contrib.pyopenssl")
    vendored("requests.packages.urllib3.exceptions")
    vendored("requests.packages.urllib3.fields")
    vendored("requests.packages.urllib3.filepost")
    vendored("requests.packages.urllib3.packages")
    # Some requests submodules may themselves be debundled downstream.
    try:
        vendored("requests.packages.urllib3.packages.ordered_dict")
        vendored("requests.packages.urllib3.packages.six")
    except ImportError:
        # Debian already unbundles these from requests.
        pass
    vendored("requests.packages.urllib3.packages.ssl_match_hostname")
    vendored("requests.packages.urllib3.packages.ssl_match_hostname."
             "_implementation")
    vendored("requests.packages.urllib3.poolmanager")
    vendored("requests.packages.urllib3.request")
    vendored("requests.packages.urllib3.response")
    vendored("requests.packages.urllib3.util")
    vendored("requests.packages.urllib3.util.connection")
    vendored("requests.packages.urllib3.util.request")
    vendored("requests.packages.urllib3.util.response")
    vendored("requests.packages.urllib3.util.retry")
    vendored("requests.packages.urllib3.util.ssl_")
    vendored("requests.packages.urllib3.util.timeout")
    vendored("requests.packages.urllib3.util.url")
    vendored("toml")
    vendored("toml.encoder")
    vendored("toml.decoder")
    vendored("urllib3")
diff --git a/monEnvTP/lib/python3.8/site-packages/pip/_vendor/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pip/_vendor/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..64dcc34e2a7cce06e258a409ca2d76f30782084d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pip/_vendor/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/AUTHORS.txt b/monEnvTP/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/AUTHORS.txt
new file mode 100644
index 0000000000000000000000000000000000000000..72c87d7d38ae7bf859717c333a5ee8230f6ce624
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/AUTHORS.txt
@@ -0,0 +1,562 @@
+A_Rog <adam.thomas.rogerson@gmail.com>
+Aakanksha Agrawal <11389424+rasponic@users.noreply.github.com>
+Abhinav Sagar <40603139+abhinavsagar@users.noreply.github.com>
+ABHYUDAY PRATAP SINGH <abhyudaypratap@outlook.com>
+abs51295 <aagams68@gmail.com>
+AceGentile <ventogrigio83@gmail.com>
+Adam Chainz <adam@adamj.eu>
+Adam Tse <adam.tse@me.com>
+Adam Tse <atse@users.noreply.github.com>
+Adam Wentz <awentz@theonion.com>
+admin <admin@admins-MacBook-Pro.local>
+Adrien Morison <adrien.morison@gmail.com>
+ahayrapetyan <ahayrapetya2@bloomberg.net>
+Ahilya <ahilya16009@iiitd.ac.in>
+AinsworthK <yat626@yahoo.com.hk>
+Akash Srivastava <akashsrivastava4927@gmail.com>
+Alan Yee <alyee@ucsd.edu>
+Albert Tugushev <albert@tugushev.ru>
+Albert-Guan <albert.guan94@gmail.com>
+albertg <albert.guan94@gmail.com>
+Aleks Bunin <github@compuix.com>
+Alethea Flowers <magicalgirl@google.com>
+Alex Gaynor <alex.gaynor@gmail.com>
+Alex Grönholm <alex.gronholm@nextday.fi>
+Alex Loosley <a.loosley@reply.de>
+Alex Morega <alex@grep.ro>
+Alex Stachowiak <alexander@computer.org>
+Alexander Shtyrov <rawzausho@gmail.com>
+Alexandre Conrad <alexandre.conrad@gmail.com>
+Alexey Popravka <a.popravka@smartweb.com.ua>
+Alexey Popravka <alexey.popravka@horsedevel.com>
+Alli <alzeih@users.noreply.github.com>
+Ami Fischman <ami@fischman.org>
+Ananya Maiti <ananyoevo@gmail.com>
+Anatoly Techtonik <techtonik@gmail.com>
+Anders Kaseorg <andersk@mit.edu>
+Andreas Lutro <anlutro@gmail.com>
+Andrei Geacar <andrei.geacar@gmail.com>
+Andrew Gaul <andrew@gaul.org>
+Andrey Bulgakov <mail@andreiko.ru>
+Andrés Delfino <34587441+andresdelfino@users.noreply.github.com>
+Andrés Delfino <adelfino@gmail.com>
+Andy Freeland <andy.freeland@redjack.com>
+Andy Freeland <andy@andyfreeland.net>
+Andy Kluger <AndydeCleyre@users.noreply.github.com>
+Ani Hayrapetyan <ahayrapetya2@bloomberg.net>
+Aniruddha Basak <codewithaniruddha@gmail.com>
+Anish Tambe <anish.tambe@yahoo.in>
+Anrs Hu <anrs@douban.com>
+Anthony Sottile <asottile@umich.edu>
+Antoine Musso <hashar@free.fr>
+Anton Ovchinnikov <revolver112@gmail.com>
+Anton Patrushev <apatrushev@gmail.com>
+Antonio Alvarado Hernandez <tnotstar@gmail.com>
+Antony Lee <anntzer.lee@gmail.com>
+Antti Kaihola <akaihol+github@ambitone.com>
+Anubhav Patel <anubhavp28@gmail.com>
+Anuj Godase <godaseanuj@gmail.com>
+AQNOUCH Mohammed <aqnouch.mohammed@gmail.com>
+AraHaan <seandhunt_7@yahoo.com>
+Arindam Choudhury <arindam@live.com>
+Armin Ronacher <armin.ronacher@active-4.com>
+Artem <duketemon@users.noreply.github.com>
+Ashley Manton <ajd.manton@googlemail.com>
+Ashwin Ramaswami <aramaswamis@gmail.com>
+atse <atse@users.noreply.github.com>
+Atsushi Odagiri <aodagx@gmail.com>
+Avner Cohen <israbirding@gmail.com>
+Baptiste Mispelon <bmispelon@gmail.com>
+Barney Gale <barney.gale@gmail.com>
+barneygale <barney.gale@gmail.com>
+Bartek Ogryczak <b.ogryczak@gmail.com>
+Bastian Venthur <mail@venthur.de>
+Ben Darnell <ben@bendarnell.com>
+Ben Hoyt <benhoyt@gmail.com>
+Ben Rosser <rosser.bjr@gmail.com>
+Bence Nagy <bence@underyx.me>
+Benjamin Peterson <benjamin@python.org>
+Benjamin VanEvery <ben@simondata.com>
+Benoit Pierre <benoit.pierre@gmail.com>
+Berker Peksag <berker.peksag@gmail.com>
+Bernardo B. Marques <bernardo.fire@gmail.com>
+Bernhard M. Wiedemann <bwiedemann@suse.de>
+Bertil Hatt <bertil.hatt@farfetch.com>
+Bogdan Opanchuk <bogdan@opanchuk.net>
+BorisZZZ <BorisZZZ@users.noreply.github.com>
+Brad Erickson <eosrei@gmail.com>
+Bradley Ayers <bradley.ayers@gmail.com>
+Brandon L. Reiss <brandon@damyata.co>
+Brandt Bucher <brandtbucher@gmail.com>
+Brett Randall <javabrett@gmail.com>
+Brian Cristante <33549821+brcrista@users.noreply.github.com>
+Brian Cristante <brcrista@microsoft.com>
+Brian Rosner <brosner@gmail.com>
+BrownTruck <BrownTruck@users.noreply.github.com>
+Bruno Oliveira <nicoddemus@gmail.com>
+Bruno Renié <brutasse@gmail.com>
+Bstrdsmkr <bstrdsmkr@gmail.com>
+Buck Golemon <buck@yelp.com>
+burrows <burrows@preveil.com>
+Bussonnier Matthias <bussonniermatthias@gmail.com>
+c22 <c22@users.noreply.github.com>
+Caleb Martinez <accounts@calebmartinez.com>
+Calvin Smith <eukaryote@users.noreply.github.com>
+Carl Meyer <carl@oddbird.net>
+Carlos Liam <carlos@aarzee.me>
+Carol Willing <carolcode@willingconsulting.com>
+Carter Thayer <carterwthayer@gmail.com>
+Cass <cass.petrus@gmail.com>
+Chandrasekhar Atina <chandu.atina@gmail.com>
+Chih-Hsuan Yen <yan12125@gmail.com>
+Chih-Hsuan Yen <yen@chyen.cc>
+Chris Brinker <chris.brinker@gmail.com>
+Chris Hunt <chrahunt@gmail.com>
+Chris Jerdonek <chris.jerdonek@gmail.com>
+Chris McDonough <chrism@plope.com>
+Chris Wolfe <chriswwolfe@gmail.com>
+Christian Heimes <christian@python.org>
+Christian Oudard <christian.oudard@gmail.com>
+Christopher Hunt <chrahunt@gmail.com>
+Christopher Snyder <cnsnyder@users.noreply.github.com>
+Clark Boylan <clark.boylan@gmail.com>
+Clay McClure <clay@daemons.net>
+Cody <Purring@users.noreply.github.com>
+Cody Soyland <codysoyland@gmail.com>
+Colin Watson <cjwatson@debian.org>
+Connor Osborn <cdosborn@email.arizona.edu>
+Cooper Lees <me@cooperlees.com>
+Cooper Ry Lees <me@cooperlees.com>
+Cory Benfield <lukasaoz@gmail.com>
+Cory Wright <corywright@gmail.com>
+Craig Kerstiens <craig.kerstiens@gmail.com>
+Cristian Sorinel <cristian.sorinel@gmail.com>
+Curtis Doty <Curtis@GreenKey.net>
+cytolentino <ctolentino8@bloomberg.net>
+Damian Quiroga <qdamian@gmail.com>
+Dan Black <dyspop@gmail.com>
+Dan Savilonis <djs@n-cube.org>
+Dan Sully <daniel-github@electricrain.com>
+daniel <mcdonaldd@unimelb.edu.au>
+Daniel Collins <accounts@dac.io>
+Daniel Hahler <git@thequod.de>
+Daniel Holth <dholth@fastmail.fm>
+Daniel Jost <torpedojost@gmail.com>
+Daniel Shaulov <daniel.shaulov@gmail.com>
+Daniele Esposti <expobrain@users.noreply.github.com>
+Daniele Procida <daniele@vurt.org>
+Danny Hermes <daniel.j.hermes@gmail.com>
+Dav Clark <davclark@gmail.com>
+Dave Abrahams <dave@boostpro.com>
+Dave Jones <dave@waveform.org.uk>
+David Aguilar <davvid@gmail.com>
+David Black <db@d1b.org>
+David Bordeynik <david.bordeynik@gmail.com>
+David Bordeynik <david@zebra-med.com>
+David Caro <david@dcaro.es>
+David Evans <d@drhevans.com>
+David Linke <dr.david.linke@gmail.com>
+David Pursehouse <david.pursehouse@gmail.com>
+David Tucker <david@tucker.name>
+David Wales <daviewales@gmail.com>
+Davidovich <david.genest@gmail.com>
+derwolfe <chriswwolfe@gmail.com>
+Desetude <harry@desetude.com>
+Diego Caraballo <diegocaraballo84@gmail.com>
+DiegoCaraballo <diegocaraballo84@gmail.com>
+Dmitry Gladkov <dmitry.gladkov@gmail.com>
+Domen Kožar <domen@dev.si>
+Donald Stufft <donald@stufft.io>
+Dongweiming <dongweiming@admaster.com.cn>
+Douglas Thor <dougthor42@users.noreply.github.com>
+DrFeathers <WilliamGeorgeBurgess@gmail.com>
+Dustin Ingram <di@di.codes>
+Dwayne Bailey <dwayne@translate.org.za>
+Ed Morley <501702+edmorley@users.noreply.github.com>
+Ed Morley <emorley@mozilla.com>
+Eitan Adler <lists@eitanadler.com>
+ekristina <panacejja@gmail.com>
+elainechan <elaine.chan@outlook.com>
+Eli Schwartz <eschwartz93@gmail.com>
+Eli Schwartz <eschwartz@archlinux.org>
+Emil Burzo <contact@emilburzo.com>
+Emil Styrke <emil.styrke@gmail.com>
+Endoh Takanao <djmchl@gmail.com>
+enoch <lanxenet@gmail.com>
+Erdinc Mutlu <erdinc_mutlu@yahoo.com>
+Eric Gillingham <Gillingham@bikezen.net>
+Eric Hanchrow <eric.hanchrow@gmail.com>
+Eric Hopper <hopper@omnifarious.org>
+Erik M. Bray <embray@stsci.edu>
+Erik Rose <erik@mozilla.com>
+Ernest W Durbin III <ewdurbin@gmail.com>
+Ernest W. Durbin III <ewdurbin@gmail.com>
+Erwin Janssen <erwinjanssen@outlook.com>
+Eugene Vereshchagin <evvers@gmail.com>
+everdimension <everdimension@gmail.com>
+Felix Yan <felixonmars@archlinux.org>
+fiber-space <fiber-space@users.noreply.github.com>
+Filip Kokosiński <filip.kokosinski@gmail.com>
+Florian Briand <ownerfrance+github@hotmail.com>
+Florian Rathgeber <florian.rathgeber@gmail.com>
+Francesco <f.guerrieri@gmail.com>
+Francesco Montesano <franz.bergesund@gmail.com>
+Frost Ming <mianghong@gmail.com>
+Gabriel Curio <g.curio@gmail.com>
+Gabriel de Perthuis <g2p.code@gmail.com>
+Garry Polley <garrympolley@gmail.com>
+gdanielson <graeme.danielson@gmail.com>
+Geoffrey Lehée <geoffrey@lehee.name>
+Geoffrey Sneddon <me@gsnedders.com>
+George Song <george@55minutes.com>
+Georgi Valkov <georgi.t.valkov@gmail.com>
+Giftlin Rajaiah <giftlin.rgn@gmail.com>
+gizmoguy1 <gizmoguy1@gmail.com>
+gkdoc <40815324+gkdoc@users.noreply.github.com>
+Gopinath M <31352222+mgopi1990@users.noreply.github.com>
+GOTO Hayato <3532528+gh640@users.noreply.github.com>
+gpiks <gaurav.pikale@gmail.com>
+Guilherme Espada <porcariadagata@gmail.com>
+Guy Rozendorn <guy@rzn.co.il>
+gzpan123 <gzpan123@gmail.com>
+Hanjun Kim <hallazzang@gmail.com>
+Hari Charan <hcharan997@gmail.com>
+Harsh Vardhan <harsh59v@gmail.com>
+Herbert Pfennig <herbert@albinen.com>
+Hsiaoming Yang <lepture@me.com>
+Hugo <hugovk@users.noreply.github.com>
+Hugo Lopes Tavares <hltbra@gmail.com>
+Hugo van Kemenade <hugovk@users.noreply.github.com>
+hugovk <hugovk@users.noreply.github.com>
+Hynek Schlawack <hs@ox.cx>
+Ian Bicking <ianb@colorstudy.com>
+Ian Cordasco <graffatcolmingov@gmail.com>
+Ian Lee <IanLee1521@gmail.com>
+Ian Stapleton Cordasco <graffatcolmingov@gmail.com>
+Ian Wienand <ian@wienand.org>
+Ian Wienand <iwienand@redhat.com>
+Igor Kuzmitshov <kuzmiigo@gmail.com>
+Igor Sobreira <igor@igorsobreira.com>
+Ilya Baryshev <baryshev@gmail.com>
+INADA Naoki <songofacandy@gmail.com>
+Ionel Cristian Mărieș <contact@ionelmc.ro>
+Ionel Maries Cristian <ionel.mc@gmail.com>
+Ivan Pozdeev <vano@mail.mipt.ru>
+Jacob Kim <me@thejacobkim.com>
+jakirkham <jakirkham@gmail.com>
+Jakub Stasiak <kuba.stasiak@gmail.com>
+Jakub Vysoky <jakub@borka.cz>
+Jakub Wilk <jwilk@jwilk.net>
+James Cleveland <jamescleveland@gmail.com>
+James Cleveland <radiosilence@users.noreply.github.com>
+James Firth <hello@james-firth.com>
+James Polley <jp@jamezpolley.com>
+Jan Pokorný <jpokorny@redhat.com>
+Jannis Leidel <jannis@leidel.info>
+jarondl <me@jarondl.net>
+Jason R. Coombs <jaraco@jaraco.com>
+Jay Graves <jay@skabber.com>
+Jean-Christophe Fillion-Robin <jchris.fillionr@kitware.com>
+Jeff Barber <jbarber@computer.org>
+Jeff Dairiki <dairiki@dairiki.org>
+Jelmer Vernooij <jelmer@jelmer.uk>
+jenix21 <devfrog@gmail.com>
+Jeremy Stanley <fungi@yuggoth.org>
+Jeremy Zafran <jzafran@users.noreply.github.com>
+Jiashuo Li <jiasli@microsoft.com>
+Jim Garrison <jim@garrison.cc>
+Jivan Amara <Development@JivanAmara.net>
+John Paton <j.paton@catawiki.nl>
+John-Scott Atlakson <john.scott.atlakson@gmail.com>
+johnthagen <johnthagen@gmail.com>
+johnthagen <johnthagen@users.noreply.github.com>
+Jon Banafato <jon@jonafato.com>
+Jon Dufresne <jon.dufresne@gmail.com>
+Jon Parise <jon@indelible.org>
+Jonas Nockert <jonasnockert@gmail.com>
+Jonathan Herbert <foohyfooh@gmail.com>
+Joost Molenaar <j.j.molenaar@gmail.com>
+Jorge Niedbalski <niedbalski@gmail.com>
+Joseph Long <jdl@fastmail.fm>
+Josh Bronson <jabronson@gmail.com>
+Josh Hansen <josh@skwash.net>
+Josh Schneier <josh.schneier@gmail.com>
+Juanjo Bazán <jjbazan@gmail.com>
+Julian Berman <Julian@GrayVines.com>
+Julian Gethmann <julian.gethmann@kit.edu>
+Julien Demoor <julien@jdemoor.com>
+jwg4 <jack.grahl@yahoo.co.uk>
+Jyrki Pulliainen <jyrki@spotify.com>
+Kai Chen <kaichen120@gmail.com>
+Kamal Bin Mustafa <kamal@smach.net>
+kaustav haldar <hi@kaustav.me>
+keanemind <keanemind@gmail.com>
+Keith Maxwell <keith.maxwell@gmail.com>
+Kelsey Hightower <kelsey.hightower@gmail.com>
+Kenneth Belitzky <kenny@belitzky.com>
+Kenneth Reitz <me@kennethreitz.com>
+Kenneth Reitz <me@kennethreitz.org>
+Kevin Burke <kev@inburke.com>
+Kevin Carter <kevin.carter@rackspace.com>
+Kevin Frommelt <kevin.frommelt@webfilings.com>
+Kevin R Patterson <kevin.r.patterson@intel.com>
+Kexuan Sun <me@kianasun.com>
+Kit Randel <kit@nocturne.net.nz>
+kpinc <kop@meme.com>
+Krishna Oza <krishoza15sep@gmail.com>
+Kumar McMillan <kumar.mcmillan@gmail.com>
+Kyle Persohn <kyle.persohn@gmail.com>
+lakshmanaram <lakshmanaram.n@gmail.com>
+Laszlo Kiss-Kollar <kiss.kollar.laszlo@gmail.com>
+Laurent Bristiel <laurent@bristiel.com>
+Laurie Opperman <laurie@sitesee.com.au>
+Leon Sasson <leonsassonha@gmail.com>
+Lev Givon <lev@columbia.edu>
+Lincoln de Sousa <lincoln@comum.org>
+Lipis <lipiridis@gmail.com>
+Loren Carvalho <lcarvalho@linkedin.com>
+Lucas Cimon <lucas.cimon@gmail.com>
+Ludovic Gasc <gmludo@gmail.com>
+Luke Macken <lmacken@redhat.com>
+Luo Jiebin <luo.jiebin@qq.com>
+luojiebin <luojiebin@users.noreply.github.com>
+luz.paz <luzpaz@users.noreply.github.com>
+László Kiss Kollár <lkisskollar@bloomberg.net>
+László Kiss Kollár <lkollar@users.noreply.github.com>
+Marc Abramowitz <marc@marc-abramowitz.com>
+Marc Tamlyn <marc.tamlyn@gmail.com>
+Marcus Smith <qwcode@gmail.com>
+Mariatta <Mariatta@users.noreply.github.com>
+Mark Kohler <mark.kohler@proteinsimple.com>
+Mark Williams <markrwilliams@gmail.com>
+Mark Williams <mrw@enotuniq.org>
+Markus Hametner <fin+github@xbhd.org>
+Masaki <mk5986@nyu.edu>
+Masklinn <bitbucket.org@masklinn.net>
+Matej Stuchlik <mstuchli@redhat.com>
+Mathew Jennings <mjennings@foursquare.com>
+Mathieu Bridon <bochecha@daitauha.fr>
+Matt Good <matt@matt-good.net>
+Matt Maker <trip@monstro.us>
+Matt Robenolt <matt@ydekproductions.com>
+matthew <matthew@trumbell.net>
+Matthew Einhorn <moiein2000@gmail.com>
+Matthew Gilliard <matthew.gilliard@gmail.com>
+Matthew Iversen <teh.ivo@gmail.com>
+Matthew Trumbell <matthew@thirdstonepartners.com>
+Matthew Willson <matthew@swiftkey.com>
+Matthias Bussonnier <bussonniermatthias@gmail.com>
+mattip <matti.picus@gmail.com>
+Maxim Kurnikov <maxim.kurnikov@gmail.com>
+Maxime Rouyrre <rouyrre+git@gmail.com>
+mayeut <mayeut@users.noreply.github.com>
+mbaluna <44498973+mbaluna@users.noreply.github.com>
+mdebi <17590103+mdebi@users.noreply.github.com>
+memoselyk <memoselyk@gmail.com>
+Michael <michael-k@users.noreply.github.com>
+Michael Aquilina <michaelaquilina@gmail.com>
+Michael E. Karpeles <michael.karpeles@gmail.com>
+Michael Klich <michal@michalklich.com>
+Michael Williamson <mike@zwobble.org>
+michaelpacer <michaelpacer@gmail.com>
+Mickaël Schoentgen <mschoentgen@nuxeo.com>
+Miguel Araujo Perez <miguel.araujo.perez@gmail.com>
+Mihir Singh <git.service@mihirsingh.com>
+Mike <mikeh@blur.com>
+Mike Hendricks <mikeh@blur.com>
+Min RK <benjaminrk@gmail.com>
+MinRK <benjaminrk@gmail.com>
+Miro Hrončok <miro@hroncok.cz>
+Monica Baluna <mbaluna@bloomberg.net>
+montefra <franz.bergesund@gmail.com>
+Monty Taylor <mordred@inaugust.com>
+Nate Coraor <nate@bx.psu.edu>
+Nathaniel J. Smith <njs@pobox.com>
+Nehal J Wani <nehaljw.kkd1@gmail.com>
+Neil Botelho <neil.botelho321@gmail.com>
+Nick Coghlan <ncoghlan@gmail.com>
+Nick Stenning <nick@whiteink.com>
+Nick Timkovich <prometheus235@gmail.com>
+Nicolas Bock <nicolasbock@gmail.com>
+Nikhil Benesch <nikhil.benesch@gmail.com>
+Nitesh Sharma <nbsharma@outlook.com>
+Nowell Strite <nowell@strite.org>
+NtaleGrey <Shadikntale@gmail.com>
+nvdv <modestdev@gmail.com>
+Ofekmeister <ofekmeister@gmail.com>
+ofrinevo <ofrine@gmail.com>
+Oliver Jeeves <oliver.jeeves@ocado.com>
+Oliver Tonnhofer <olt@bogosoft.com>
+Olivier Girardot <ssaboum@gmail.com>
+Olivier Grisel <olivier.grisel@ensta.org>
+Ollie Rutherfurd <orutherfurd@gmail.com>
+OMOTO Kenji <k-omoto@m3.com>
+Omry Yadan <omry@fb.com>
+Oren Held <orenhe@il.ibm.com>
+Oscar Benjamin <oscar.j.benjamin@gmail.com>
+Oz N Tiram <oz.tiram@gmail.com>
+Pachwenko <32424503+Pachwenko@users.noreply.github.com>
+Patrick Dubroy <pdubroy@gmail.com>
+Patrick Jenkins <patrick@socialgrowthtechnologies.com>
+Patrick Lawson <pl@foursquare.com>
+patricktokeeffe <patricktokeeffe@users.noreply.github.com>
+Patrik Kopkan <pkopkan@redhat.com>
+Paul Kehrer <paul.l.kehrer@gmail.com>
+Paul Moore <p.f.moore@gmail.com>
+Paul Nasrat <pnasrat@gmail.com>
+Paul Oswald <pauloswald@gmail.com>
+Paul van der Linden <mail@paultjuh.org>
+Paulus Schoutsen <paulus@paulusschoutsen.nl>
+Pavithra Eswaramoorthy <33131404+QueenCoffee@users.noreply.github.com>
+Pawel Jasinski <pawel.jasinski@gmail.com>
+Pekka Klärck <peke@iki.fi>
+Peter Lisák <peter.lisak@showmax.com>
+Peter Waller <peter.waller@gmail.com>
+petr-tik <petr-tik@users.noreply.github.com>
+Phaneendra Chiruvella <hi@pcx.io>
+Phil Freo <phil@philfreo.com>
+Phil Pennock <phil@pennock-tech.com>
+Phil Whelan <phil123@gmail.com>
+Philip Jägenstedt <philip@foolip.org>
+Philip Molloy <pamolloy@users.noreply.github.com>
+Philippe Ombredanne <pombredanne@gmail.com>
+Pi Delport <pjdelport@gmail.com>
+Pierre-Yves Rofes <github@rofes.fr>
+pip <pypa-dev@googlegroups.com>
+Prabakaran Kumaresshan <k_prabakaran+github@hotmail.com>
+Prabhjyotsing Surjit Singh Sodhi <psinghsodhi@bloomberg.net>
+Prabhu Marappan <prabhum.794@gmail.com>
+Pradyun Gedam <pradyunsg@gmail.com>
+Pratik Mallya <mallya@us.ibm.com>
+Preet Thakkar <preet.thakkar@students.iiit.ac.in>
+Preston Holmes <preston@ptone.com>
+Przemek Wrzos <hetmankp@none>
+Pulkit Goyal <7895pulkit@gmail.com>
+Qiangning Hong <hongqn@gmail.com>
+Quentin Pradet <quentin.pradet@gmail.com>
+R. David Murray <rdmurray@bitdance.com>
+Rafael Caricio <rafael.jacinto@gmail.com>
+Ralf Schmitt <ralf@systemexit.de>
+Razzi Abuissa <razzi53@gmail.com>
+rdb <rdb@users.noreply.github.com>
+Remi Rampin <r@remirampin.com>
+Remi Rampin <remirampin@gmail.com>
+Rene Dudfield <renesd@gmail.com>
+Riccardo Magliocchetti <riccardo.magliocchetti@gmail.com>
+Richard Jones <r1chardj0n3s@gmail.com>
+RobberPhex <robberphex@gmail.com>
+Robert Collins <rbtcollins@hp.com>
+Robert McGibbon <rmcgibbo@gmail.com>
+Robert T. McGibbon <rmcgibbo@gmail.com>
+robin elisha robinson <elisha.rob@gmail.com>
+Roey Berman <roey.berman@gmail.com>
+Rohan Jain <crodjer@gmail.com>
+Rohan Jain <crodjer@users.noreply.github.com>
+Rohan Jain <mail@rohanjain.in>
+Roman Bogorodskiy <roman.bogorodskiy@ericsson.com>
+Romuald Brunet <romuald@chivil.com>
+Ronny Pfannschmidt <Ronny.Pfannschmidt@gmx.de>
+Rory McCann <rory@technomancy.org>
+Ross Brattain <ross.b.brattain@intel.com>
+Roy Wellington Ⅳ <cactus_hugged@yahoo.com>
+Roy Wellington Ⅳ <roy@mybasis.com>
+Ryan Wooden <rygwdn@gmail.com>
+ryneeverett <ryneeverett@gmail.com>
+Sachi King <nakato@nakato.io>
+Salvatore Rinchiera <salvatore@rinchiera.com>
+Savio Jomton <sajo240519@gmail.com>
+schlamar <marc.schlaich@gmail.com>
+Scott Kitterman <sklist@kitterman.com>
+Sean <me@sean.taipei>
+seanj <seanj@xyke.com>
+Sebastian Jordan <sebastian.jordan.mail@googlemail.com>
+Sebastian Schaetz <sschaetz@butterflynetinc.com>
+Segev Finer <segev208@gmail.com>
+SeongSoo Cho <ppiyakk2@printf.kr>
+Sergey Vasilyev <nolar@nolar.info>
+Seth Woodworth <seth@sethish.com>
+Shlomi Fish <shlomif@shlomifish.org>
+Shovan Maity <shovan.maity@mayadata.io>
+Simeon Visser <svisser@users.noreply.github.com>
+Simon Cross <hodgestar@gmail.com>
+Simon Pichugin <simon.pichugin@gmail.com>
+sinoroc <sinoroc.code+git@gmail.com>
+Sorin Sbarnea <sorin.sbarnea@gmail.com>
+Stavros Korokithakis <stavros@korokithakis.net>
+Stefan Scherfke <stefan@sofa-rockers.org>
+Stephan Erb <github@stephanerb.eu>
+stepshal <nessento@openmailbox.org>
+Steve (Gadget) Barnes <gadgetsteve@hotmail.com>
+Steve Barnes <gadgetsteve@hotmail.com>
+Steve Dower <steve.dower@microsoft.com>
+Steve Kowalik <steven@wedontsleep.org>
+Steven Myint <git@stevenmyint.com>
+stonebig <stonebig34@gmail.com>
+Stéphane Bidoul (ACSONE) <stephane.bidoul@acsone.eu>
+Stéphane Bidoul <stephane.bidoul@acsone.eu>
+Stéphane Klein <contact@stephane-klein.info>
+Sumana Harihareswara <sh@changeset.nyc>
+Sviatoslav Sydorenko <wk.cvs.github@sydorenko.org.ua>
+Sviatoslav Sydorenko <wk@sydorenko.org.ua>
+Swat009 <swatantra.kumar8@gmail.com>
+Takayuki SHIMIZUKAWA <shimizukawa@gmail.com>
+tbeswick <tbeswick@enphaseenergy.com>
+Thijs Triemstra <info@collab.nl>
+Thomas Fenzl <thomas.fenzl@gmail.com>
+Thomas Grainger <tagrain@gmail.com>
+Thomas Guettler <tguettler@tbz-pariv.de>
+Thomas Johansson <devnull@localhost>
+Thomas Kluyver <thomas@kluyver.me.uk>
+Thomas Smith <smithtg@ncbi.nlm.nih.gov>
+Tim D. Smith <github@tim-smith.us>
+Tim Gates <tim.gates@iress.com>
+Tim Harder <radhermit@gmail.com>
+Tim Heap <tim@timheap.me>
+tim smith <github@tim-smith.us>
+tinruufu <tinruufu@gmail.com>
+Tom Forbes <tom@tomforb.es>
+Tom Freudenheim <tom.freudenheim@onepeloton.com>
+Tom V <tom@viner.tv>
+Tomas Orsava <torsava@redhat.com>
+Tomer Chachamu <tomer.chachamu@gmail.com>
+Tony Beswick <tonybeswick@orcon.net.nz>
+Tony Zhaocheng Tan <tony@tonytan.io>
+TonyBeswick <TonyBeswick@users.noreply.github.com>
+toonarmycaptain <toonarmycaptain@hotmail.com>
+Toshio Kuratomi <toshio@fedoraproject.org>
+Travis Swicegood <development@domain51.com>
+Tzu-ping Chung <uranusjr@gmail.com>
+Valentin Haenel <valentin.haenel@gmx.de>
+Victor Stinner <victor.stinner@gmail.com>
+victorvpaulo <victorvpaulo@gmail.com>
+Viktor Szépe <viktor@szepe.net>
+Ville Skyttä <ville.skytta@iki.fi>
+Vinay Sajip <vinay_sajip@yahoo.co.uk>
+Vincent Philippon <sindaewoh@gmail.com>
+Vinicyus Macedo <7549205+vinicyusmacedo@users.noreply.github.com>
+Vitaly Babiy <vbabiy86@gmail.com>
+Vladimir Rutsky <rutsky@users.noreply.github.com>
+W. Trevor King <wking@drexel.edu>
+Wil Tan <wil@dready.org>
+Wilfred Hughes <me@wilfred.me.uk>
+William ML Leslie <william.leslie.ttg@gmail.com>
+William T Olson <trevor@heytrevor.com>
+Wilson Mo <wilsonfv@126.com>
+wim glenn <wim.glenn@gmail.com>
+Wolfgang Maier <wolfgang.maier@biologie.uni-freiburg.de>
+Xavier Fernandez <xav.fernandez@gmail.com>
+Xavier Fernandez <xavier.fernandez@polyconseil.fr>
+xoviat <xoviat@users.noreply.github.com>
+xtreak <tir.karthi@gmail.com>
+YAMAMOTO Takashi <yamamoto@midokura.com>
+Yen Chi Hsuan <yan12125@gmail.com>
+Yeray Diaz Diaz <yeraydiazdiaz@gmail.com>
+Yoval P <yoval@gmx.com>
+Yu Jian <askingyj@gmail.com>
+Yuan Jing Vincent Yan <yyan82@bloomberg.net>
+Zearin <zearin@gonk.net>
+Zearin <Zearin@users.noreply.github.com>
+Zhiping Deng <kofreestyler@gmail.com>
+Zvezdan Petkovic <zpetkovic@acm.org>
+Łukasz Langa <lukasz@langa.pl>
+Семён Марьясин <simeon@maryasin.name>
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/INSTALLER b/monEnvTP/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/LICENSE.txt b/monEnvTP/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/LICENSE.txt
new file mode 100644
index 0000000000000000000000000000000000000000..737fec5c5352af3d9a6a47a0670da4bdb52c5725
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/LICENSE.txt
@@ -0,0 +1,20 @@
+Copyright (c) 2008-2019 The pip developers (see AUTHORS.txt file)
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/METADATA b/monEnvTP/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..cf6c9302c5b0495077d258b68c77e2fe11f90f8f
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/METADATA
@@ -0,0 +1,13 @@
+Metadata-Version: 2.1
+Name: pkg_resources
+Version: 0.0.0
+Summary: UNKNOWN
+Home-page: UNKNOWN
+Author: UNKNOWN
+Author-email: UNKNOWN
+License: UNKNOWN
+Platform: UNKNOWN
+
+UNKNOWN
+
+
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/RECORD b/monEnvTP/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..2bbae1dbdbd1db605ceceba7988d706cca98a055
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/RECORD
@@ -0,0 +1,38 @@
+pkg_resources-0.0.0.dist-info/AUTHORS.txt,sha256=RtqU9KfonVGhI48DAA4-yTOBUhBtQTjFhaDzHoyh7uU,21518
+pkg_resources-0.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+pkg_resources-0.0.0.dist-info/LICENSE.txt,sha256=W6Ifuwlk-TatfRU2LR7W1JMcyMj5_y1NkRkOEJvnRDE,1090
+pkg_resources-0.0.0.dist-info/METADATA,sha256=V9_WPOtD1FnuKrTGv6Ique7kAOn2lasvT8W0_iMCCCk,177
+pkg_resources-0.0.0.dist-info/RECORD,,
+pkg_resources-0.0.0.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
+pkg_resources/__init__.py,sha256=0IssxXPnaDKpYZRra8Ime0JG4hwosQljItGD0bnIkGk,108349
+pkg_resources/__pycache__/__init__.cpython-38.pyc,,
+pkg_resources/__pycache__/py31compat.cpython-38.pyc,,
+pkg_resources/_vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pkg_resources/_vendor/__pycache__/__init__.cpython-38.pyc,,
+pkg_resources/_vendor/__pycache__/appdirs.cpython-38.pyc,,
+pkg_resources/_vendor/__pycache__/pyparsing.cpython-38.pyc,,
+pkg_resources/_vendor/__pycache__/six.cpython-38.pyc,,
+pkg_resources/_vendor/appdirs.py,sha256=MievUEuv3l_mQISH5SF0shDk_BNhHHzYiAPrT3ITN4I,24701
+pkg_resources/_vendor/packaging/__about__.py,sha256=zkcCPTN_6TcLW0Nrlg0176-R1QQ_WVPTm8sz1R4-HjM,720
+pkg_resources/_vendor/packaging/__init__.py,sha256=_vNac5TrzwsrzbOFIbF-5cHqc_Y2aPT2D7zrIR06BOo,513
+pkg_resources/_vendor/packaging/__pycache__/__about__.cpython-38.pyc,,
+pkg_resources/_vendor/packaging/__pycache__/__init__.cpython-38.pyc,,
+pkg_resources/_vendor/packaging/__pycache__/_compat.cpython-38.pyc,,
+pkg_resources/_vendor/packaging/__pycache__/_structures.cpython-38.pyc,,
+pkg_resources/_vendor/packaging/__pycache__/markers.cpython-38.pyc,,
+pkg_resources/_vendor/packaging/__pycache__/requirements.cpython-38.pyc,,
+pkg_resources/_vendor/packaging/__pycache__/specifiers.cpython-38.pyc,,
+pkg_resources/_vendor/packaging/__pycache__/utils.cpython-38.pyc,,
+pkg_resources/_vendor/packaging/__pycache__/version.cpython-38.pyc,,
+pkg_resources/_vendor/packaging/_compat.py,sha256=Vi_A0rAQeHbU-a9X0tt1yQm9RqkgQbDSxzRw8WlU9kA,860
+pkg_resources/_vendor/packaging/_structures.py,sha256=RImECJ4c_wTlaTYYwZYLHEiebDMaAJmK1oPARhw1T5o,1416
+pkg_resources/_vendor/packaging/markers.py,sha256=uEcBBtGvzqltgnArqb9c4RrcInXezDLos14zbBHhWJo,8248
+pkg_resources/_vendor/packaging/requirements.py,sha256=SikL2UynbsT0qtY9ltqngndha_sfo0w6XGFhAhoSoaQ,4355
+pkg_resources/_vendor/packaging/specifiers.py,sha256=SAMRerzO3fK2IkFZCaZkuwZaL_EGqHNOz4pni4vhnN0,28025
+pkg_resources/_vendor/packaging/utils.py,sha256=3m6WvPm6NNxE8rkTGmn0r75B_GZSGg7ikafxHsBN1WA,421
+pkg_resources/_vendor/packaging/version.py,sha256=OwGnxYfr2ghNzYx59qWIBkrK3SnB6n-Zfd1XaLpnnM0,11556
+pkg_resources/_vendor/pyparsing.py,sha256=tmrp-lu-qO1i75ZzIN5A12nKRRD1Cm4Vpk-5LR9rims,232055
+pkg_resources/_vendor/six.py,sha256=A6hdJZVjI3t_geebZ9BzUvwRrIXo0lfwzQlM2LcKyas,30098
+pkg_resources/extern/__init__.py,sha256=cHiEfHuLmm6rs5Ve_ztBfMI7Lr31vss-D4wkqF5xzlI,2498
+pkg_resources/extern/__pycache__/__init__.cpython-38.pyc,,
+pkg_resources/py31compat.py,sha256=-WQ0e4c3RG_acdhwC3gLiXhP_lg4G5q7XYkZkQg0gxU,558
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/WHEEL b/monEnvTP/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..ef99c6cf3283b50a273ac4c6d009a0aa85597070
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.34.2)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources/__init__.py b/monEnvTP/lib/python3.8/site-packages/pkg_resources/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..2f5aa64a6e10832f407601d668e4ef0d9d5d0aeb
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pkg_resources/__init__.py
@@ -0,0 +1,3296 @@
+# coding: utf-8
+"""
+Package resource API
+--------------------
+
+A resource is a logical file contained within a package, or a logical
+subdirectory thereof. The package resource API expects resource names
+to have their path parts separated with ``/``, *not* whatever the local
+path separator is. Do not use os.path operations to manipulate resource
+names being passed into the API.
+
+The package resource API is designed to work with normal filesystem packages,
+.egg files, and unpacked .egg files. It can also work in a limited way with
+.zip files and with custom PEP 302 loaders that support the ``get_data()``
+method.
+"""
+
+from __future__ import absolute_import
+
+import sys
+import os
+import io
+import time
+import re
+import types
+import zipfile
+import zipimport
+import warnings
+import stat
+import functools
+import pkgutil
+import operator
+import platform
+import collections
+import plistlib
+import email.parser
+import errno
+import tempfile
+import textwrap
+import itertools
+import inspect
+import ntpath
+import posixpath
+from pkgutil import get_importer
+
+try:
+ import _imp
+except ImportError:
+ # Python 3.2 compatibility
+ import imp as _imp
+
+try:
+ FileExistsError
+except NameError:
+ FileExistsError = OSError
+
+from pkg_resources.extern import six
+from pkg_resources.extern.six.moves import urllib, map, filter
+
+# capture these to bypass sandboxing
+from os import utime
+try:
+ from os import mkdir, rename, unlink
+ WRITE_SUPPORT = True
+except ImportError:
+ # no write support, probably under GAE
+ WRITE_SUPPORT = False
+
+from os import open as os_open
+from os.path import isdir, split
+
+try:
+ import importlib.machinery as importlib_machinery
+ # access attribute to force import under delayed import mechanisms.
+ importlib_machinery.__name__
+except ImportError:
+ importlib_machinery = None
+
+from . import py31compat
+from pkg_resources.extern import appdirs
+from pkg_resources.extern import packaging
+__import__('pkg_resources.extern.packaging.version')
+__import__('pkg_resources.extern.packaging.specifiers')
+__import__('pkg_resources.extern.packaging.requirements')
+__import__('pkg_resources.extern.packaging.markers')
+
+
+__metaclass__ = type
+
+
+if (3, 0) < sys.version_info < (3, 5):
+ raise RuntimeError("Python 3.5 or later is required")
+
+if six.PY2:
+ # Those builtin exceptions are only defined in Python 3
+ PermissionError = None
+ NotADirectoryError = None
+
+# declare some globals that will be defined later to
+# satisfy the linters.
+require = None
+working_set = None
+add_activation_listener = None
+resources_stream = None
+cleanup_resources = None
+resource_dir = None
+resource_stream = None
+set_extraction_path = None
+resource_isdir = None
+resource_string = None
+iter_entry_points = None
+resource_listdir = None
+resource_filename = None
+resource_exists = None
+_distribution_finders = None
+_namespace_handlers = None
+_namespace_packages = None
+
+
+class PEP440Warning(RuntimeWarning):
+ """
+ Used when there is an issue with a version or specifier not complying with
+ PEP 440.
+ """
+
+
+def parse_version(v):
+ try:
+ return packaging.version.Version(v)
+ except packaging.version.InvalidVersion:
+ return packaging.version.LegacyVersion(v)
+
+
+_state_vars = {}
+
+
+def _declare_state(vartype, **kw):
+ globals().update(kw)
+ _state_vars.update(dict.fromkeys(kw, vartype))
+
+
+def __getstate__():
+ state = {}
+ g = globals()
+ for k, v in _state_vars.items():
+ state[k] = g['_sget_' + v](g[k])
+ return state
+
+
+def __setstate__(state):
+ g = globals()
+ for k, v in state.items():
+ g['_sset_' + _state_vars[k]](k, g[k], v)
+ return state
+
+
+def _sget_dict(val):
+ return val.copy()
+
+
+def _sset_dict(key, ob, state):
+ ob.clear()
+ ob.update(state)
+
+
+def _sget_object(val):
+ return val.__getstate__()
+
+
+def _sset_object(key, ob, state):
+ ob.__setstate__(state)
+
+
+_sget_none = _sset_none = lambda *args: None
+
+
+def get_supported_platform():
+ """Return this platform's maximum compatible version.
+
+ distutils.util.get_platform() normally reports the minimum version
+ of Mac OS X that would be required to *use* extensions produced by
+ distutils. But what we want when checking compatibility is to know the
+ version of Mac OS X that we are *running*. To allow usage of packages that
+ explicitly require a newer version of Mac OS X, we must also know the
+ current version of the OS.
+
+ If this condition occurs for any other platform with a version in its
+ platform strings, this function should be extended accordingly.
+ """
+ plat = get_build_platform()
+ m = macosVersionString.match(plat)
+ if m is not None and sys.platform == "darwin":
+ try:
+ plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))
+ except ValueError:
+ # not Mac OS X
+ pass
+ return plat
+
+
+__all__ = [
+ # Basic resource access and distribution/entry point discovery
+ 'require', 'run_script', 'get_provider', 'get_distribution',
+ 'load_entry_point', 'get_entry_map', 'get_entry_info',
+ 'iter_entry_points',
+ 'resource_string', 'resource_stream', 'resource_filename',
+ 'resource_listdir', 'resource_exists', 'resource_isdir',
+
+ # Environmental control
+ 'declare_namespace', 'working_set', 'add_activation_listener',
+ 'find_distributions', 'set_extraction_path', 'cleanup_resources',
+ 'get_default_cache',
+
+ # Primary implementation classes
+ 'Environment', 'WorkingSet', 'ResourceManager',
+ 'Distribution', 'Requirement', 'EntryPoint',
+
+ # Exceptions
+ 'ResolutionError', 'VersionConflict', 'DistributionNotFound',
+ 'UnknownExtra', 'ExtractionError',
+
+ # Warnings
+ 'PEP440Warning',
+
+ # Parsing functions and string utilities
+ 'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
+ 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
+ 'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker',
+
+ # filesystem utilities
+ 'ensure_directory', 'normalize_path',
+
+ # Distribution "precedence" constants
+ 'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',
+
+ # "Provider" interfaces, implementations, and registration/lookup APIs
+ 'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
+ 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
+ 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
+ 'register_finder', 'register_namespace_handler', 'register_loader_type',
+ 'fixup_namespace_packages', 'get_importer',
+
+ # Warnings
+ 'PkgResourcesDeprecationWarning',
+
+ # Deprecated/backward compatibility only
+ 'run_main', 'AvailableDistributions',
+]
+
+
+class ResolutionError(Exception):
+ """Abstract base for dependency resolution errors"""
+
+ def __repr__(self):
+ return self.__class__.__name__ + repr(self.args)
+
+
+class VersionConflict(ResolutionError):
+ """
+ An already-installed version conflicts with the requested version.
+
+ Should be initialized with the installed Distribution and the requested
+ Requirement.
+ """
+
+ _template = "{self.dist} is installed but {self.req} is required"
+
+ @property
+ def dist(self):
+ return self.args[0]
+
+ @property
+ def req(self):
+ return self.args[1]
+
+ def report(self):
+ return self._template.format(**locals())
+
+ def with_context(self, required_by):
+ """
+ If required_by is non-empty, return a version of self that is a
+ ContextualVersionConflict.
+ """
+ if not required_by:
+ return self
+ args = self.args + (required_by,)
+ return ContextualVersionConflict(*args)
+
+
+class ContextualVersionConflict(VersionConflict):
+ """
+ A VersionConflict that accepts a third parameter, the set of the
+ requirements that required the installed Distribution.
+ """
+
+ _template = VersionConflict._template + ' by {self.required_by}'
+
+ @property
+ def required_by(self):
+ return self.args[2]
+
+
+class DistributionNotFound(ResolutionError):
+ """A requested distribution was not found"""
+
+ _template = ("The '{self.req}' distribution was not found "
+ "and is required by {self.requirers_str}")
+
+ @property
+ def req(self):
+ return self.args[0]
+
+ @property
+ def requirers(self):
+ return self.args[1]
+
+ @property
+ def requirers_str(self):
+ if not self.requirers:
+ return 'the application'
+ return ', '.join(self.requirers)
+
+ def report(self):
+ return self._template.format(**locals())
+
+ def __str__(self):
+ return self.report()
+
+
+class UnknownExtra(ResolutionError):
+ """Distribution doesn't have an "extra feature" of the given name"""
+
+
+_provider_factories = {}
+
+PY_MAJOR = '{}.{}'.format(*sys.version_info)
+EGG_DIST = 3
+BINARY_DIST = 2
+SOURCE_DIST = 1
+CHECKOUT_DIST = 0
+DEVELOP_DIST = -1
+
+
+def register_loader_type(loader_type, provider_factory):
+ """Register `provider_factory` to make providers for `loader_type`
+
+ `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
+ and `provider_factory` is a function that, passed a *module* object,
+ returns an ``IResourceProvider`` for that module.
+ """
+ _provider_factories[loader_type] = provider_factory
+
+
+def get_provider(moduleOrReq):
+ """Return an IResourceProvider for the named module or requirement"""
+ if isinstance(moduleOrReq, Requirement):
+ return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
+ try:
+ module = sys.modules[moduleOrReq]
+ except KeyError:
+ __import__(moduleOrReq)
+ module = sys.modules[moduleOrReq]
+ loader = getattr(module, '__loader__', None)
+ return _find_adapter(_provider_factories, loader)(module)
+
+
+def _macosx_vers(_cache=[]):
+ if not _cache:
+ version = platform.mac_ver()[0]
+ # fallback for MacPorts
+ if version == '':
+ plist = '/System/Library/CoreServices/SystemVersion.plist'
+ if os.path.exists(plist):
+ if hasattr(plistlib, 'readPlist'):
+ plist_content = plistlib.readPlist(plist)
+ if 'ProductVersion' in plist_content:
+ version = plist_content['ProductVersion']
+
+ _cache.append(version.split('.'))
+ return _cache[0]
+
+
+def _macosx_arch(machine):
+ return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine)
+
+
+def get_build_platform():
+ """Return this platform's string for platform-specific distributions
+
+ XXX Currently this is the same as ``distutils.util.get_platform()``, but it
+ needs some hacks for Linux and Mac OS X.
+ """
+ from sysconfig import get_platform
+
+ plat = get_platform()
+ if sys.platform == "darwin" and not plat.startswith('macosx-'):
+ try:
+ version = _macosx_vers()
+ machine = os.uname()[4].replace(" ", "_")
+ return "macosx-%d.%d-%s" % (
+ int(version[0]), int(version[1]),
+ _macosx_arch(machine),
+ )
+ except ValueError:
+ # if someone is running a non-Mac darwin system, this will fall
+ # through to the default implementation
+ pass
+ return plat
+
+
+macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
+darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
+# XXX backward compat
+get_platform = get_build_platform
+
+
+def compatible_platforms(provided, required):
+ """Can code for the `provided` platform run on the `required` platform?
+
+ Returns true if either platform is ``None``, or the platforms are equal.
+
+ XXX Needs compatibility checks for Linux and other unixy OSes.
+ """
+ if provided is None or required is None or provided == required:
+ # easy case
+ return True
+
+ # Mac OS X special cases
+ reqMac = macosVersionString.match(required)
+ if reqMac:
+ provMac = macosVersionString.match(provided)
+
+ # is this a Mac package?
+ if not provMac:
+ # this is backwards compatibility for packages built before
+ # setuptools 0.6. All packages built after this point will
+ # use the new macosx designation.
+ provDarwin = darwinVersionString.match(provided)
+ if provDarwin:
+ dversion = int(provDarwin.group(1))
+ macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
+ if dversion == 7 and macosversion >= "10.3" or \
+ dversion == 8 and macosversion >= "10.4":
+ return True
+ # egg isn't macosx or legacy darwin
+ return False
+
+ # are they the same major version and machine type?
+ if provMac.group(1) != reqMac.group(1) or \
+ provMac.group(3) != reqMac.group(3):
+ return False
+
+ # is the required OS major update >= the provided one?
+ if int(provMac.group(2)) > int(reqMac.group(2)):
+ return False
+
+ return True
+
+ # XXX Linux and other platforms' special cases should go here
+ return False
+
+
+def run_script(dist_spec, script_name):
+ """Locate distribution `dist_spec` and run its `script_name` script"""
+ ns = sys._getframe(1).f_globals
+ name = ns['__name__']
+ ns.clear()
+ ns['__name__'] = name
+ require(dist_spec)[0].run_script(script_name, ns)
+
+
+# backward compatibility
+run_main = run_script
+
+
+def get_distribution(dist):
+ """Return a current distribution object for a Requirement or string"""
+ if isinstance(dist, six.string_types):
+ dist = Requirement.parse(dist)
+ if isinstance(dist, Requirement):
+ dist = get_provider(dist)
+ if not isinstance(dist, Distribution):
+ raise TypeError("Expected string, Requirement, or Distribution", dist)
+ return dist
+
+
+def load_entry_point(dist, group, name):
+ """Return `name` entry point of `group` for `dist` or raise ImportError"""
+ return get_distribution(dist).load_entry_point(group, name)
+
+
+def get_entry_map(dist, group=None):
+ """Return the entry point map for `group`, or the full entry map"""
+ return get_distribution(dist).get_entry_map(group)
+
+
+def get_entry_info(dist, group, name):
+ """Return the EntryPoint object for `group`+`name`, or ``None``"""
+ return get_distribution(dist).get_entry_info(group, name)
+
+
+class IMetadataProvider:
+ def has_metadata(name):
+ """Does the package's distribution contain the named metadata?"""
+
+ def get_metadata(name):
+ """The named metadata resource as a string"""
+
+ def get_metadata_lines(name):
+ """Yield named metadata resource as list of non-blank non-comment lines
+
+ Leading and trailing whitespace is stripped from each line, and lines
+ with ``#`` as the first non-blank character are omitted."""
+
+ def metadata_isdir(name):
+ """Is the named metadata a directory? (like ``os.path.isdir()``)"""
+
+ def metadata_listdir(name):
+ """List of metadata names in the directory (like ``os.listdir()``)"""
+
+ def run_script(script_name, namespace):
+ """Execute the named script in the supplied namespace dictionary"""
+
+
+class IResourceProvider(IMetadataProvider):
+ """An object that provides access to package resources"""
+
+ def get_resource_filename(manager, resource_name):
+ """Return a true filesystem path for `resource_name`
+
+ `manager` must be an ``IResourceManager``"""
+
+ def get_resource_stream(manager, resource_name):
+ """Return a readable file-like object for `resource_name`
+
+ `manager` must be an ``IResourceManager``"""
+
+ def get_resource_string(manager, resource_name):
+ """Return a string containing the contents of `resource_name`
+
+ `manager` must be an ``IResourceManager``"""
+
+ def has_resource(resource_name):
+ """Does the package contain the named resource?"""
+
+ def resource_isdir(resource_name):
+ """Is the named resource a directory? (like ``os.path.isdir()``)"""
+
+ def resource_listdir(resource_name):
+ """List of resource names in the directory (like ``os.listdir()``)"""
+
+
+class WorkingSet:
+ """A collection of active distributions on sys.path (or a similar list)"""
+
+ def __init__(self, entries=None):
+ """Create working set from list of path entries (default=sys.path)"""
+ self.entries = []
+ self.entry_keys = {}
+ self.by_key = {}
+ self.callbacks = []
+
+ if entries is None:
+ entries = sys.path
+
+ for entry in entries:
+ self.add_entry(entry)
+
+ @classmethod
+ def _build_master(cls):
+ """
+ Prepare the master working set.
+ """
+ ws = cls()
+ try:
+ from __main__ import __requires__
+ except ImportError:
+ # The main program does not list any requirements
+ return ws
+
+ # ensure the requirements are met
+ try:
+ ws.require(__requires__)
+ except VersionConflict:
+ return cls._build_from_requirements(__requires__)
+
+ return ws
+
+ @classmethod
+ def _build_from_requirements(cls, req_spec):
+ """
+ Build a working set from a requirement spec. Rewrites sys.path.
+ """
+ # try it without defaults already on sys.path
+ # by starting with an empty path
+ ws = cls([])
+ reqs = parse_requirements(req_spec)
+ dists = ws.resolve(reqs, Environment())
+ for dist in dists:
+ ws.add(dist)
+
+ # add any missing entries from sys.path
+ for entry in sys.path:
+ if entry not in ws.entries:
+ ws.add_entry(entry)
+
+ # then copy back to sys.path
+ sys.path[:] = ws.entries
+ return ws
+
+ def add_entry(self, entry):
+ """Add a path item to ``.entries``, finding any distributions on it
+
+ ``find_distributions(entry, True)`` is used to find distributions
+ corresponding to the path entry, and they are added. `entry` is
+ always appended to ``.entries``, even if it is already present.
+ (This is because ``sys.path`` can contain the same value more than
+ once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
+ equal ``sys.path``.)
+ """
+ self.entry_keys.setdefault(entry, [])
+ self.entries.append(entry)
+ for dist in find_distributions(entry, True):
+ self.add(dist, entry, False)
+
+ def __contains__(self, dist):
+ """True if `dist` is the active distribution for its project"""
+ return self.by_key.get(dist.key) == dist
+
+ def find(self, req):
+ """Find a distribution matching requirement `req`
+
+ If there is an active distribution for the requested project, this
+ returns it as long as it meets the version requirement specified by
+ `req`. But, if there is an active distribution for the project and it
+ does *not* meet the `req` requirement, ``VersionConflict`` is raised.
+ If there is no active distribution for the requested project, ``None``
+ is returned.
+ """
+ dist = self.by_key.get(req.key)
+ if dist is not None and dist not in req:
+ # XXX add more info
+ raise VersionConflict(dist, req)
+ return dist
+
+ def iter_entry_points(self, group, name=None):
+ """Yield entry point objects from `group` matching `name`
+
+ If `name` is None, yields all entry points in `group` from all
+ distributions in the working set, otherwise only ones matching
+ both `group` and `name` are yielded (in distribution order).
+ """
+ return (
+ entry
+ for dist in self
+ for entry in dist.get_entry_map(group).values()
+ if name is None or name == entry.name
+ )
+
+ def run_script(self, requires, script_name):
+ """Locate distribution for `requires` and run `script_name` script"""
+ ns = sys._getframe(1).f_globals
+ name = ns['__name__']
+ ns.clear()
+ ns['__name__'] = name
+ self.require(requires)[0].run_script(script_name, ns)
+
+ def __iter__(self):
+ """Yield distributions for non-duplicate projects in the working set
+
+ The yield order is the order in which the items' path entries were
+ added to the working set.
+ """
+ seen = {}
+ for item in self.entries:
+ if item not in self.entry_keys:
+ # workaround a cache issue
+ continue
+
+ for key in self.entry_keys[item]:
+ if key not in seen:
+ seen[key] = 1
+ yield self.by_key[key]
+
+ def add(self, dist, entry=None, insert=True, replace=False):
+ """Add `dist` to working set, associated with `entry`
+
+ If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
+ On exit from this routine, `entry` is added to the end of the working
+ set's ``.entries`` (if it wasn't already present).
+
+ `dist` is only added to the working set if it's for a project that
+ doesn't already have a distribution in the set, unless `replace=True`.
+ If it's added, any callbacks registered with the ``subscribe()`` method
+ will be called.
+ """
+ if insert:
+ dist.insert_on(self.entries, entry, replace=replace)
+
+ if entry is None:
+ entry = dist.location
+ keys = self.entry_keys.setdefault(entry, [])
+ keys2 = self.entry_keys.setdefault(dist.location, [])
+ if not replace and dist.key in self.by_key:
+ # ignore hidden distros
+ return
+
+ self.by_key[dist.key] = dist
+ if dist.key not in keys:
+ keys.append(dist.key)
+ if dist.key not in keys2:
+ keys2.append(dist.key)
+ self._added_new(dist)
+
+ def resolve(self, requirements, env=None, installer=None,
+ replace_conflicting=False, extras=None):
+ """List all distributions needed to (recursively) meet `requirements`
+
+ `requirements` must be a sequence of ``Requirement`` objects. `env`,
+ if supplied, should be an ``Environment`` instance. If
+ not supplied, it defaults to all distributions available within any
+ entry or distribution in the working set. `installer`, if supplied,
+ will be invoked with each requirement that cannot be met by an
+ already-installed distribution; it should return a ``Distribution`` or
+ ``None``.
+
+ Unless `replace_conflicting=True`, raises a VersionConflict exception
+ if
+ any requirements are found on the path that have the correct name but
+ the wrong version. Otherwise, if an `installer` is supplied it will be
+ invoked to obtain the correct version of the requirement and activate
+ it.
+
+ `extras` is a list of the extras to be used with these requirements.
+ This is important because extra requirements may look like `my_req;
+ extra = "my_extra"`, which would otherwise be interpreted as a purely
+ optional requirement. Instead, we want to be able to assert that these
+ requirements are truly required.
+ """
+
+ # set up the stack
+ requirements = list(requirements)[::-1]
+ # set of processed requirements
+ processed = {}
+ # key -> dist
+ best = {}
+ to_activate = []
+
+ req_extras = _ReqExtras()
+
+ # Mapping of requirement to set of distributions that required it;
+ # useful for reporting info about conflicts.
+ required_by = collections.defaultdict(set)
+
+ while requirements:
+ # process dependencies breadth-first
+ req = requirements.pop(0)
+ if req in processed:
+ # Ignore cyclic or redundant dependencies
+ continue
+
+ if not req_extras.markers_pass(req, extras):
+ continue
+
+ dist = best.get(req.key)
+ if dist is None:
+ # Find the best distribution and add it to the map
+ dist = self.by_key.get(req.key)
+ if dist is None or (dist not in req and replace_conflicting):
+ ws = self
+ if env is None:
+ if dist is None:
+ env = Environment(self.entries)
+ else:
+ # Use an empty environment and workingset to avoid
+ # any further conflicts with the conflicting
+ # distribution
+ env = Environment([])
+ ws = WorkingSet([])
+ dist = best[req.key] = env.best_match(
+ req, ws, installer,
+ replace_conflicting=replace_conflicting
+ )
+ if dist is None:
+ requirers = required_by.get(req, None)
+ raise DistributionNotFound(req, requirers)
+ to_activate.append(dist)
+ if dist not in req:
+ # Oops, the "best" so far conflicts with a dependency
+ dependent_req = required_by[req]
+ raise VersionConflict(dist, req).with_context(dependent_req)
+
+ # push the new requirements onto the stack
+ new_requirements = dist.requires(req.extras)[::-1]
+ requirements.extend(new_requirements)
+
+ # Register the new requirements needed by req
+ for new_requirement in new_requirements:
+ required_by[new_requirement].add(req.project_name)
+ req_extras[new_requirement] = req.extras
+
+ processed[req] = True
+
+ # return list of distros to activate
+ return to_activate
+
+ def find_plugins(
+ self, plugin_env, full_env=None, installer=None, fallback=True):
+ """Find all activatable distributions in `plugin_env`
+
+ Example usage::
+
+ distributions, errors = working_set.find_plugins(
+ Environment(plugin_dirlist)
+ )
+ # add plugins+libs to sys.path
+ map(working_set.add, distributions)
+ # display errors
+ print('Could not load', errors)
+
+ The `plugin_env` should be an ``Environment`` instance that contains
+ only distributions that are in the project's "plugin directory" or
+ directories. The `full_env`, if supplied, should be an ``Environment``
+ contains all currently-available distributions. If `full_env` is not
+ supplied, one is created automatically from the ``WorkingSet`` this
+ method is called on, which will typically mean that every directory on
+ ``sys.path`` will be scanned for distributions.
+
+ `installer` is a standard installer callback as used by the
+ ``resolve()`` method. The `fallback` flag indicates whether we should
+ attempt to resolve older versions of a plugin if the newest version
+ cannot be resolved.
+
+ This method returns a 2-tuple: (`distributions`, `error_info`), where
+ `distributions` is a list of the distributions found in `plugin_env`
+ that were loadable, along with any other distributions that are needed
+ to resolve their dependencies. `error_info` is a dictionary mapping
+ unloadable plugin distributions to an exception instance describing the
+ error that occurred. Usually this will be a ``DistributionNotFound`` or
+ ``VersionConflict`` instance.
+ """
+
+ plugin_projects = list(plugin_env)
+ # scan project names in alphabetic order
+ plugin_projects.sort()
+
+ error_info = {}
+ distributions = {}
+
+ if full_env is None:
+ env = Environment(self.entries)
+ env += plugin_env
+ else:
+ env = full_env + plugin_env
+
+ shadow_set = self.__class__([])
+ # put all our entries in shadow_set
+ list(map(shadow_set.add, self))
+
+ for project_name in plugin_projects:
+
+ for dist in plugin_env[project_name]:
+
+ req = [dist.as_requirement()]
+
+ try:
+ resolvees = shadow_set.resolve(req, env, installer)
+
+ except ResolutionError as v:
+ # save error info
+ error_info[dist] = v
+ if fallback:
+ # try the next older version of project
+ continue
+ else:
+ # give up on this project, keep going
+ break
+
+ else:
+ list(map(shadow_set.add, resolvees))
+ distributions.update(dict.fromkeys(resolvees))
+
+ # success, no need to try any more versions of this project
+ break
+
+ distributions = list(distributions)
+ distributions.sort()
+
+ return distributions, error_info
+
+ def require(self, *requirements):
+ """Ensure that distributions matching `requirements` are activated
+
+ `requirements` must be a string or a (possibly-nested) sequence
+ thereof, specifying the distributions and versions required. The
+ return value is a sequence of the distributions that needed to be
+ activated to fulfill the requirements; all relevant distributions are
+ included, even if they were already activated in this working set.
+ """
+ needed = self.resolve(parse_requirements(requirements))
+
+ for dist in needed:
+ self.add(dist)
+
+ return needed
+
+ def subscribe(self, callback, existing=True):
+ """Invoke `callback` for all distributions
+
+ If `existing=True` (default),
+ call on all existing ones, as well.
+ """
+ if callback in self.callbacks:
+ return
+ self.callbacks.append(callback)
+ if not existing:
+ return
+ for dist in self:
+ callback(dist)
+
+ def _added_new(self, dist):
+ for callback in self.callbacks:
+ callback(dist)
+
+ def __getstate__(self):
+ return (
+ self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
+ self.callbacks[:]
+ )
+
+ def __setstate__(self, e_k_b_c):
+ entries, keys, by_key, callbacks = e_k_b_c
+ self.entries = entries[:]
+ self.entry_keys = keys.copy()
+ self.by_key = by_key.copy()
+ self.callbacks = callbacks[:]
+
+
+class _ReqExtras(dict):
+ """
+ Map each requirement to the extras that demanded it.
+ """
+
+ def markers_pass(self, req, extras=None):
+ """
+ Evaluate markers for req against each extra that
+ demanded it.
+
+ Return False if the req has a marker and fails
+ evaluation. Otherwise, return True.
+ """
+ extra_evals = (
+ req.marker.evaluate({'extra': extra})
+ for extra in self.get(req, ()) + (extras or (None,))
+ )
+ return not req.marker or any(extra_evals)
+
+
+class Environment:
+ """Searchable snapshot of distributions on a search path"""
+
+ def __init__(
+ self, search_path=None, platform=get_supported_platform(),
+ python=PY_MAJOR):
+ """Snapshot distributions available on a search path
+
+ Any distributions found on `search_path` are added to the environment.
+ `search_path` should be a sequence of ``sys.path`` items. If not
+ supplied, ``sys.path`` is used.
+
+ `platform` is an optional string specifying the name of the platform
+ that platform-specific distributions must be compatible with. If
+ unspecified, it defaults to the current platform. `python` is an
+ optional string naming the desired version of Python (e.g. ``'3.6'``);
+ it defaults to the current version.
+
+ You may explicitly set `platform` (and/or `python`) to ``None`` if you
+ wish to map *all* distributions, not just those compatible with the
+ running platform or Python version.
+ """
+ self._distmap = {}
+ self.platform = platform
+ self.python = python
+ self.scan(search_path)
+
+ def can_add(self, dist):
+ """Is distribution `dist` acceptable for this environment?
+
+ The distribution must match the platform and python version
+ requirements specified when this environment was created, or False
+ is returned.
+ """
+ py_compat = (
+ self.python is None
+ or dist.py_version is None
+ or dist.py_version == self.python
+ )
+ return py_compat and compatible_platforms(dist.platform, self.platform)
+
+ def remove(self, dist):
+ """Remove `dist` from the environment"""
+ self._distmap[dist.key].remove(dist)
+
+ def scan(self, search_path=None):
+ """Scan `search_path` for distributions usable in this environment
+
+ Any distributions found are added to the environment.
+ `search_path` should be a sequence of ``sys.path`` items. If not
+ supplied, ``sys.path`` is used. Only distributions conforming to
+ the platform/python version defined at initialization are added.
+ """
+ if search_path is None:
+ search_path = sys.path
+
+ for item in search_path:
+ for dist in find_distributions(item):
+ self.add(dist)
+
+ def __getitem__(self, project_name):
+ """Return a newest-to-oldest list of distributions for `project_name`
+
+ Uses case-insensitive `project_name` comparison, assuming all the
+ project's distributions use their project's name converted to all
+ lowercase as their key.
+
+ """
+ distribution_key = project_name.lower()
+ return self._distmap.get(distribution_key, [])
+
+ def add(self, dist):
+ """Add `dist` if we ``can_add()`` it and it has not already been added
+ """
+ if self.can_add(dist) and dist.has_version():
+ dists = self._distmap.setdefault(dist.key, [])
+ if dist not in dists:
+ dists.append(dist)
+ dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)
+
+ def best_match(
+ self, req, working_set, installer=None, replace_conflicting=False):
+ """Find distribution best matching `req` and usable on `working_set`
+
+ This calls the ``find(req)`` method of the `working_set` to see if a
+ suitable distribution is already active. (This may raise
+ ``VersionConflict`` if an unsuitable version of the project is already
+ active in the specified `working_set`.) If a suitable distribution
+ isn't active, this method returns the newest distribution in the
+ environment that meets the ``Requirement`` in `req`. If no suitable
+ distribution is found, and `installer` is supplied, then the result of
+ calling the environment's ``obtain(req, installer)`` method will be
+ returned.
+ """
+ try:
+ dist = working_set.find(req)
+ except VersionConflict:
+ if not replace_conflicting:
+ raise
+ dist = None
+ if dist is not None:
+ return dist
+ for dist in self[req.key]:
+ if dist in req:
+ return dist
+ # try to download/install
+ return self.obtain(req, installer)
+
+ def obtain(self, requirement, installer=None):
+ """Obtain a distribution matching `requirement` (e.g. via download)
+
+ Obtain a distro that matches requirement (e.g. via download). In the
+ base ``Environment`` class, this routine just returns
+ ``installer(requirement)``, unless `installer` is None, in which case
+ None is returned instead. This method is a hook that allows subclasses
+ to attempt other ways of obtaining a distribution before falling back
+ to the `installer` argument."""
+ if installer is not None:
+ return installer(requirement)
+
    def __iter__(self):
        """Yield the unique project names of the available distributions"""
        # Skip keys whose distribution list is empty (e.g. after removals),
        # so iteration only reports projects that still have candidates.
        for key in self._distmap.keys():
            if self[key]:
                yield key
+
    def __iadd__(self, other):
        """In-place addition of a distribution or environment"""
        if isinstance(other, Distribution):
            # A single distribution: index it directly.
            self.add(other)
        elif isinstance(other, Environment):
            # Another environment: merge every distribution of every project.
            for project in other:
                for dist in other[project]:
                    self.add(dist)
        else:
            raise TypeError("Can't add %r to environment" % (other,))
        return self
+
    def __add__(self, other):
        """Add an environment or distribution to an environment"""
        # Build a fresh, unrestricted environment (platform=None/python=None
        # accepts everything), then merge both operands via __iadd__.
        new = self.__class__([], platform=None, python=None)
        for env in self, other:
            new += env
        return new
+
+
# XXX backward compatibility
# Legacy alias: older clients imported ``AvailableDistributions``; it is
# the same class as ``Environment``.
AvailableDistributions = Environment
+
+
class ExtractionError(RuntimeError):
    """An error occurred extracting a resource

    The following attributes are available from instances of this exception:

    manager
        The resource manager that raised this exception

    cache_path
        The base directory for resource extraction

    original_error
        The exception instance that caused extraction to fail
    """
    # No body beyond the docstring: the attributes listed above are
    # assigned by the raiser (see ResourceManager.extraction_error).
+
+
class ResourceManager:
    """Manage resource extraction and packages"""
    # Class-level default; set_extraction_path() shadows it per instance.
    extraction_path = None

    def __init__(self):
        # Maps each extraction target path -> 1; used both to detect that
        # extraction has begun and (per cleanup_resources' docstring) to
        # track files for later cleanup.
        self.cached_files = {}

    def resource_exists(self, package_or_requirement, resource_name):
        """Does the named resource exist?"""
        return get_provider(package_or_requirement).has_resource(resource_name)

    def resource_isdir(self, package_or_requirement, resource_name):
        """Is the named resource an existing directory?"""
        return get_provider(package_or_requirement).resource_isdir(
            resource_name
        )

    def resource_filename(self, package_or_requirement, resource_name):
        """Return a true filesystem path for specified resource"""
        return get_provider(package_or_requirement).get_resource_filename(
            self, resource_name
        )

    def resource_stream(self, package_or_requirement, resource_name):
        """Return a readable file-like object for specified resource"""
        return get_provider(package_or_requirement).get_resource_stream(
            self, resource_name
        )

    def resource_string(self, package_or_requirement, resource_name):
        """Return specified resource as a string"""
        return get_provider(package_or_requirement).get_resource_string(
            self, resource_name
        )

    def resource_listdir(self, package_or_requirement, resource_name):
        """List the contents of the named resource directory"""
        return get_provider(package_or_requirement).resource_listdir(
            resource_name
        )

    def extraction_error(self):
        """Give an error message for problems extracting file(s)"""

        # Must be called from inside an ``except`` block: the current
        # exception is wrapped into an ExtractionError.
        old_exc = sys.exc_info()[1]
        cache_path = self.extraction_path or get_default_cache()

        tmpl = textwrap.dedent("""
            Can't extract file(s) to egg cache

            The following error occurred while trying to extract file(s)
            to the Python egg cache:

              {old_exc}

            The Python egg cache directory is currently set to:

              {cache_path}

            Perhaps your account does not have write access to this directory?
            You can change the cache directory by setting the PYTHON_EGG_CACHE
            environment variable to point to an accessible directory.
            """).lstrip()
        err = ExtractionError(tmpl.format(**locals()))
        err.manager = self
        err.cache_path = cache_path
        err.original_error = old_exc
        raise err

    def get_cache_path(self, archive_name, names=()):
        """Return absolute location in cache for `archive_name` and `names`

        The parent directory of the resulting path will be created if it does
        not already exist. `archive_name` should be the base filename of the
        enclosing egg (which may not be the name of the enclosing zipfile!),
        including its ".egg" extension. `names`, if provided, should be a
        sequence of path name parts "under" the egg's extraction location.

        This method should only be called by resource providers that need to
        obtain an extraction location, and only for names they intend to
        extract, as it tracks the generated names for possible cleanup later.
        """
        extract_path = self.extraction_path or get_default_cache()
        # The '-tmp' suffix keeps the extraction area distinct from any
        # directory literally named after the egg.
        target_path = os.path.join(extract_path, archive_name + '-tmp', *names)
        try:
            _bypass_ensure_directory(target_path)
        except Exception:
            # Converts the pending exception into a user-friendly
            # ExtractionError (and re-raises).
            self.extraction_error()

        self._warn_unsafe_extraction_path(extract_path)

        self.cached_files[target_path] = 1
        return target_path

    @staticmethod
    def _warn_unsafe_extraction_path(path):
        """
        If the default extraction path is overridden and set to an insecure
        location, such as /tmp, it opens up an opportunity for an attacker to
        replace an extracted file with an unauthorized payload. Warn the user
        if a known insecure location is used.

        See Distribute #375 for more details.
        """
        if os.name == 'nt' and not path.startswith(os.environ['windir']):
            # On Windows, permissions are generally restrictive by default
            # and temp directories are not writable by other users, so
            # bypass the warning.
            return
        mode = os.stat(path).st_mode
        # Writable by group or others -> another user could tamper with
        # extracted files before we execute/read them.
        if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
            msg = (
                "%s is writable by group/others and vulnerable to attack "
                "when "
                "used with get_resource_filename. Consider a more secure "
                "location (set with .set_extraction_path or the "
                "PYTHON_EGG_CACHE environment variable)." % path
            )
            warnings.warn(msg, UserWarning)

    def postprocess(self, tempname, filename):
        """Perform any platform-specific postprocessing of `tempname`

        This is where Mac header rewrites should be done; other platforms don't
        have anything special they should do.

        Resource providers should call this method ONLY after successfully
        extracting a compressed resource. They must NOT call it on resources
        that are already in the filesystem.

        `tempname` is the current (temporary) name of the file, and `filename`
        is the name it will be renamed to by the caller after this routine
        returns.
        """

        if os.name == 'posix':
            # Make the resource executable
            # (adds r-x for user/group/other while preserving other bits,
            # masked to the permission range)
            mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777
            os.chmod(tempname, mode)

    def set_extraction_path(self, path):
        """Set the base path where resources will be extracted to, if needed.

        If you do not call this routine before any extractions take place, the
        path defaults to the return value of ``get_default_cache()``. (Which
        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
        platform-specific fallbacks. See that routine's documentation for more
        details.)

        Resources are extracted to subdirectories of this path based upon
        information given by the ``IResourceProvider``. You may set this to a
        temporary directory, but then you must call ``cleanup_resources()`` to
        delete the extracted files when done. There is no guarantee that
        ``cleanup_resources()`` will be able to remove all extracted files.

        (Note: you may not change the extraction path for a given resource
        manager once resources have been extracted, unless you first call
        ``cleanup_resources()``.)
        """
        # cached_files being non-empty means extraction already happened;
        # changing the path now would orphan those files.
        if self.cached_files:
            raise ValueError(
                "Can't change extraction path, files already extracted"
            )

        self.extraction_path = path

    def cleanup_resources(self, force=False):
        """
        Delete all extracted resource files and directories, returning a list
        of the file and directory names that could not be successfully removed.
        This function does not have any concurrency protection, so it should
        generally only be called when the extraction path is a temporary
        directory exclusive to a single process. This method is not
        automatically called; you must call it explicitly or register it as an
        ``atexit`` function if you wish to ensure cleanup of a temporary
        directory used for extractions.
        """
        # XXX
        # NOTE(review): intentionally unimplemented in this version; it
        # returns None rather than the documented list.
+
+
def get_default_cache():
    """
    Return the ``PYTHON_EGG_CACHE`` environment variable
    or a platform-relevant user cache dir for an app
    named "Python-Eggs".
    """
    # `appdirs` is resolved elsewhere in this module (presumably the
    # vendored copy) — not visible in this chunk.
    return (
        os.environ.get('PYTHON_EGG_CACHE')
        or appdirs.user_cache_dir(appname='Python-Eggs')
    )
+
+
def safe_name(name):
    """Convert an arbitrary string to a standard distribution name

    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    """
    # Splitting on the illegal runs and re-joining with '-' is equivalent
    # to substituting each run with a single '-'.
    return '-'.join(re.split('[^A-Za-z0-9.]+', name))
+
+
def safe_version(version):
    """
    Convert an arbitrary string to a standard version string
    """
    try:
        # normalize the version
        return str(packaging.version.Version(version))
    except packaging.version.InvalidVersion:
        pass
    # Fallback for non-PEP-440 versions: spaces become dots, then any
    # remaining illegal runs collapse to a single '-'.
    dotted = version.replace(' ', '.')
    return re.sub('[^A-Za-z0-9.]+', '-', dotted)
+
+
def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name

    Any runs of non-alphanumeric characters are replaced with a single '_',
    and the result is always lowercased.
    """
    normalized = re.sub('[^A-Za-z0-9.-]+', '_', extra)
    return normalized.lower()
+
+
def to_filename(name):
    """Convert a project or version name to its filename-escaped form

    Any '-' characters are currently replaced with '_'.
    """
    return '_'.join(name.split('-'))
+
+
def invalid_marker(text):
    """
    Validate text as a PEP 508 environment marker; return an exception
    if invalid or False otherwise.
    """
    try:
        evaluate_marker(text)
    except SyntaxError as exc:
        # File/line info is meaningless for a free-standing marker string.
        exc.filename = None
        exc.lineno = None
        return exc
    else:
        return False
+
+
def evaluate_marker(text, extra=None):
    """
    Evaluate a PEP 508 environment marker.
    Return a boolean indicating the marker result in this environment.
    Raise SyntaxError if marker is invalid.

    This implementation uses the ``packaging.markers`` parser.
    """
    # NOTE: `extra` is accepted for backward compatibility but is not
    # referenced in this implementation.
    try:
        marker = packaging.markers.Marker(text)
        return marker.evaluate()
    except packaging.markers.InvalidMarker as e:
        # Normalize parse failures to SyntaxError for callers
        # (see invalid_marker).
        raise SyntaxError(e)
+
+
class NullProvider:
    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""

    # Filled in by subclasses that know about eggs (see EggProvider).
    egg_name = None
    egg_info = None
    loader = None

    def __init__(self, module):
        # The loader and on-disk location come straight from the module
        # object; modules without __file__ get an empty module_path.
        self.loader = getattr(module, '__loader__', None)
        self.module_path = os.path.dirname(getattr(module, '__file__', ''))

    def get_resource_filename(self, manager, resource_name):
        return self._fn(self.module_path, resource_name)

    def get_resource_stream(self, manager, resource_name):
        # Wrap the raw bytes in a file-like object.
        return io.BytesIO(self.get_resource_string(manager, resource_name))

    def get_resource_string(self, manager, resource_name):
        # Despite the name, returns bytes (via loader.get_data).
        return self._get(self._fn(self.module_path, resource_name))

    def has_resource(self, resource_name):
        return self._has(self._fn(self.module_path, resource_name))

    def _get_metadata_path(self, name):
        return self._fn(self.egg_info, name)

    def has_metadata(self, name):
        if not self.egg_info:
            # Returns the falsy egg_info value itself (None/'' rather
            # than False) when no metadata dir is known.
            return self.egg_info

        path = self._get_metadata_path(name)
        return self._has(path)

    def get_metadata(self, name):
        if not self.egg_info:
            return ""
        path = self._get_metadata_path(name)
        value = self._get(path)
        if six.PY2:
            # Python 2: return raw bytes unchanged.
            return value
        try:
            return value.decode('utf-8')
        except UnicodeDecodeError as exc:
            # Include the path in the error message to simplify
            # troubleshooting, and without changing the exception type.
            exc.reason += ' in {} file at path: {}'.format(name, path)
            raise

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))

    def resource_isdir(self, resource_name):
        return self._isdir(self._fn(self.module_path, resource_name))

    def metadata_isdir(self, name):
        return self.egg_info and self._isdir(self._fn(self.egg_info, name))

    def resource_listdir(self, resource_name):
        return self._listdir(self._fn(self.module_path, resource_name))

    def metadata_listdir(self, name):
        if self.egg_info:
            return self._listdir(self._fn(self.egg_info, name))
        return []

    def run_script(self, script_name, namespace):
        # Scripts live under the egg's metadata as 'scripts/<name>'.
        script = 'scripts/' + script_name
        if not self.has_metadata(script):
            raise ResolutionError(
                "Script {script!r} not found in metadata at {self.egg_info!r}"
                .format(**locals()),
            )
        # Normalize all line endings to '\n' before compiling.
        script_text = self.get_metadata(script).replace('\r\n', '\n')
        script_text = script_text.replace('\r', '\n')
        script_filename = self._fn(self.egg_info, script)
        namespace['__file__'] = script_filename
        if os.path.exists(script_filename):
            # Real file on disk: execute its current contents.
            source = open(script_filename).read()
            code = compile(source, script_filename, 'exec')
            exec(code, namespace, namespace)
        else:
            # Zipped script: seed linecache so tracebacks can show source.
            from linecache import cache
            cache[script_filename] = (
                len(script_text), 0, script_text.split('\n'), script_filename
            )
            script_code = compile(script_text, script_filename, 'exec')
            exec(script_code, namespace, namespace)

    # The _has/_isdir/_listdir/_get primitives are the per-loader hooks
    # that concrete providers override.
    def _has(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _isdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _listdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _fn(self, base, resource_name):
        # Join a '/'-separated resource name onto base using os-native
        # separators, after validating the resource path.
        self._validate_resource_path(resource_name)
        if resource_name:
            return os.path.join(base, *resource_name.split('/'))
        return base

    @staticmethod
    def _validate_resource_path(path):
        """
        Validate the resource paths according to the docs.
        https://setuptools.readthedocs.io/en/latest/pkg_resources.html#basic-resource-access

        >>> warned = getfixture('recwarn')
        >>> warnings.simplefilter('always')
        >>> vrp = NullProvider._validate_resource_path
        >>> vrp('foo/bar.txt')
        >>> bool(warned)
        False
        >>> vrp('../foo/bar.txt')
        >>> bool(warned)
        True
        >>> warned.clear()
        >>> vrp('/foo/bar.txt')
        >>> bool(warned)
        True
        >>> vrp('foo/../../bar.txt')
        >>> bool(warned)
        True
        >>> warned.clear()
        >>> vrp('foo/f../bar.txt')
        >>> bool(warned)
        False

        Windows path separators are straight-up disallowed.
        >>> vrp(r'\\foo/bar.txt')
        Traceback (most recent call last):
        ...
        ValueError: Use of .. or absolute path in a resource path \
is not allowed.

        >>> vrp(r'C:\\foo/bar.txt')
        Traceback (most recent call last):
        ...
        ValueError: Use of .. or absolute path in a resource path \
is not allowed.

        Blank values are allowed

        >>> vrp('')
        >>> bool(warned)
        False

        Non-string values are not.

        >>> vrp(None)
        Traceback (most recent call last):
        ...
        AttributeError: ...
        """
        invalid = (
            os.path.pardir in path.split(posixpath.sep) or
            posixpath.isabs(path) or
            ntpath.isabs(path)
        )
        if not invalid:
            return

        msg = "Use of .. or absolute path in a resource path is not allowed."

        # Aggressively disallow Windows absolute paths
        if ntpath.isabs(path) and not posixpath.isabs(path):
            raise ValueError(msg)

        # for compatibility, warn; in future
        # raise ValueError(msg)
        warnings.warn(
            msg[:-1] + " and will raise exceptions in a future release.",
            DeprecationWarning,
            stacklevel=4,
        )

    def _get(self, path):
        if hasattr(self.loader, 'get_data'):
            return self.loader.get_data(path)
        raise NotImplementedError(
            "Can't perform this operation for loaders without 'get_data()'"
        )
+
+
+register_loader_type(object, NullProvider)
+
+
class EggProvider(NullProvider):
    """Provider based on a virtual filesystem"""

    def __init__(self, module):
        NullProvider.__init__(self, module)
        self._setup_prefix()

    def _setup_prefix(self):
        # we assume here that our metadata may be nested inside a "basket"
        # of multiple eggs; that's why we use module_path instead of .archive
        path = self.module_path
        old = None
        # Walk up the path one component at a time until an egg directory
        # is found or the path stops shrinking (filesystem root).
        while path != old:
            if _is_egg_path(path):
                self.egg_name = os.path.basename(path)
                self.egg_info = os.path.join(path, 'EGG-INFO')
                self.egg_root = path
                break
            old = path
            path, base = os.path.split(path)
+
+
class DefaultProvider(EggProvider):
    """Provides access to package resources in the filesystem"""

    # The loader-protocol primitives map directly onto os / open().
    def _has(self, path):
        return os.path.exists(path)

    def _isdir(self, path):
        return os.path.isdir(path)

    def _listdir(self, path):
        return os.listdir(path)

    def get_resource_stream(self, manager, resource_name):
        # Real file available: stream it directly instead of buffering
        # the whole thing in a BytesIO (see NullProvider).
        return open(self._fn(self.module_path, resource_name), 'rb')

    def _get(self, path):
        with open(path, 'rb') as stream:
            return stream.read()

    @classmethod
    def _register(cls):
        # Missing loader classes resolve to type(None); registering that
        # is harmless since no real loader is an instance of it.
        loader_names = 'SourceFileLoader', 'SourcelessFileLoader',
        for name in loader_names:
            loader_cls = getattr(importlib_machinery, name, type(None))
            register_loader_type(loader_cls, cls)


# Register this provider for the standard filesystem loaders.
DefaultProvider._register()
+
+
class EmptyProvider(NullProvider):
    """Provider that returns nothing for all requests"""

    module_path = None

    # Nothing exists and nothing is a directory.
    _isdir = _has = lambda self, path: False

    def _get(self, path):
        # NOTE(review): returns '' (text) whereas other providers' _get
        # returns bytes — long-standing quirk, preserved as-is.
        return ''

    def _listdir(self, path):
        return []

    def __init__(self):
        # Deliberately skips NullProvider.__init__: there is no module.
        pass


# Shared singleton; the class is stateless so one instance suffices.
empty_provider = EmptyProvider()
+
+
class ZipManifests(dict):
    """
    zip manifest builder
    """

    @classmethod
    def build(cls, path):
        """
        Build a dictionary similar to the zipimport directory
        caches, except instead of tuples, store ZipInfo objects.

        Use a platform-specific path separator (os.sep) for the path keys
        for compatibility with pypy on Windows.
        """
        with zipfile.ZipFile(path) as archive:
            manifest = {}
            for member in archive.namelist():
                key = member.replace('/', os.sep)
                manifest[key] = archive.getinfo(member)
            return manifest

    # On the base class, loading is just (re)building.
    load = build
+
+
class MemoizedZipManifests(ZipManifests):
    """
    Memoized zipfile manifests.
    """
    # Cache entry: the parsed manifest plus the archive mtime it reflects.
    manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')

    def load(self, path):
        """
        Load a manifest at path or return a suitable manifest already loaded.
        """
        path = os.path.normpath(path)
        mtime = os.stat(path).st_mtime

        # Rebuild when the path is unseen or the archive changed on disk.
        cached = self.get(path)
        if cached is None or cached.mtime != mtime:
            cached = self.manifest_mod(self.build(path), mtime)
            self[path] = cached
        return cached.manifest
+
+
class ZipProvider(EggProvider):
    """Resource support for zips and eggs"""

    eagers = None
    # Shared across instances: manifests are keyed by archive path.
    _zip_manifests = MemoizedZipManifests()

    def __init__(self, module):
        EggProvider.__init__(self, module)
        # Prefix that every virtual path inside this archive starts with.
        self.zip_pre = self.loader.archive + os.sep

    def _zipinfo_name(self, fspath):
        # Convert a virtual filename (full path to file) into a zipfile subpath
        # usable with the zipimport directory cache for our target archive
        fspath = fspath.rstrip(os.sep)
        if fspath == self.loader.archive:
            return ''
        if fspath.startswith(self.zip_pre):
            return fspath[len(self.zip_pre):]
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath, self.zip_pre)
        )

    def _parts(self, zip_path):
        # Convert a zipfile subpath into an egg-relative path part list.
        # pseudo-fs path
        fspath = self.zip_pre + zip_path
        if fspath.startswith(self.egg_root + os.sep):
            return fspath[len(self.egg_root) + 1:].split(os.sep)
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath, self.egg_root)
        )

    @property
    def zipinfo(self):
        # Lazily loaded, memoized manifest for our archive.
        return self._zip_manifests.load(self.loader.archive)

    def get_resource_filename(self, manager, resource_name):
        if not self.egg_name:
            raise NotImplementedError(
                "resource_filename() only supported for .egg, not .zip"
            )
        # no need to lock for extraction, since we use temp names
        zip_path = self._resource_to_zip(resource_name)
        eagers = self._get_eager_resources()
        if '/'.join(self._parts(zip_path)) in eagers:
            # If this resource is eager, extract ALL eager resources
            # before returning any filename.
            for name in eagers:
                self._extract_resource(manager, self._eager_to_zip(name))
        return self._extract_resource(manager, zip_path)

    @staticmethod
    def _get_date_and_size(zip_stat):
        size = zip_stat.file_size
        # ymdhms+wday, yday, dst
        date_time = zip_stat.date_time + (0, 0, -1)
        # 1980 offset already done
        timestamp = time.mktime(date_time)
        return timestamp, size

    def _extract_resource(self, manager, zip_path):

        # Directories are extracted by recursing into each child entry.
        if zip_path in self._index():
            for name in self._index()[zip_path]:
                last = self._extract_resource(
                    manager, os.path.join(zip_path, name)
                )
            # return the extracted directory name
            return os.path.dirname(last)

        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])

        if not WRITE_SUPPORT:
            raise IOError('"os.rename" and "os.unlink" are not supported '
                          'on this platform')
        try:

            real_path = manager.get_cache_path(
                self.egg_name, self._parts(zip_path)
            )

            # Already extracted with matching size/mtime/content.
            if self._is_current(real_path, zip_path):
                return real_path

            # Write to a unique temp name, then rename into place, so
            # concurrent extractors never see a partial file.
            outf, tmpnam = _mkstemp(
                ".$extract",
                dir=os.path.dirname(real_path),
            )
            os.write(outf, self.loader.get_data(zip_path))
            os.close(outf)
            # Match the archive's timestamp so _is_current can verify it.
            utime(tmpnam, (timestamp, timestamp))
            manager.postprocess(tmpnam, real_path)

            try:
                rename(tmpnam, real_path)

            except os.error:  # os.error is an alias of OSError
                if os.path.isfile(real_path):
                    if self._is_current(real_path, zip_path):
                        # the file became current since it was checked above,
                        # so proceed.
                        return real_path
                    # Windows, del old file and retry
                    elif os.name == 'nt':
                        unlink(real_path)
                        rename(tmpnam, real_path)
                        return real_path
                raise

        except os.error:
            # report a user-friendly error
            manager.extraction_error()

        return real_path

    def _is_current(self, file_path, zip_path):
        """
        Return True if the file_path is current for this zip_path
        """
        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
        if not os.path.isfile(file_path):
            return False
        stat = os.stat(file_path)
        if stat.st_size != size or stat.st_mtime != timestamp:
            return False
        # check that the contents match
        zip_contents = self.loader.get_data(zip_path)
        with open(file_path, 'rb') as f:
            file_contents = f.read()
        return zip_contents == file_contents

    def _get_eager_resources(self):
        # Eager resources are listed in these metadata files; cached on
        # first use.
        if self.eagers is None:
            eagers = []
            for name in ('native_libs.txt', 'eager_resources.txt'):
                if self.has_metadata(name):
                    eagers.extend(self.get_metadata_lines(name))
            self.eagers = eagers
        return self.eagers

    def _index(self):
        # Build (once) a mapping of directory subpath -> list of child
        # names, derived from the archive manifest.
        try:
            return self._dirindex
        except AttributeError:
            ind = {}
            for path in self.zipinfo:
                parts = path.split(os.sep)
                # Register each ancestor directory until one is already
                # known (its own ancestors are then known too).
                while parts:
                    parent = os.sep.join(parts[:-1])
                    if parent in ind:
                        ind[parent].append(parts[-1])
                        break
                    else:
                        ind[parent] = [parts.pop()]
            self._dirindex = ind
            return ind

    def _has(self, fspath):
        zip_path = self._zipinfo_name(fspath)
        # A path exists if it is a file in the manifest or a known directory.
        return zip_path in self.zipinfo or zip_path in self._index()

    def _isdir(self, fspath):
        return self._zipinfo_name(fspath) in self._index()

    def _listdir(self, fspath):
        return list(self._index().get(self._zipinfo_name(fspath), ()))

    def _eager_to_zip(self, resource_name):
        return self._zipinfo_name(self._fn(self.egg_root, resource_name))

    def _resource_to_zip(self, resource_name):
        return self._zipinfo_name(self._fn(self.module_path, resource_name))
+
+
+register_loader_type(zipimport.zipimporter, ZipProvider)
+
+
class FileMetadata(EmptyProvider):
    """Metadata handler for standalone PKG-INFO files

    Usage::

        metadata = FileMetadata("/path/to/PKG-INFO")

    This provider rejects all data and metadata requests except for PKG-INFO,
    which is treated as existing, and will be the contents of the file at
    the provided location.
    """

    def __init__(self, path):
        self.path = path

    def _get_metadata_path(self, name):
        # Every metadata name maps to the single backing file.
        return self.path

    def has_metadata(self, name):
        # Only PKG-INFO "exists", and only if the file really does.
        return name == 'PKG-INFO' and os.path.isfile(self.path)

    def get_metadata(self, name):
        if name != 'PKG-INFO':
            raise KeyError("No metadata except PKG-INFO is available")

        with io.open(self.path, encoding='utf-8', errors="replace") as f:
            contents = f.read()
        self._warn_on_replacement(contents)
        return contents

    def _warn_on_replacement(self, metadata):
        # Python 2.7 compat for: replacement_char = '�'
        replacement_char = b'\xef\xbf\xbd'.decode('utf-8')
        if replacement_char not in metadata:
            return
        tmpl = "{self.path} could not be properly decoded in UTF-8"
        warnings.warn(tmpl.format(self=self))

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))
+
+
class PathMetadata(DefaultProvider):
    """Metadata provider for egg directories

    Usage::

        # Development eggs:

        egg_info = "/path/to/PackageName.egg-info"
        base_dir = os.path.dirname(egg_info)
        metadata = PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        dist = Distribution(basedir, project_name=dist_name, metadata=metadata)

        # Unpacked egg directories:

        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
        dist = Distribution.from_filename(egg_path, metadata=metadata)
    """

    def __init__(self, path, egg_info):
        # Bypasses EggProvider.__init__ on purpose: both locations are
        # given explicitly, so no module/prefix discovery is needed.
        self.module_path = path
        self.egg_info = egg_info
+
+
class EggMetadata(ZipProvider):
    """Metadata provider for .egg files"""

    def __init__(self, importer):
        """Create a metadata provider from a zipimporter"""

        # Bypasses ZipProvider/EggProvider.__init__: the archive location
        # comes from the importer, not from a module object.
        self.zip_pre = importer.archive + os.sep
        self.loader = importer
        if importer.prefix:
            # Importer rooted at a subdirectory inside the archive.
            self.module_path = os.path.join(importer.archive, importer.prefix)
        else:
            self.module_path = importer.archive
        self._setup_prefix()
+
+
+_declare_state('dict', _distribution_finders={})
+
+
def register_finder(importer_type, distribution_finder):
    """Register `distribution_finder` to find distributions in sys.path items

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `distribution_finder` is a callable that, passed a path
    item and the importer instance, yields ``Distribution`` instances found on
    that path item. See ``pkg_resources.find_on_path`` for an example."""
    # Later registrations for the same importer type overwrite earlier ones.
    _distribution_finders[importer_type] = distribution_finder
+
+
def find_distributions(path_item, only=False):
    """Yield distributions accessible via `path_item`"""
    # Dispatch on the importer type that handles this sys.path entry;
    # _find_adapter picks the most specific registered finder.
    importer = get_importer(path_item)
    finder = _find_adapter(_distribution_finders, importer)
    return finder(importer, path_item, only)
+
+
def find_eggs_in_zip(importer, path_item, only=False):
    """
    Find eggs in zip files; possibly multiple nested eggs.
    """
    if importer.archive.endswith('.whl'):
        # wheels are not supported with this finder
        # they don't have PKG-INFO metadata, and won't ever contain eggs
        return
    metadata = EggMetadata(importer)
    if metadata.has_metadata('PKG-INFO'):
        # The archive itself is an egg.
        yield Distribution.from_filename(path_item, metadata=metadata)
    if only:
        # don't yield nested distros
        return
    # Scan the archive root for nested eggs and .dist-info directories.
    for subitem in metadata.resource_listdir(''):
        if _is_egg_path(subitem):
            subpath = os.path.join(path_item, subitem)
            dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath)
            for dist in dists:
                yield dist
        elif subitem.lower().endswith('.dist-info'):
            subpath = os.path.join(path_item, subitem)
            submeta = EggMetadata(zipimport.zipimporter(subpath))
            submeta.egg_info = subpath
            yield Distribution.from_location(path_item, subitem, submeta)
+
+
+register_finder(zipimport.zipimporter, find_eggs_in_zip)
+
+
def find_nothing(importer, path_item, only=False):
    """Finder that yields no distributions; registered below for plain
    ``object`` so unrecognized importer types produce nothing."""
    return tuple()
+
+
+register_finder(object, find_nothing)
+
+
def _by_version_descending(names):
    """
    Given a list of filenames, return them in descending order
    by version number.

    >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg'
    >>> _by_version_descending(names)
    ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'foo', 'bar']
    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg'
    >>> _by_version_descending(names)
    ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg']
    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg'
    >>> _by_version_descending(names)
    ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg']
    """
    def _by_version(name):
        """
        Parse each component of the filename
        """
        # Split "Name-1.2.3.egg" into dash-separated parts plus the
        # extension, and parse each part as a version for comparison.
        name, ext = os.path.splitext(name)
        parts = itertools.chain(name.split('-'), [ext])
        return [packaging.version.parse(part) for part in parts]

    return sorted(names, key=_by_version, reverse=True)
+
+
def find_on_path(importer, path_item, only=False):
    """Yield distributions accessible on a sys.path directory"""
    path_item = _normalize_cached(path_item)

    if _is_unpacked_egg(path_item):
        # The path entry IS an unpacked egg: yield it and stop.
        yield Distribution.from_filename(
            path_item, metadata=PathMetadata(
                path_item, os.path.join(path_item, 'EGG-INFO')
            )
        )
        return

    entries = safe_listdir(path_item)

    # for performance, before sorting by version,
    # screen entries for only those that will yield
    # distributions
    filtered = (
        entry
        for entry in entries
        if dist_factory(path_item, entry, only)
    )

    # scan for .egg and .egg-info in directory
    path_item_entries = _by_version_descending(filtered)
    for entry in path_item_entries:
        fullpath = os.path.join(path_item, entry)
        # dist_factory is called a second time here; it returns a callable
        # producing zero or more Distributions for the entry.
        factory = dist_factory(path_item, entry, only)
        for dist in factory(fullpath):
            yield dist
+
+
def dist_factory(path_item, entry, only):
    """
    Return a dist_factory for a path_item and entry

    The result is a callable taking the entry's full path and yielding
    Distributions; the falsy NoDists() is returned for entries that
    cannot produce any.
    """
    lower = entry.lower()
    if lower.endswith('.egg-info') or lower.endswith('.dist-info'):
        return distributions_from_metadata
    if not only and _is_egg_path(entry):
        return find_distributions
    if not only and lower.endswith('.egg-link'):
        return resolve_egg_link
    return NoDists()
+
+
class NoDists:
    """
    >>> bool(NoDists())
    False

    >>> list(NoDists()('anything'))
    []
    """
    # Falsy sentinel so dist_factory's result can be used in a truth test.
    def __bool__(self):
        return False
    if six.PY2:
        # Python 2 uses __nonzero__ for truth testing.
        __nonzero__ = __bool__

    def __call__(self, fullpath):
        # Behaves like a finder that yields nothing.
        return iter(())
+
+
def safe_listdir(path):
    """
    Attempt to list contents of path, but suppress some exceptions.

    Missing paths, non-directories and permission problems yield an
    empty tuple; any other OSError propagates.
    """
    ignorable_errnos = (errno.ENOTDIR, errno.EACCES, errno.ENOENT)
    try:
        return os.listdir(path)
    except (PermissionError, NotADirectoryError):
        return ()
    except OSError as exc:
        # Python 2 on Windows needs to be handled this way :(
        win_invalid_name = getattr(exc, "winerror", None) == 267
        if exc.errno in ignorable_errnos or win_invalid_name:
            return ()
        raise
+
+
def distributions_from_metadata(path):
    """Yield at most one Distribution for a .egg-info/.dist-info path."""
    root = os.path.dirname(path)
    if os.path.isdir(path):
        if len(os.listdir(path)) == 0:
            # empty metadata dir; skip
            return
        # Directory-style metadata (e.g. Package.egg-info/).
        metadata = PathMetadata(root, path)
    else:
        # Single-file metadata (e.g. Package.egg-info as a PKG-INFO file).
        metadata = FileMetadata(path)
    entry = os.path.basename(path)
    yield Distribution.from_location(
        root, entry, metadata, precedence=DEVELOP_DIST,
    )
+
+
def non_empty_lines(path):
    """
    Yield non-empty lines from file at path

    Lines are stripped of surrounding whitespace; blank and
    whitespace-only lines are skipped.
    """
    with open(path) as stream:
        for raw in stream:
            stripped = raw.strip()
            if stripped:
                yield stripped
+
+
def resolve_egg_link(path):
    """
    Given a path to an .egg-link, resolve distributions
    present in the referenced path.
    """
    # Each non-empty line of the .egg-link is a path relative to the
    # link's own directory.
    base = os.path.dirname(path)
    resolved = (os.path.join(base, ref) for ref in non_empty_lines(path))
    # Only the first referenced path that exists is consulted.
    dist_groups = map(find_distributions, resolved)
    return next(dist_groups, ())
+
+
# Plain filesystem directories on sys.path use find_on_path.
register_finder(pkgutil.ImpImporter, find_on_path)

# importlib's FileFinder (when available) handles directories too.
if hasattr(importlib_machinery, 'FileFinder'):
    register_finder(importlib_machinery.FileFinder, find_on_path)

# Registries for namespace-package support:
# importer type -> handler, and package name -> child namespace names.
_declare_state('dict', _namespace_handlers={})
_declare_state('dict', _namespace_packages={})
+
+
def register_namespace_handler(importer_type, namespace_handler):
    """Register `namespace_handler` to declare namespace packages

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `namespace_handler` is a callable like this::

        def namespace_handler(importer, path_entry, moduleName, module):
            # return a path_entry to use for child packages

    Namespace handlers are only called if the importer object has already
    agreed that it can handle the relevant path item, and they should only
    return a subpath if the module __path__ does not already contain an
    equivalent subpath. For an example namespace handler, see
    ``pkg_resources.file_ns_handler``.
    """
    # Later registrations for the same importer type overwrite earlier ones.
    _namespace_handlers[importer_type] = namespace_handler
+
+
def _handle_ns(packageName, path_item):
    """Ensure that named package includes a subpath of path_item (if needed)"""

    importer = get_importer(path_item)
    if importer is None:
        # No importer can handle this path entry.
        return None

    # capture warnings due to #1111
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        loader = importer.find_module(packageName)

    if loader is None:
        # This path entry doesn't contain the package.
        return None
    module = sys.modules.get(packageName)
    if module is None:
        # Create a fresh namespace module with an empty __path__ and hook
        # it into its parent package.
        module = sys.modules[packageName] = types.ModuleType(packageName)
        module.__path__ = []
        _set_parent_ns(packageName)
    elif not hasattr(module, '__path__'):
        raise TypeError("Not a package:", packageName)
    handler = _find_adapter(_namespace_handlers, importer)
    subpath = handler(importer, path_item, packageName, module)
    if subpath is not None:
        # Extend the namespace package's __path__ with the new subpath,
        # execute the package's __init__, then re-sort __path__ to match
        # sys.path order.
        path = module.__path__
        path.append(subpath)
        loader.load_module(packageName)
        _rebuild_mod_path(path, packageName, module)
    return subpath
+
+
+def _rebuild_mod_path(orig_path, package_name, module):
+ """
+ Rebuild module.__path__ ensuring that all entries are ordered
+ corresponding to their sys.path order
+ """
+ sys_path = [_normalize_cached(p) for p in sys.path]
+
+ def safe_sys_path_index(entry):
+ """
+ Workaround for #520 and #513.
+ """
+ try:
+ return sys_path.index(entry)
+ except ValueError:
+ return float('inf')
+
+ def position_in_sys_path(path):
+ """
+ Return the ordinal of the path based on its position in sys.path
+ """
+ path_parts = path.split(os.sep)
+ module_parts = package_name.count('.') + 1
+ parts = path_parts[:-module_parts]
+ return safe_sys_path_index(_normalize_cached(os.sep.join(parts)))
+
+ new_path = sorted(orig_path, key=position_in_sys_path)
+ new_path = [_normalize_cached(p) for p in new_path]
+
+ if isinstance(module.__path__, list):
+ module.__path__[:] = new_path
+ else:
+ module.__path__ = new_path
+
+
+def declare_namespace(packageName):
+ """Declare that package 'packageName' is a namespace package"""
+
+ _imp.acquire_lock()
+ try:
+ if packageName in _namespace_packages:
+ return
+
+ path = sys.path
+ parent, _, _ = packageName.rpartition('.')
+
+ if parent:
+ declare_namespace(parent)
+ if parent not in _namespace_packages:
+ __import__(parent)
+ try:
+ path = sys.modules[parent].__path__
+ except AttributeError:
+ raise TypeError("Not a package:", parent)
+
+ # Track what packages are namespaces, so when new path items are added,
+ # they can be updated
+ _namespace_packages.setdefault(parent or None, []).append(packageName)
+ _namespace_packages.setdefault(packageName, [])
+
+ for path_item in path:
+ # Ensure all the parent's path items are reflected in the child,
+ # if they apply
+ _handle_ns(packageName, path_item)
+
+ finally:
+ _imp.release_lock()
+
+
+def fixup_namespace_packages(path_item, parent=None):
+ """Ensure that previously-declared namespace packages include path_item"""
+ _imp.acquire_lock()
+ try:
+ for package in _namespace_packages.get(parent, ()):
+ subpath = _handle_ns(package, path_item)
+ if subpath:
+ fixup_namespace_packages(subpath, package)
+ finally:
+ _imp.release_lock()
+
+
+def file_ns_handler(importer, path_item, packageName, module):
+ """Compute an ns-package subpath for a filesystem or zipfile importer"""
+
+ subpath = os.path.join(path_item, packageName.split('.')[-1])
+ normalized = _normalize_cached(subpath)
+ for item in module.__path__:
+ if _normalize_cached(item) == normalized:
+ break
+ else:
+ # Only return the path if it's not already there
+ return subpath
+
+
+register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
+register_namespace_handler(zipimport.zipimporter, file_ns_handler)
+
+if hasattr(importlib_machinery, 'FileFinder'):
+ register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler)
+
+
+def null_ns_handler(importer, path_item, packageName, module):
+ return None
+
+
+register_namespace_handler(object, null_ns_handler)
+
+
+def normalize_path(filename):
+ """Normalize a file/dir name for comparison purposes"""
+ return os.path.normcase(os.path.realpath(os.path.normpath(_cygwin_patch(filename))))
+
+
+def _cygwin_patch(filename): # pragma: nocover
+ """
+ Contrary to POSIX 2008, on Cygwin, getcwd (3) contains
+ symlink components. Using
+ os.path.abspath() works around this limitation. A fix in os.getcwd()
+ would probably better, in Cygwin even more so, except
+ that this seems to be by design...
+ """
+ return os.path.abspath(filename) if sys.platform == 'cygwin' else filename
+
+
+def _normalize_cached(filename, _cache={}):
+ try:
+ return _cache[filename]
+ except KeyError:
+ _cache[filename] = result = normalize_path(filename)
+ return result
+
+
+def _is_egg_path(path):
+ """
+ Determine if given path appears to be an egg.
+ """
+ return path.lower().endswith('.egg')
+
+
+def _is_unpacked_egg(path):
+ """
+ Determine if given path appears to be an unpacked egg.
+ """
+ return (
+ _is_egg_path(path) and
+ os.path.isfile(os.path.join(path, 'EGG-INFO', 'PKG-INFO'))
+ )
+
+
+def _set_parent_ns(packageName):
+ parts = packageName.split('.')
+ name = parts.pop()
+ if parts:
+ parent = '.'.join(parts)
+ setattr(sys.modules[parent], name, sys.modules[packageName])
+
+
+def yield_lines(strs):
+ """Yield non-empty/non-comment lines of a string or sequence"""
+ if isinstance(strs, six.string_types):
+ for s in strs.splitlines():
+ s = s.strip()
+ # skip blank lines/comments
+ if s and not s.startswith('#'):
+ yield s
+ else:
+ for ss in strs:
+ for s in yield_lines(ss):
+ yield s
+
+
+MODULE = re.compile(r"\w+(\.\w+)*$").match
+EGG_NAME = re.compile(
+ r"""
+ (?P<name>[^-]+) (
+ -(?P<ver>[^-]+) (
+ -py(?P<pyver>[^-]+) (
+ -(?P<plat>.+)
+ )?
+ )?
+ )?
+ """,
+ re.VERBOSE | re.IGNORECASE,
+).match
+
+
+class EntryPoint:
+ """Object representing an advertised importable object"""
+
+ def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
+ if not MODULE(module_name):
+ raise ValueError("Invalid module name", module_name)
+ self.name = name
+ self.module_name = module_name
+ self.attrs = tuple(attrs)
+ self.extras = tuple(extras)
+ self.dist = dist
+
+ def __str__(self):
+ s = "%s = %s" % (self.name, self.module_name)
+ if self.attrs:
+ s += ':' + '.'.join(self.attrs)
+ if self.extras:
+ s += ' [%s]' % ','.join(self.extras)
+ return s
+
+ def __repr__(self):
+ return "EntryPoint.parse(%r)" % str(self)
+
+ def load(self, require=True, *args, **kwargs):
+ """
+ Require packages for this EntryPoint, then resolve it.
+ """
+ if not require or args or kwargs:
+ warnings.warn(
+ "Parameters to load are deprecated. Call .resolve and "
+ ".require separately.",
+ PkgResourcesDeprecationWarning,
+ stacklevel=2,
+ )
+ if require:
+ self.require(*args, **kwargs)
+ return self.resolve()
+
+ def resolve(self):
+ """
+ Resolve the entry point from its module and attrs.
+ """
+ module = __import__(self.module_name, fromlist=['__name__'], level=0)
+ try:
+ return functools.reduce(getattr, self.attrs, module)
+ except AttributeError as exc:
+ raise ImportError(str(exc))
+
+ def require(self, env=None, installer=None):
+ if self.extras and not self.dist:
+ raise UnknownExtra("Can't require() without a distribution", self)
+
+ # Get the requirements for this entry point with all its extras and
+ # then resolve them. We have to pass `extras` along when resolving so
+ # that the working set knows what extras we want. Otherwise, for
+ # dist-info distributions, the working set will assume that the
+ # requirements for that extra are purely optional and skip over them.
+ reqs = self.dist.requires(self.extras)
+ items = working_set.resolve(reqs, env, installer, extras=self.extras)
+ list(map(working_set.add, items))
+
+ pattern = re.compile(
+ r'\s*'
+ r'(?P<name>.+?)\s*'
+ r'=\s*'
+ r'(?P<module>[\w.]+)\s*'
+ r'(:\s*(?P<attr>[\w.]+))?\s*'
+ r'(?P<extras>\[.*\])?\s*$'
+ )
+
+ @classmethod
+ def parse(cls, src, dist=None):
+ """Parse a single entry point from string `src`
+
+ Entry point syntax follows the form::
+
+ name = some.module:some.attr [extra1, extra2]
+
+ The entry name and module name are required, but the ``:attrs`` and
+ ``[extras]`` parts are optional
+ """
+ m = cls.pattern.match(src)
+ if not m:
+ msg = "EntryPoint must be in 'name=module:attrs [extras]' format"
+ raise ValueError(msg, src)
+ res = m.groupdict()
+ extras = cls._parse_extras(res['extras'])
+ attrs = res['attr'].split('.') if res['attr'] else ()
+ return cls(res['name'], res['module'], attrs, extras, dist)
+
+ @classmethod
+ def _parse_extras(cls, extras_spec):
+ if not extras_spec:
+ return ()
+ req = Requirement.parse('x' + extras_spec)
+ if req.specs:
+ raise ValueError()
+ return req.extras
+
+ @classmethod
+ def parse_group(cls, group, lines, dist=None):
+ """Parse an entry point group"""
+ if not MODULE(group):
+ raise ValueError("Invalid group name", group)
+ this = {}
+ for line in yield_lines(lines):
+ ep = cls.parse(line, dist)
+ if ep.name in this:
+ raise ValueError("Duplicate entry point", group, ep.name)
+ this[ep.name] = ep
+ return this
+
+ @classmethod
+ def parse_map(cls, data, dist=None):
+ """Parse a map of entry point groups"""
+ if isinstance(data, dict):
+ data = data.items()
+ else:
+ data = split_sections(data)
+ maps = {}
+ for group, lines in data:
+ if group is None:
+ if not lines:
+ continue
+ raise ValueError("Entry points must be listed in groups")
+ group = group.strip()
+ if group in maps:
+ raise ValueError("Duplicate group name", group)
+ maps[group] = cls.parse_group(group, lines, dist)
+ return maps
+
+
+def _remove_md5_fragment(location):
+ if not location:
+ return ''
+ parsed = urllib.parse.urlparse(location)
+ if parsed[-1].startswith('md5='):
+ return urllib.parse.urlunparse(parsed[:-1] + ('',))
+ return location
+
+
+def _version_from_file(lines):
+ """
+ Given an iterable of lines from a Metadata file, return
+ the value of the Version field, if present, or None otherwise.
+ """
+ def is_version_line(line):
+ return line.lower().startswith('version:')
+ version_lines = filter(is_version_line, lines)
+ line = next(iter(version_lines), '')
+ _, _, value = line.partition(':')
+ return safe_version(value.strip()) or None
+
+
+class Distribution:
+ """Wrap an actual or potential sys.path entry w/metadata"""
+ PKG_INFO = 'PKG-INFO'
+
+ def __init__(
+ self, location=None, metadata=None, project_name=None,
+ version=None, py_version=PY_MAJOR, platform=None,
+ precedence=EGG_DIST):
+ self.project_name = safe_name(project_name or 'Unknown')
+ if version is not None:
+ self._version = safe_version(version)
+ self.py_version = py_version
+ self.platform = platform
+ self.location = location
+ self.precedence = precedence
+ self._provider = metadata or empty_provider
+
+ @classmethod
+ def from_location(cls, location, basename, metadata=None, **kw):
+ project_name, version, py_version, platform = [None] * 4
+ basename, ext = os.path.splitext(basename)
+ if ext.lower() in _distributionImpl:
+ cls = _distributionImpl[ext.lower()]
+
+ match = EGG_NAME(basename)
+ if match:
+ project_name, version, py_version, platform = match.group(
+ 'name', 'ver', 'pyver', 'plat'
+ )
+ return cls(
+ location, metadata, project_name=project_name, version=version,
+ py_version=py_version, platform=platform, **kw
+ )._reload_version()
+
+ def _reload_version(self):
+ return self
+
+ @property
+ def hashcmp(self):
+ return (
+ self.parsed_version,
+ self.precedence,
+ self.key,
+ _remove_md5_fragment(self.location),
+ self.py_version or '',
+ self.platform or '',
+ )
+
+ def __hash__(self):
+ return hash(self.hashcmp)
+
+ def __lt__(self, other):
+ return self.hashcmp < other.hashcmp
+
+ def __le__(self, other):
+ return self.hashcmp <= other.hashcmp
+
+ def __gt__(self, other):
+ return self.hashcmp > other.hashcmp
+
+ def __ge__(self, other):
+ return self.hashcmp >= other.hashcmp
+
+ def __eq__(self, other):
+ if not isinstance(other, self.__class__):
+ # It's not a Distribution, so they are not equal
+ return False
+ return self.hashcmp == other.hashcmp
+
+ def __ne__(self, other):
+ return not self == other
+
+ # These properties have to be lazy so that we don't have to load any
+ # metadata until/unless it's actually needed. (i.e., some distributions
+ # may not know their name or version without loading PKG-INFO)
+
+ @property
+ def key(self):
+ try:
+ return self._key
+ except AttributeError:
+ self._key = key = self.project_name.lower()
+ return key
+
+ @property
+ def parsed_version(self):
+ if not hasattr(self, "_parsed_version"):
+ self._parsed_version = parse_version(self.version)
+
+ return self._parsed_version
+
+ def _warn_legacy_version(self):
+ LV = packaging.version.LegacyVersion
+ is_legacy = isinstance(self._parsed_version, LV)
+ if not is_legacy:
+ return
+
+ # While an empty version is technically a legacy version and
+ # is not a valid PEP 440 version, it's also unlikely to
+ # actually come from someone and instead it is more likely that
+ # it comes from setuptools attempting to parse a filename and
+ # including it in the list. So for that we'll gate this warning
+ # on if the version is anything at all or not.
+ if not self.version:
+ return
+
+ tmpl = textwrap.dedent("""
+ '{project_name} ({version})' is being parsed as a legacy,
+ non PEP 440,
+ version. You may find odd behavior and sort order.
+ In particular it will be sorted as less than 0.0. It
+ is recommended to migrate to PEP 440 compatible
+ versions.
+ """).strip().replace('\n', ' ')
+
+ warnings.warn(tmpl.format(**vars(self)), PEP440Warning)
+
+ @property
+ def version(self):
+ try:
+ return self._version
+ except AttributeError:
+ version = self._get_version()
+ if version is None:
+ path = self._get_metadata_path_for_display(self.PKG_INFO)
+ msg = (
+ "Missing 'Version:' header and/or {} file at path: {}"
+ ).format(self.PKG_INFO, path)
+ raise ValueError(msg, self)
+
+ return version
+
+ @property
+ def _dep_map(self):
+ """
+ A map of extra to its list of (direct) requirements
+ for this distribution, including the null extra.
+ """
+ try:
+ return self.__dep_map
+ except AttributeError:
+ self.__dep_map = self._filter_extras(self._build_dep_map())
+ return self.__dep_map
+
+ @staticmethod
+ def _filter_extras(dm):
+ """
+ Given a mapping of extras to dependencies, strip off
+ environment markers and filter out any dependencies
+ not matching the markers.
+ """
+ for extra in list(filter(None, dm)):
+ new_extra = extra
+ reqs = dm.pop(extra)
+ new_extra, _, marker = extra.partition(':')
+ fails_marker = marker and (
+ invalid_marker(marker)
+ or not evaluate_marker(marker)
+ )
+ if fails_marker:
+ reqs = []
+ new_extra = safe_extra(new_extra) or None
+
+ dm.setdefault(new_extra, []).extend(reqs)
+ return dm
+
+ def _build_dep_map(self):
+ dm = {}
+ for name in 'requires.txt', 'depends.txt':
+ for extra, reqs in split_sections(self._get_metadata(name)):
+ dm.setdefault(extra, []).extend(parse_requirements(reqs))
+ return dm
+
+ def requires(self, extras=()):
+ """List of Requirements needed for this distro if `extras` are used"""
+ dm = self._dep_map
+ deps = []
+ deps.extend(dm.get(None, ()))
+ for ext in extras:
+ try:
+ deps.extend(dm[safe_extra(ext)])
+ except KeyError:
+ raise UnknownExtra(
+ "%s has no such extra feature %r" % (self, ext)
+ )
+ return deps
+
+ def _get_metadata_path_for_display(self, name):
+ """
+ Return the path to the given metadata file, if available.
+ """
+ try:
+ # We need to access _get_metadata_path() on the provider object
+ # directly rather than through this class's __getattr__()
+ # since _get_metadata_path() is marked private.
+ path = self._provider._get_metadata_path(name)
+
+ # Handle exceptions e.g. in case the distribution's metadata
+ # provider doesn't support _get_metadata_path().
+ except Exception:
+ return '[could not detect]'
+
+ return path
+
+ def _get_metadata(self, name):
+ if self.has_metadata(name):
+ for line in self.get_metadata_lines(name):
+ yield line
+
+ def _get_version(self):
+ lines = self._get_metadata(self.PKG_INFO)
+ version = _version_from_file(lines)
+
+ return version
+
+ def activate(self, path=None, replace=False):
+ """Ensure distribution is importable on `path` (default=sys.path)"""
+ if path is None:
+ path = sys.path
+ self.insert_on(path, replace=replace)
+ if path is sys.path:
+ fixup_namespace_packages(self.location)
+ for pkg in self._get_metadata('namespace_packages.txt'):
+ if pkg in sys.modules:
+ declare_namespace(pkg)
+
+ def egg_name(self):
+ """Return what this distribution's standard .egg filename should be"""
+ filename = "%s-%s-py%s" % (
+ to_filename(self.project_name), to_filename(self.version),
+ self.py_version or PY_MAJOR
+ )
+
+ if self.platform:
+ filename += '-' + self.platform
+ return filename
+
+ def __repr__(self):
+ if self.location:
+ return "%s (%s)" % (self, self.location)
+ else:
+ return str(self)
+
+ def __str__(self):
+ try:
+ version = getattr(self, 'version', None)
+ except ValueError:
+ version = None
+ version = version or "[unknown version]"
+ return "%s %s" % (self.project_name, version)
+
+ def __getattr__(self, attr):
+ """Delegate all unrecognized public attributes to .metadata provider"""
+ if attr.startswith('_'):
+ raise AttributeError(attr)
+ return getattr(self._provider, attr)
+
+ def __dir__(self):
+ return list(
+ set(super(Distribution, self).__dir__())
+ | set(
+ attr for attr in self._provider.__dir__()
+ if not attr.startswith('_')
+ )
+ )
+
+ if not hasattr(object, '__dir__'):
+ # python 2.7 not supported
+ del __dir__
+
+ @classmethod
+ def from_filename(cls, filename, metadata=None, **kw):
+ return cls.from_location(
+ _normalize_cached(filename), os.path.basename(filename), metadata,
+ **kw
+ )
+
+ def as_requirement(self):
+ """Return a ``Requirement`` that matches this distribution exactly"""
+ if isinstance(self.parsed_version, packaging.version.Version):
+ spec = "%s==%s" % (self.project_name, self.parsed_version)
+ else:
+ spec = "%s===%s" % (self.project_name, self.parsed_version)
+
+ return Requirement.parse(spec)
+
+ def load_entry_point(self, group, name):
+ """Return the `name` entry point of `group` or raise ImportError"""
+ ep = self.get_entry_info(group, name)
+ if ep is None:
+ raise ImportError("Entry point %r not found" % ((group, name),))
+ return ep.load()
+
+ def get_entry_map(self, group=None):
+ """Return the entry point map for `group`, or the full entry map"""
+ try:
+ ep_map = self._ep_map
+ except AttributeError:
+ ep_map = self._ep_map = EntryPoint.parse_map(
+ self._get_metadata('entry_points.txt'), self
+ )
+ if group is not None:
+ return ep_map.get(group, {})
+ return ep_map
+
+ def get_entry_info(self, group, name):
+ """Return the EntryPoint object for `group`+`name`, or ``None``"""
+ return self.get_entry_map(group).get(name)
+
+ def insert_on(self, path, loc=None, replace=False):
+ """Ensure self.location is on path
+
+ If replace=False (default):
+ - If location is already in path anywhere, do nothing.
+ - Else:
+ - If it's an egg and its parent directory is on path,
+ insert just ahead of the parent.
+ - Else: add to the end of path.
+ If replace=True:
+ - If location is already on path anywhere (not eggs)
+ or higher priority than its parent (eggs)
+ do nothing.
+ - Else:
+ - If it's an egg and its parent directory is on path,
+ insert just ahead of the parent,
+ removing any lower-priority entries.
+ - Else: add it to the front of path.
+ """
+
+ loc = loc or self.location
+ if not loc:
+ return
+
+ nloc = _normalize_cached(loc)
+ bdir = os.path.dirname(nloc)
+ npath = [(p and _normalize_cached(p) or p) for p in path]
+
+ for p, item in enumerate(npath):
+ if item == nloc:
+ if replace:
+ break
+ else:
+ # don't modify path (even removing duplicates) if
+ # found and not replace
+ return
+ elif item == bdir and self.precedence == EGG_DIST:
+ # if it's an .egg, give it precedence over its directory
+ # UNLESS it's already been added to sys.path and replace=False
+ if (not replace) and nloc in npath[p:]:
+ return
+ if path is sys.path:
+ self.check_version_conflict()
+ path.insert(p, loc)
+ npath.insert(p, nloc)
+ break
+ else:
+ if path is sys.path:
+ self.check_version_conflict()
+ if replace:
+ path.insert(0, loc)
+ else:
+ path.append(loc)
+ return
+
+ # p is the spot where we found or inserted loc; now remove duplicates
+ while True:
+ try:
+ np = npath.index(nloc, p + 1)
+ except ValueError:
+ break
+ else:
+ del npath[np], path[np]
+ # ha!
+ p = np
+
+ return
+
+ def check_version_conflict(self):
+ if self.key == 'setuptools':
+ # ignore the inevitable setuptools self-conflicts :(
+ return
+
+ nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
+ loc = normalize_path(self.location)
+ for modname in self._get_metadata('top_level.txt'):
+ if (modname not in sys.modules or modname in nsp
+ or modname in _namespace_packages):
+ continue
+ if modname in ('pkg_resources', 'setuptools', 'site'):
+ continue
+ fn = getattr(sys.modules[modname], '__file__', None)
+ if fn and (normalize_path(fn).startswith(loc) or
+ fn.startswith(self.location)):
+ continue
+ issue_warning(
+ "Module %s was already imported from %s, but %s is being added"
+ " to sys.path" % (modname, fn, self.location),
+ )
+
+ def has_version(self):
+ try:
+ self.version
+ except ValueError:
+ issue_warning("Unbuilt egg for " + repr(self))
+ return False
+ return True
+
+ def clone(self, **kw):
+ """Copy this distribution, substituting in any changed keyword args"""
+ names = 'project_name version py_version platform location precedence'
+ for attr in names.split():
+ kw.setdefault(attr, getattr(self, attr, None))
+ kw.setdefault('metadata', self._provider)
+ return self.__class__(**kw)
+
+ @property
+ def extras(self):
+ return [dep for dep in self._dep_map if dep]
+
+
+class EggInfoDistribution(Distribution):
+ def _reload_version(self):
+ """
+ Packages installed by distutils (e.g. numpy or scipy),
+ which uses an old safe_version, and so
+ their version numbers can get mangled when
+ converted to filenames (e.g., 1.11.0.dev0+2329eae to
+ 1.11.0.dev0_2329eae). These distributions will not be
+ parsed properly
+ downstream by Distribution and safe_version, so
+ take an extra step and try to get the version number from
+ the metadata file itself instead of the filename.
+ """
+ md_version = self._get_version()
+ if md_version:
+ self._version = md_version
+ return self
+
+
+class DistInfoDistribution(Distribution):
+ """
+ Wrap an actual or potential sys.path entry
+ w/metadata, .dist-info style.
+ """
+ PKG_INFO = 'METADATA'
+ EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")
+
+ @property
+ def _parsed_pkg_info(self):
+ """Parse and cache metadata"""
+ try:
+ return self._pkg_info
+ except AttributeError:
+ metadata = self.get_metadata(self.PKG_INFO)
+ self._pkg_info = email.parser.Parser().parsestr(metadata)
+ return self._pkg_info
+
+ @property
+ def _dep_map(self):
+ try:
+ return self.__dep_map
+ except AttributeError:
+ self.__dep_map = self._compute_dependencies()
+ return self.__dep_map
+
+ def _compute_dependencies(self):
+ """Recompute this distribution's dependencies."""
+ dm = self.__dep_map = {None: []}
+
+ reqs = []
+ # Including any condition expressions
+ for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
+ reqs.extend(parse_requirements(req))
+
+ def reqs_for_extra(extra):
+ for req in reqs:
+ if not req.marker or req.marker.evaluate({'extra': extra}):
+ yield req
+
+ common = frozenset(reqs_for_extra(None))
+ dm[None].extend(common)
+
+ for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
+ s_extra = safe_extra(extra.strip())
+ dm[s_extra] = list(frozenset(reqs_for_extra(extra)) - common)
+
+ return dm
+
+
+_distributionImpl = {
+ '.egg': Distribution,
+ '.egg-info': EggInfoDistribution,
+ '.dist-info': DistInfoDistribution,
+}
+
+
+def issue_warning(*args, **kw):
+ level = 1
+ g = globals()
+ try:
+ # find the first stack frame that is *not* code in
+ # the pkg_resources module, to use for the warning
+ while sys._getframe(level).f_globals is g:
+ level += 1
+ except ValueError:
+ pass
+ warnings.warn(stacklevel=level + 1, *args, **kw)
+
+
+class RequirementParseError(ValueError):
+ def __str__(self):
+ return ' '.join(self.args)
+
+
+def parse_requirements(strs):
+ """Yield ``Requirement`` objects for each specification in `strs`
+
+ `strs` must be a string, or a (possibly-nested) iterable thereof.
+ """
+ # create a steppable iterator, so we can handle \-continuations
+ lines = iter(yield_lines(strs))
+
+ for line in lines:
+ # Drop comments -- a hash without a space may be in a URL.
+ if ' #' in line:
+ line = line[:line.find(' #')]
+ # If there is a line continuation, drop it, and append the next line.
+ if line.endswith('\\'):
+ line = line[:-2].strip()
+ try:
+ line += next(lines)
+ except StopIteration:
+ return
+ yield Requirement(line)
+
+
+class Requirement(packaging.requirements.Requirement):
+ def __init__(self, requirement_string):
+ """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
+ try:
+ super(Requirement, self).__init__(requirement_string)
+ except packaging.requirements.InvalidRequirement as e:
+ raise RequirementParseError(str(e))
+ self.unsafe_name = self.name
+ project_name = safe_name(self.name)
+ self.project_name, self.key = project_name, project_name.lower()
+ self.specs = [
+ (spec.operator, spec.version) for spec in self.specifier]
+ self.extras = tuple(map(safe_extra, self.extras))
+ self.hashCmp = (
+ self.key,
+ self.url,
+ self.specifier,
+ frozenset(self.extras),
+ str(self.marker) if self.marker else None,
+ )
+ self.__hash = hash(self.hashCmp)
+
+ def __eq__(self, other):
+ return (
+ isinstance(other, Requirement) and
+ self.hashCmp == other.hashCmp
+ )
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __contains__(self, item):
+ if isinstance(item, Distribution):
+ if item.key != self.key:
+ return False
+
+ item = item.version
+
+ # Allow prereleases always in order to match the previous behavior of
+ # this method. In the future this should be smarter and follow PEP 440
+ # more accurately.
+ return self.specifier.contains(item, prereleases=True)
+
+ def __hash__(self):
+ return self.__hash
+
+ def __repr__(self):
+ return "Requirement.parse(%r)" % str(self)
+
+ @staticmethod
+ def parse(s):
+ req, = parse_requirements(s)
+ return req
+
+
+def _always_object(classes):
+ """
+ Ensure object appears in the mro even
+ for old-style classes.
+ """
+ if object not in classes:
+ return classes + (object,)
+ return classes
+
+
+def _find_adapter(registry, ob):
+ """Return an adapter factory for `ob` from `registry`"""
+ types = _always_object(inspect.getmro(getattr(ob, '__class__', type(ob))))
+ for t in types:
+ if t in registry:
+ return registry[t]
+
+
+def ensure_directory(path):
+ """Ensure that the parent directory of `path` exists"""
+ dirname = os.path.dirname(path)
+ py31compat.makedirs(dirname, exist_ok=True)
+
+
+def _bypass_ensure_directory(path):
+ """Sandbox-bypassing version of ensure_directory()"""
+ if not WRITE_SUPPORT:
+ raise IOError('"os.mkdir" not supported on this platform.')
+ dirname, filename = split(path)
+ if dirname and filename and not isdir(dirname):
+ _bypass_ensure_directory(dirname)
+ try:
+ mkdir(dirname, 0o755)
+ except FileExistsError:
+ pass
+
+
+def split_sections(s):
+ """Split a string or iterable thereof into (section, content) pairs
+
+ Each ``section`` is a stripped version of the section header ("[section]")
+ and each ``content`` is a list of stripped lines excluding blank lines and
+ comment-only lines. If there are any such lines before the first section
+ header, they're returned in a first ``section`` of ``None``.
+ """
+ section = None
+ content = []
+ for line in yield_lines(s):
+ if line.startswith("["):
+ if line.endswith("]"):
+ if section or content:
+ yield section, content
+ section = line[1:-1].strip()
+ content = []
+ else:
+ raise ValueError("Invalid section heading", line)
+ else:
+ content.append(line)
+
+ # wrap up last segment
+ yield section, content
+
+
+def _mkstemp(*args, **kw):
+ old_open = os.open
+ try:
+ # temporarily bypass sandboxing
+ os.open = os_open
+ return tempfile.mkstemp(*args, **kw)
+ finally:
+ # and then put it back
+ os.open = old_open
+
+
+# Silence the PEP440Warning by default, so that end users don't get hit by it
+# randomly just because they use pkg_resources. We want to append the rule
+# because we want earlier uses of filterwarnings to take precedence over this
+# one.
+warnings.filterwarnings("ignore", category=PEP440Warning, append=True)
+
+
+# from jaraco.functools 1.3
+def _call_aside(f, *args, **kwargs):
+ f(*args, **kwargs)
+ return f
+
+
+@_call_aside
+def _initialize(g=globals()):
+ "Set up global resource manager (deliberately not state-saved)"
+ manager = ResourceManager()
+ g['_manager'] = manager
+ g.update(
+ (name, getattr(manager, name))
+ for name in dir(manager)
+ if not name.startswith('_')
+ )
+
+
+@_call_aside
+def _initialize_master_working_set():
+ """
+ Prepare the master working set and make the ``require()``
+ API available.
+
+ This function has explicit effects on the global state
+ of pkg_resources. It is intended to be invoked once at
+ the initialization of this module.
+
+ Invocation by other packages is unsupported and done
+ at their own risk.
+ """
+ working_set = WorkingSet._build_master()
+ _declare_state('object', working_set=working_set)
+
+ require = working_set.require
+ iter_entry_points = working_set.iter_entry_points
+ add_activation_listener = working_set.subscribe
+ run_script = working_set.run_script
+ # backward compatibility
+ run_main = run_script
+ # Activate all distributions already on sys.path with replace=False and
+ # ensure that all distributions added to the working set in the future
+ # (e.g. by calling ``require()``) will get activated as well,
+ # with higher priority (replace=True).
+ tuple(
+ dist.activate(replace=False)
+ for dist in working_set
+ )
+ add_activation_listener(
+ lambda dist: dist.activate(replace=True),
+ existing=False,
+ )
+ working_set.entries = []
+ # match order
+ list(map(working_set.add_entry, sys.path))
+ globals().update(locals())
+
+class PkgResourcesDeprecationWarning(Warning):
+ """
+ Base class for warning about deprecations in ``pkg_resources``
+
+ This class is not derived from ``DeprecationWarning``, and as such is
+ visible by default.
+ """
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pkg_resources/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..87db116390e37cb696efe294c538db3579846af1
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pkg_resources/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources/__pycache__/py31compat.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pkg_resources/__pycache__/py31compat.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3be2eb2f03cf9caf085becad079f763fc93a21a0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pkg_resources/__pycache__/py31compat.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/__init__.py b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e2328323537ba092e1c83b50bca657405525d178
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/__pycache__/appdirs.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/__pycache__/appdirs.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6d9ab44c21ba05a1980f862b7ac100926852ade7
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/__pycache__/appdirs.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/__pycache__/pyparsing.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/__pycache__/pyparsing.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e6c79f7b6f055a7dcbfde5c6be40863e6048e92d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/__pycache__/pyparsing.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/__pycache__/six.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/__pycache__/six.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..417979f1321850e1a45c5748f67902528a98a724
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/__pycache__/six.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/appdirs.py b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/appdirs.py
new file mode 100644
index 0000000000000000000000000000000000000000..ae67001af8b661373edeee2eb327b9f63e630d62
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/appdirs.py
@@ -0,0 +1,608 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2005-2010 ActiveState Software Inc.
+# Copyright (c) 2013 Eddy Petrișor
+
+"""Utilities for determining application-specific dirs.
+
+See <http://github.com/ActiveState/appdirs> for details and usage.
+"""
+# Dev Notes:
+# - MSDN on where to store app data files:
+# http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120
+# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html
+# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html
+
+__version_info__ = (1, 4, 3)
+__version__ = '.'.join(map(str, __version_info__))
+
+
+import sys
+import os
+
+PY3 = sys.version_info[0] == 3
+
+if PY3:
+ unicode = str
+
+if sys.platform.startswith('java'):
+ import platform
+ os_name = platform.java_ver()[3][0]
+ if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc.
+ system = 'win32'
+ elif os_name.startswith('Mac'): # "Mac OS X", etc.
+ system = 'darwin'
+ else: # "Linux", "SunOS", "FreeBSD", etc.
+ # Setting this to "linux2" is not ideal, but only Windows or Mac
+ # are actually checked for and the rest of the module expects
+ # *sys.platform* style strings.
+ system = 'linux2'
+else:
+ system = sys.platform
+
+
+
+def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
+ r"""Return full path to the user-specific data dir for this application.
+
+ "appname" is the name of application.
+ If None, just the system directory is returned.
+ "appauthor" (only used on Windows) is the name of the
+ appauthor or distributing body for this application. Typically
+ it is the owning company name. This falls back to appname. You may
+ pass False to disable it.
+ "version" is an optional version path element to append to the
+ path. You might want to use this if you want multiple versions
+ of your app to be able to run independently. If used, this
+ would typically be "<major>.<minor>".
+ Only applied when appname is present.
+ "roaming" (boolean, default False) can be set True to use the Windows
+ roaming appdata directory. That means that for users on a Windows
+ network setup for roaming profiles, this user data will be
+ sync'd on login. See
+ <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
+ for a discussion of issues.
+
+ Typical user data directories are:
+ Mac OS X: ~/Library/Application Support/<AppName>
+ Unix: ~/.local/share/<AppName> # or in $XDG_DATA_HOME, if defined
+ Win XP (not roaming): C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName>
+ Win XP (roaming): C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>
+ Win 7 (not roaming): C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>
+ Win 7 (roaming): C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName>
+
+ For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
+ That means, by default "~/.local/share/<AppName>".
+ """
+ if system == "win32":
+ if appauthor is None:
+ appauthor = appname
+ const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA"
+ path = os.path.normpath(_get_win_folder(const))
+ if appname:
+ if appauthor is not False:
+ path = os.path.join(path, appauthor, appname)
+ else:
+ path = os.path.join(path, appname)
+ elif system == 'darwin':
+ path = os.path.expanduser('~/Library/Application Support/')
+ if appname:
+ path = os.path.join(path, appname)
+ else:
+ path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share"))
+ if appname:
+ path = os.path.join(path, appname)
+ if appname and version:
+ path = os.path.join(path, version)
+ return path
+
+
+def site_data_dir(appname=None, appauthor=None, version=None, multipath=False):
+ r"""Return full path to the user-shared data dir for this application.
+
+ "appname" is the name of application.
+ If None, just the system directory is returned.
+ "appauthor" (only used on Windows) is the name of the
+ appauthor or distributing body for this application. Typically
+ it is the owning company name. This falls back to appname. You may
+ pass False to disable it.
+ "version" is an optional version path element to append to the
+ path. You might want to use this if you want multiple versions
+ of your app to be able to run independently. If used, this
+ would typically be "<major>.<minor>".
+ Only applied when appname is present.
+ "multipath" is an optional parameter only applicable to *nix
+ which indicates that the entire list of data dirs should be
+ returned. By default, the first item from XDG_DATA_DIRS is
+ returned, or '/usr/local/share/<AppName>',
+ if XDG_DATA_DIRS is not set
+
+ Typical site data directories are:
+ Mac OS X: /Library/Application Support/<AppName>
+ Unix: /usr/local/share/<AppName> or /usr/share/<AppName>
+ Win XP: C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName>
+ Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
+ Win 7: C:\ProgramData\<AppAuthor>\<AppName> # Hidden, but writeable on Win 7.
+
+ For Unix, this is using the $XDG_DATA_DIRS[0] default.
+
+ WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
+ """
+ if system == "win32":
+ if appauthor is None:
+ appauthor = appname
+ path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
+ if appname:
+ if appauthor is not False:
+ path = os.path.join(path, appauthor, appname)
+ else:
+ path = os.path.join(path, appname)
+ elif system == 'darwin':
+ path = os.path.expanduser('/Library/Application Support')
+ if appname:
+ path = os.path.join(path, appname)
+ else:
+ # XDG default for $XDG_DATA_DIRS
+ # only first, if multipath is False
+ path = os.getenv('XDG_DATA_DIRS',
+ os.pathsep.join(['/usr/local/share', '/usr/share']))
+ pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
+ if appname:
+ if version:
+ appname = os.path.join(appname, version)
+ pathlist = [os.sep.join([x, appname]) for x in pathlist]
+
+ if multipath:
+ path = os.pathsep.join(pathlist)
+ else:
+ path = pathlist[0]
+ return path
+
+ if appname and version:
+ path = os.path.join(path, version)
+ return path
+
+
+def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
+ r"""Return full path to the user-specific config dir for this application.
+
+ "appname" is the name of application.
+ If None, just the system directory is returned.
+ "appauthor" (only used on Windows) is the name of the
+ appauthor or distributing body for this application. Typically
+ it is the owning company name. This falls back to appname. You may
+ pass False to disable it.
+ "version" is an optional version path element to append to the
+ path. You might want to use this if you want multiple versions
+ of your app to be able to run independently. If used, this
+ would typically be "<major>.<minor>".
+ Only applied when appname is present.
+ "roaming" (boolean, default False) can be set True to use the Windows
+ roaming appdata directory. That means that for users on a Windows
+ network setup for roaming profiles, this user data will be
+ sync'd on login. See
+ <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
+ for a discussion of issues.
+
+ Typical user config directories are:
+ Mac OS X: same as user_data_dir
+ Unix: ~/.config/<AppName> # or in $XDG_CONFIG_HOME, if defined
+ Win *: same as user_data_dir
+
+ For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
+ That means, by default "~/.config/<AppName>".
+ """
+ if system in ["win32", "darwin"]:
+ path = user_data_dir(appname, appauthor, None, roaming)
+ else:
+ path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config"))
+ if appname:
+ path = os.path.join(path, appname)
+ if appname and version:
+ path = os.path.join(path, version)
+ return path
+
+
+def site_config_dir(appname=None, appauthor=None, version=None, multipath=False):
+ r"""Return full path to the user-shared data dir for this application.
+
+ "appname" is the name of application.
+ If None, just the system directory is returned.
+ "appauthor" (only used on Windows) is the name of the
+ appauthor or distributing body for this application. Typically
+ it is the owning company name. This falls back to appname. You may
+ pass False to disable it.
+ "version" is an optional version path element to append to the
+ path. You might want to use this if you want multiple versions
+ of your app to be able to run independently. If used, this
+ would typically be "<major>.<minor>".
+ Only applied when appname is present.
+ "multipath" is an optional parameter only applicable to *nix
+ which indicates that the entire list of config dirs should be
+ returned. By default, the first item from XDG_CONFIG_DIRS is
+ returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set
+
+ Typical site config directories are:
+ Mac OS X: same as site_data_dir
+ Unix: /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in
+ $XDG_CONFIG_DIRS
+ Win *: same as site_data_dir
+ Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
+
+ For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False
+
+ WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
+ """
+ if system in ["win32", "darwin"]:
+ path = site_data_dir(appname, appauthor)
+ if appname and version:
+ path = os.path.join(path, version)
+ else:
+ # XDG default for $XDG_CONFIG_DIRS
+ # only first, if multipath is False
+ path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
+ pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
+ if appname:
+ if version:
+ appname = os.path.join(appname, version)
+ pathlist = [os.sep.join([x, appname]) for x in pathlist]
+
+ if multipath:
+ path = os.pathsep.join(pathlist)
+ else:
+ path = pathlist[0]
+ return path
+
+
+def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True):
+ r"""Return full path to the user-specific cache dir for this application.
+
+ "appname" is the name of application.
+ If None, just the system directory is returned.
+ "appauthor" (only used on Windows) is the name of the
+ appauthor or distributing body for this application. Typically
+ it is the owning company name. This falls back to appname. You may
+ pass False to disable it.
+ "version" is an optional version path element to append to the
+ path. You might want to use this if you want multiple versions
+ of your app to be able to run independently. If used, this
+ would typically be "<major>.<minor>".
+ Only applied when appname is present.
+ "opinion" (boolean) can be False to disable the appending of
+ "Cache" to the base app data dir for Windows. See
+ discussion below.
+
+ Typical user cache directories are:
+ Mac OS X: ~/Library/Caches/<AppName>
+ Unix: ~/.cache/<AppName> (XDG default)
+ Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache
+ Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache
+
+ On Windows the only suggestion in the MSDN docs is that local settings go in
+ the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming
+ app data dir (the default returned by `user_data_dir` above). Apps typically
+ put cache data somewhere *under* the given dir here. Some examples:
+ ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
+ ...\Acme\SuperApp\Cache\1.0
+ OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
+ This can be disabled with the `opinion=False` option.
+ """
+ if system == "win32":
+ if appauthor is None:
+ appauthor = appname
+ path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
+ if appname:
+ if appauthor is not False:
+ path = os.path.join(path, appauthor, appname)
+ else:
+ path = os.path.join(path, appname)
+ if opinion:
+ path = os.path.join(path, "Cache")
+ elif system == 'darwin':
+ path = os.path.expanduser('~/Library/Caches')
+ if appname:
+ path = os.path.join(path, appname)
+ else:
+ path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
+ if appname:
+ path = os.path.join(path, appname)
+ if appname and version:
+ path = os.path.join(path, version)
+ return path
+
+
+def user_state_dir(appname=None, appauthor=None, version=None, roaming=False):
+ r"""Return full path to the user-specific state dir for this application.
+
+ "appname" is the name of application.
+ If None, just the system directory is returned.
+ "appauthor" (only used on Windows) is the name of the
+ appauthor or distributing body for this application. Typically
+ it is the owning company name. This falls back to appname. You may
+ pass False to disable it.
+ "version" is an optional version path element to append to the
+ path. You might want to use this if you want multiple versions
+ of your app to be able to run independently. If used, this
+ would typically be "<major>.<minor>".
+ Only applied when appname is present.
+ "roaming" (boolean, default False) can be set True to use the Windows
+ roaming appdata directory. That means that for users on a Windows
+ network setup for roaming profiles, this user data will be
+ sync'd on login. See
+ <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
+ for a discussion of issues.
+
+ Typical user state directories are:
+ Mac OS X: same as user_data_dir
+ Unix: ~/.local/state/<AppName> # or in $XDG_STATE_HOME, if defined
+ Win *: same as user_data_dir
+
+ For Unix, we follow this Debian proposal <https://wiki.debian.org/XDGBaseDirectorySpecification#state>
+ to extend the XDG spec and support $XDG_STATE_HOME.
+
+ That means, by default "~/.local/state/<AppName>".
+ """
+ if system in ["win32", "darwin"]:
+ path = user_data_dir(appname, appauthor, None, roaming)
+ else:
+ path = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state"))
+ if appname:
+ path = os.path.join(path, appname)
+ if appname and version:
+ path = os.path.join(path, version)
+ return path
+
+
+def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
+ r"""Return full path to the user-specific log dir for this application.
+
+ "appname" is the name of application.
+ If None, just the system directory is returned.
+ "appauthor" (only used on Windows) is the name of the
+ appauthor or distributing body for this application. Typically
+ it is the owning company name. This falls back to appname. You may
+ pass False to disable it.
+ "version" is an optional version path element to append to the
+ path. You might want to use this if you want multiple versions
+ of your app to be able to run independently. If used, this
+ would typically be "<major>.<minor>".
+ Only applied when appname is present.
+ "opinion" (boolean) can be False to disable the appending of
+ "Logs" to the base app data dir for Windows, and "log" to the
+ base cache dir for Unix. See discussion below.
+
+ Typical user log directories are:
+ Mac OS X: ~/Library/Logs/<AppName>
+ Unix: ~/.cache/<AppName>/log # or under $XDG_CACHE_HOME if defined
+ Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs
+ Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs
+
+ On Windows the only suggestion in the MSDN docs is that local settings
+ go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in
+ examples of what some windows apps use for a logs dir.)
+
+ OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA`
+ value for Windows and appends "log" to the user cache dir for Unix.
+ This can be disabled with the `opinion=False` option.
+ """
+ if system == "darwin":
+ path = os.path.join(
+ os.path.expanduser('~/Library/Logs'),
+ appname)
+ elif system == "win32":
+ path = user_data_dir(appname, appauthor, version)
+ version = False
+ if opinion:
+ path = os.path.join(path, "Logs")
+ else:
+ path = user_cache_dir(appname, appauthor, version)
+ version = False
+ if opinion:
+ path = os.path.join(path, "log")
+ if appname and version:
+ path = os.path.join(path, version)
+ return path
+
+
+class AppDirs(object):
+ """Convenience wrapper for getting application dirs."""
+ def __init__(self, appname=None, appauthor=None, version=None,
+ roaming=False, multipath=False):
+ self.appname = appname
+ self.appauthor = appauthor
+ self.version = version
+ self.roaming = roaming
+ self.multipath = multipath
+
+ @property
+ def user_data_dir(self):
+ return user_data_dir(self.appname, self.appauthor,
+ version=self.version, roaming=self.roaming)
+
+ @property
+ def site_data_dir(self):
+ return site_data_dir(self.appname, self.appauthor,
+ version=self.version, multipath=self.multipath)
+
+ @property
+ def user_config_dir(self):
+ return user_config_dir(self.appname, self.appauthor,
+ version=self.version, roaming=self.roaming)
+
+ @property
+ def site_config_dir(self):
+ return site_config_dir(self.appname, self.appauthor,
+ version=self.version, multipath=self.multipath)
+
+ @property
+ def user_cache_dir(self):
+ return user_cache_dir(self.appname, self.appauthor,
+ version=self.version)
+
+ @property
+ def user_state_dir(self):
+ return user_state_dir(self.appname, self.appauthor,
+ version=self.version)
+
+ @property
+ def user_log_dir(self):
+ return user_log_dir(self.appname, self.appauthor,
+ version=self.version)
+
+
+#---- internal support stuff
+
+def _get_win_folder_from_registry(csidl_name):
+ """This is a fallback technique at best. I'm not sure if using the
+ registry for this guarantees us the correct answer for all CSIDL_*
+ names.
+ """
+ if PY3:
+ import winreg as _winreg
+ else:
+ import _winreg
+
+ shell_folder_name = {
+ "CSIDL_APPDATA": "AppData",
+ "CSIDL_COMMON_APPDATA": "Common AppData",
+ "CSIDL_LOCAL_APPDATA": "Local AppData",
+ }[csidl_name]
+
+ key = _winreg.OpenKey(
+ _winreg.HKEY_CURRENT_USER,
+ r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
+ )
+ dir, type = _winreg.QueryValueEx(key, shell_folder_name)
+ return dir
+
+
+def _get_win_folder_with_pywin32(csidl_name):
+ from win32com.shell import shellcon, shell
+ dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0)
+ # Try to make this a unicode path because SHGetFolderPath does
+ # not return unicode strings when there is unicode data in the
+ # path.
+ try:
+ dir = unicode(dir)
+
+ # Downgrade to short path name if have highbit chars. See
+ # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
+ has_high_char = False
+ for c in dir:
+ if ord(c) > 255:
+ has_high_char = True
+ break
+ if has_high_char:
+ try:
+ import win32api
+ dir = win32api.GetShortPathName(dir)
+ except ImportError:
+ pass
+ except UnicodeError:
+ pass
+ return dir
+
+
+def _get_win_folder_with_ctypes(csidl_name):
+ import ctypes
+
+ csidl_const = {
+ "CSIDL_APPDATA": 26,
+ "CSIDL_COMMON_APPDATA": 35,
+ "CSIDL_LOCAL_APPDATA": 28,
+ }[csidl_name]
+
+ buf = ctypes.create_unicode_buffer(1024)
+ ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)
+
+ # Downgrade to short path name if have highbit chars. See
+ # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
+ has_high_char = False
+ for c in buf:
+ if ord(c) > 255:
+ has_high_char = True
+ break
+ if has_high_char:
+ buf2 = ctypes.create_unicode_buffer(1024)
+ if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
+ buf = buf2
+
+ return buf.value
+
+def _get_win_folder_with_jna(csidl_name):
+ import array
+ from com.sun import jna
+ from com.sun.jna.platform import win32
+
+ buf_size = win32.WinDef.MAX_PATH * 2
+ buf = array.zeros('c', buf_size)
+ shell = win32.Shell32.INSTANCE
+ shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf)
+ dir = jna.Native.toString(buf.tostring()).rstrip("\0")
+
+ # Downgrade to short path name if have highbit chars. See
+ # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
+ has_high_char = False
+ for c in dir:
+ if ord(c) > 255:
+ has_high_char = True
+ break
+ if has_high_char:
+ buf = array.zeros('c', buf_size)
+ kernel = win32.Kernel32.INSTANCE
+ if kernel.GetShortPathName(dir, buf, buf_size):
+ dir = jna.Native.toString(buf.tostring()).rstrip("\0")
+
+ return dir
+
+if system == "win32":
+ try:
+ import win32com.shell
+ _get_win_folder = _get_win_folder_with_pywin32
+ except ImportError:
+ try:
+ from ctypes import windll
+ _get_win_folder = _get_win_folder_with_ctypes
+ except ImportError:
+ try:
+ import com.sun.jna
+ _get_win_folder = _get_win_folder_with_jna
+ except ImportError:
+ _get_win_folder = _get_win_folder_from_registry
+
+
+#---- self test code
+
+if __name__ == "__main__":
+ appname = "MyApp"
+ appauthor = "MyCompany"
+
+ props = ("user_data_dir",
+ "user_config_dir",
+ "user_cache_dir",
+ "user_state_dir",
+ "user_log_dir",
+ "site_data_dir",
+ "site_config_dir")
+
+ print("-- app dirs %s --" % __version__)
+
+ print("-- app dirs (with optional 'version')")
+ dirs = AppDirs(appname, appauthor, version="1.0")
+ for prop in props:
+ print("%s: %s" % (prop, getattr(dirs, prop)))
+
+ print("\n-- app dirs (without optional 'version')")
+ dirs = AppDirs(appname, appauthor)
+ for prop in props:
+ print("%s: %s" % (prop, getattr(dirs, prop)))
+
+ print("\n-- app dirs (without optional 'appauthor')")
+ dirs = AppDirs(appname)
+ for prop in props:
+ print("%s: %s" % (prop, getattr(dirs, prop)))
+
+ print("\n-- app dirs (with disabled 'appauthor')")
+ dirs = AppDirs(appname, appauthor=False)
+ for prop in props:
+ print("%s: %s" % (prop, getattr(dirs, prop)))
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__about__.py b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__about__.py
new file mode 100644
index 0000000000000000000000000000000000000000..95d330ef823aa2e12f7846bc63c0955b25df6029
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__about__.py
@@ -0,0 +1,21 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+__all__ = [
+ "__title__", "__summary__", "__uri__", "__version__", "__author__",
+ "__email__", "__license__", "__copyright__",
+]
+
+__title__ = "packaging"
+__summary__ = "Core utilities for Python packages"
+__uri__ = "https://github.com/pypa/packaging"
+
+__version__ = "16.8"
+
+__author__ = "Donald Stufft and individual contributors"
+__email__ = "donald@stufft.io"
+
+__license__ = "BSD or Apache License, Version 2.0"
+__copyright__ = "Copyright 2014-2016 %s" % __author__
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__init__.py b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..5ee6220203e5425f900fb5a43676c24ea377c2fa
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__init__.py
@@ -0,0 +1,14 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+from .__about__ import (
+ __author__, __copyright__, __email__, __license__, __summary__, __title__,
+ __uri__, __version__
+)
+
+__all__ = [
+ "__title__", "__summary__", "__uri__", "__version__", "__author__",
+ "__email__", "__license__", "__copyright__",
+]
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/__about__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/__about__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..99c9dfd421039b69100bfa2b35ea51c2c78d8a3e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/__about__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..23915dd15183416636a8b98836dc15661c282668
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/_compat.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/_compat.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4ef4c1e3648f7639780b41df8341b18326aa159c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/_compat.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/_structures.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/_structures.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f890f16de9b564675df3800db20bf8547505d6a7
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/_structures.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/markers.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/markers.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3b2a4cffbc35a17843a4d6f39a0dae1fd38cef37
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/markers.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/requirements.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/requirements.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2aba9627ea60debbfac63b20da6bff2c09bfd7cb
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/requirements.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/specifiers.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/specifiers.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2b1328d96964e4ec8184c60b2ff0a31fd86d013f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/specifiers.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/utils.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/utils.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..369f328c40c361c5c2a9c887ad7a6194a20cd411
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/utils.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/version.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/version.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9ef6aea11ed8c03e014462ad005251fc9200984a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/version.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/_compat.py b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/_compat.py
new file mode 100644
index 0000000000000000000000000000000000000000..210bb80b7e7b64cb79f7e7cdf3e42819fe3471fe
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/_compat.py
@@ -0,0 +1,30 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import sys
+
+
+PY2 = sys.version_info[0] == 2
+PY3 = sys.version_info[0] == 3
+
+# flake8: noqa
+
+if PY3:
+ string_types = str,
+else:
+ string_types = basestring,
+
+
+def with_metaclass(meta, *bases):
+ """
+ Create a base class with a metaclass.
+ """
+ # This requires a bit of explanation: the basic idea is to make a dummy
+ # metaclass for one level of class instantiation that replaces itself with
+ # the actual metaclass.
+ class metaclass(meta):
+ def __new__(cls, name, this_bases, d):
+ return meta(name, bases, d)
+ return type.__new__(metaclass, 'temporary_class', (), {})
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/_structures.py b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/_structures.py
new file mode 100644
index 0000000000000000000000000000000000000000..ccc27861c3a4d9efaa3db753c77c4515a627bd98
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/_structures.py
@@ -0,0 +1,68 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+
+class Infinity(object):
+
+ def __repr__(self):
+ return "Infinity"
+
+ def __hash__(self):
+ return hash(repr(self))
+
+ def __lt__(self, other):
+ return False
+
+ def __le__(self, other):
+ return False
+
+ def __eq__(self, other):
+ return isinstance(other, self.__class__)
+
+ def __ne__(self, other):
+ return not isinstance(other, self.__class__)
+
+ def __gt__(self, other):
+ return True
+
+ def __ge__(self, other):
+ return True
+
+ def __neg__(self):
+ return NegativeInfinity
+
+Infinity = Infinity()
+
+
+class NegativeInfinity(object):
+
+ def __repr__(self):
+ return "-Infinity"
+
+ def __hash__(self):
+ return hash(repr(self))
+
+ def __lt__(self, other):
+ return True
+
+ def __le__(self, other):
+ return True
+
+ def __eq__(self, other):
+ return isinstance(other, self.__class__)
+
+ def __ne__(self, other):
+ return not isinstance(other, self.__class__)
+
+ def __gt__(self, other):
+ return False
+
+ def __ge__(self, other):
+ return False
+
+ def __neg__(self):
+ return Infinity
+
+NegativeInfinity = NegativeInfinity()
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/markers.py b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/markers.py
new file mode 100644
index 0000000000000000000000000000000000000000..892e578edd4b992cc2996c31d9deb13af73d62c0
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/markers.py
@@ -0,0 +1,301 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import operator
+import os
+import platform
+import sys
+
+from pkg_resources.extern.pyparsing import ParseException, ParseResults, stringStart, stringEnd
+from pkg_resources.extern.pyparsing import ZeroOrMore, Group, Forward, QuotedString
+from pkg_resources.extern.pyparsing import Literal as L # noqa
+
+from ._compat import string_types
+from .specifiers import Specifier, InvalidSpecifier
+
+
+__all__ = [
+ "InvalidMarker", "UndefinedComparison", "UndefinedEnvironmentName",
+ "Marker", "default_environment",
+]
+
+
+class InvalidMarker(ValueError):
+ """
+ An invalid marker was found, users should refer to PEP 508.
+ """
+
+
+class UndefinedComparison(ValueError):
+ """
+ An invalid operation was attempted on a value that doesn't support it.
+ """
+
+
+class UndefinedEnvironmentName(ValueError):
+ """
+ A name was attempted to be used that does not exist inside of the
+ environment.
+ """
+
+
+class Node(object):
+
+ def __init__(self, value):
+ self.value = value
+
+ def __str__(self):
+ return str(self.value)
+
+ def __repr__(self):
+ return "<{0}({1!r})>".format(self.__class__.__name__, str(self))
+
+ def serialize(self):
+ raise NotImplementedError
+
+
+class Variable(Node):
+
+ def serialize(self):
+ return str(self)
+
+
+class Value(Node):
+
+ def serialize(self):
+ return '"{0}"'.format(self)
+
+
+class Op(Node):
+
+ def serialize(self):
+ return str(self)
+
+
+VARIABLE = (
+ L("implementation_version") |
+ L("platform_python_implementation") |
+ L("implementation_name") |
+ L("python_full_version") |
+ L("platform_release") |
+ L("platform_version") |
+ L("platform_machine") |
+ L("platform_system") |
+ L("python_version") |
+ L("sys_platform") |
+ L("os_name") |
+ L("os.name") | # PEP-345
+ L("sys.platform") | # PEP-345
+ L("platform.version") | # PEP-345
+ L("platform.machine") | # PEP-345
+ L("platform.python_implementation") | # PEP-345
+ L("python_implementation") | # undocumented setuptools legacy
+ L("extra")
+)
+ALIASES = {
+ 'os.name': 'os_name',
+ 'sys.platform': 'sys_platform',
+ 'platform.version': 'platform_version',
+ 'platform.machine': 'platform_machine',
+ 'platform.python_implementation': 'platform_python_implementation',
+ 'python_implementation': 'platform_python_implementation'
+}
+VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))
+
+VERSION_CMP = (
+ L("===") |
+ L("==") |
+ L(">=") |
+ L("<=") |
+ L("!=") |
+ L("~=") |
+ L(">") |
+ L("<")
+)
+
+MARKER_OP = VERSION_CMP | L("not in") | L("in")
+MARKER_OP.setParseAction(lambda s, l, t: Op(t[0]))
+
+MARKER_VALUE = QuotedString("'") | QuotedString('"')
+MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0]))
+
+BOOLOP = L("and") | L("or")
+
+MARKER_VAR = VARIABLE | MARKER_VALUE
+
+MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR)
+MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0]))
+
+LPAREN = L("(").suppress()
+RPAREN = L(")").suppress()
+
+MARKER_EXPR = Forward()
+MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN)
+MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR)
+
+MARKER = stringStart + MARKER_EXPR + stringEnd
+
+
+def _coerce_parse_result(results):
+ if isinstance(results, ParseResults):
+ return [_coerce_parse_result(i) for i in results]
+ else:
+ return results
+
+
+def _format_marker(marker, first=True):
+ assert isinstance(marker, (list, tuple, string_types))
+
+ # Sometimes we have a structure like [[...]] which is a single item list
+ # where the single item is itself it's own list. In that case we want skip
+ # the rest of this function so that we don't get extraneous () on the
+ # outside.
+ if (isinstance(marker, list) and len(marker) == 1 and
+ isinstance(marker[0], (list, tuple))):
+ return _format_marker(marker[0])
+
+ if isinstance(marker, list):
+ inner = (_format_marker(m, first=False) for m in marker)
+ if first:
+ return " ".join(inner)
+ else:
+ return "(" + " ".join(inner) + ")"
+ elif isinstance(marker, tuple):
+ return " ".join([m.serialize() for m in marker])
+ else:
+ return marker
+
+
+_operators = {
+ "in": lambda lhs, rhs: lhs in rhs,
+ "not in": lambda lhs, rhs: lhs not in rhs,
+ "<": operator.lt,
+ "<=": operator.le,
+ "==": operator.eq,
+ "!=": operator.ne,
+ ">=": operator.ge,
+ ">": operator.gt,
+}
+
+
+def _eval_op(lhs, op, rhs):
+ try:
+ spec = Specifier("".join([op.serialize(), rhs]))
+ except InvalidSpecifier:
+ pass
+ else:
+ return spec.contains(lhs)
+
+ oper = _operators.get(op.serialize())
+ if oper is None:
+ raise UndefinedComparison(
+ "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs)
+ )
+
+ return oper(lhs, rhs)
+
+
+_undefined = object()
+
+
+def _get_env(environment, name):
+ value = environment.get(name, _undefined)
+
+ if value is _undefined:
+ raise UndefinedEnvironmentName(
+ "{0!r} does not exist in evaluation environment.".format(name)
+ )
+
+ return value
+
+
+def _evaluate_markers(markers, environment):
+ groups = [[]]
+
+ for marker in markers:
+ assert isinstance(marker, (list, tuple, string_types))
+
+ if isinstance(marker, list):
+ groups[-1].append(_evaluate_markers(marker, environment))
+ elif isinstance(marker, tuple):
+ lhs, op, rhs = marker
+
+ if isinstance(lhs, Variable):
+ lhs_value = _get_env(environment, lhs.value)
+ rhs_value = rhs.value
+ else:
+ lhs_value = lhs.value
+ rhs_value = _get_env(environment, rhs.value)
+
+ groups[-1].append(_eval_op(lhs_value, op, rhs_value))
+ else:
+ assert marker in ["and", "or"]
+ if marker == "or":
+ groups.append([])
+
+ return any(all(item) for item in groups)
+
+
+def format_full_version(info):
+ version = '{0.major}.{0.minor}.{0.micro}'.format(info)
+ kind = info.releaselevel
+ if kind != 'final':
+ version += kind[0] + str(info.serial)
+ return version
+
+
+def default_environment():
+ if hasattr(sys, 'implementation'):
+ iver = format_full_version(sys.implementation.version)
+ implementation_name = sys.implementation.name
+ else:
+ iver = '0'
+ implementation_name = ''
+
+ return {
+ "implementation_name": implementation_name,
+ "implementation_version": iver,
+ "os_name": os.name,
+ "platform_machine": platform.machine(),
+ "platform_release": platform.release(),
+ "platform_system": platform.system(),
+ "platform_version": platform.version(),
+ "python_full_version": platform.python_version(),
+ "platform_python_implementation": platform.python_implementation(),
+ "python_version": platform.python_version()[:3],
+ "sys_platform": sys.platform,
+ }
+
+
+class Marker(object):
+
+ def __init__(self, marker):
+ try:
+ self._markers = _coerce_parse_result(MARKER.parseString(marker))
+ except ParseException as e:
+ err_str = "Invalid marker: {0!r}, parse error at {1!r}".format(
+ marker, marker[e.loc:e.loc + 8])
+ raise InvalidMarker(err_str)
+
+ def __str__(self):
+ return _format_marker(self._markers)
+
+ def __repr__(self):
+ return "<Marker({0!r})>".format(str(self))
+
+ def evaluate(self, environment=None):
+ """Evaluate a marker.
+
+ Return the boolean from evaluating the given marker against the
+ environment. environment is an optional argument to override all or
+ part of the determined environment.
+
+ The environment is determined from the current Python process.
+ """
+ current_environment = default_environment()
+ if environment is not None:
+ current_environment.update(environment)
+
+ return _evaluate_markers(self._markers, current_environment)
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/requirements.py b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/requirements.py
new file mode 100644
index 0000000000000000000000000000000000000000..0c8c4a3852fd37053fd552846aa7787805c30a48
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/requirements.py
@@ -0,0 +1,127 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import string
+import re
+
+from pkg_resources.extern.pyparsing import stringStart, stringEnd, originalTextFor, ParseException
+from pkg_resources.extern.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine
+from pkg_resources.extern.pyparsing import Literal as L # noqa
+from pkg_resources.extern.six.moves.urllib import parse as urlparse
+
+from .markers import MARKER_EXPR, Marker
+from .specifiers import LegacySpecifier, Specifier, SpecifierSet
+
+
+class InvalidRequirement(ValueError):
+ """
+ An invalid requirement was found, users should refer to PEP 508.
+ """
+
+
+ALPHANUM = Word(string.ascii_letters + string.digits)
+
+LBRACKET = L("[").suppress()
+RBRACKET = L("]").suppress()
+LPAREN = L("(").suppress()
+RPAREN = L(")").suppress()
+COMMA = L(",").suppress()
+SEMICOLON = L(";").suppress()
+AT = L("@").suppress()
+
+PUNCTUATION = Word("-_.")
+IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM)
+IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END))
+
+NAME = IDENTIFIER("name")
+EXTRA = IDENTIFIER
+
+URI = Regex(r'[^ ]+')("url")
+URL = (AT + URI)
+
+EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA)
+EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras")
+
+VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE)
+VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE)
+
+VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY
+VERSION_MANY = Combine(VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE),
+ joinString=",", adjacent=False)("_raw_spec")
+_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY))
+_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or '')
+
+VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier")
+VERSION_SPEC.setParseAction(lambda s, l, t: t[1])
+
+MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
+MARKER_EXPR.setParseAction(
+ lambda s, l, t: Marker(s[t._original_start:t._original_end])
+)
+MARKER_SEPERATOR = SEMICOLON
+MARKER = MARKER_SEPERATOR + MARKER_EXPR
+
+VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
+URL_AND_MARKER = URL + Optional(MARKER)
+
+NAMED_REQUIREMENT = \
+ NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)
+
+REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd
+
+
+class Requirement(object):
+ """Parse a requirement.
+
+ Parse a given requirement string into its parts, such as name, specifier,
+ URL, and extras. Raises InvalidRequirement on a badly-formed requirement
+ string.
+ """
+
+ # TODO: Can we test whether something is contained within a requirement?
+ # If so how do we do that? Do we need to test against the _name_ of
+ # the thing as well as the version? What about the markers?
+ # TODO: Can we normalize the name and extra name?
+
+ def __init__(self, requirement_string):
+ try:
+ req = REQUIREMENT.parseString(requirement_string)
+ except ParseException as e:
+ raise InvalidRequirement(
+ "Invalid requirement, parse error at \"{0!r}\"".format(
+ requirement_string[e.loc:e.loc + 8]))
+
+ self.name = req.name
+ if req.url:
+ parsed_url = urlparse.urlparse(req.url)
+ if not (parsed_url.scheme and parsed_url.netloc) or (
+ not parsed_url.scheme and not parsed_url.netloc):
+ raise InvalidRequirement("Invalid URL given")
+ self.url = req.url
+ else:
+ self.url = None
+ self.extras = set(req.extras.asList() if req.extras else [])
+ self.specifier = SpecifierSet(req.specifier)
+ self.marker = req.marker if req.marker else None
+
+ def __str__(self):
+ parts = [self.name]
+
+ if self.extras:
+ parts.append("[{0}]".format(",".join(sorted(self.extras))))
+
+ if self.specifier:
+ parts.append(str(self.specifier))
+
+ if self.url:
+ parts.append("@ {0}".format(self.url))
+
+ if self.marker:
+ parts.append("; {0}".format(self.marker))
+
+ return "".join(parts)
+
+ def __repr__(self):
+ return "<Requirement({0!r})>".format(str(self))
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/specifiers.py b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/specifiers.py
new file mode 100644
index 0000000000000000000000000000000000000000..7f5a76cfd63f47dcce29b3ea82f59d10f4e8d771
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/specifiers.py
@@ -0,0 +1,774 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import abc
+import functools
+import itertools
+import re
+
+from ._compat import string_types, with_metaclass
+from .version import Version, LegacyVersion, parse
+
+
+class InvalidSpecifier(ValueError):
+ """
+ An invalid specifier was found, users should refer to PEP 440.
+ """
+
+
+class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
+
+ @abc.abstractmethod
+ def __str__(self):
+ """
+ Returns the str representation of this Specifier like object. This
+ should be representative of the Specifier itself.
+ """
+
+ @abc.abstractmethod
+ def __hash__(self):
+ """
+ Returns a hash value for this Specifier like object.
+ """
+
+ @abc.abstractmethod
+ def __eq__(self, other):
+ """
+ Returns a boolean representing whether or not the two Specifier like
+ objects are equal.
+ """
+
+ @abc.abstractmethod
+ def __ne__(self, other):
+ """
+ Returns a boolean representing whether or not the two Specifier like
+ objects are not equal.
+ """
+
+ @abc.abstractproperty
+ def prereleases(self):
+ """
+ Returns whether or not pre-releases as a whole are allowed by this
+ specifier.
+ """
+
+ @prereleases.setter
+ def prereleases(self, value):
+ """
+ Sets whether or not pre-releases as a whole are allowed by this
+ specifier.
+ """
+
+ @abc.abstractmethod
+ def contains(self, item, prereleases=None):
+ """
+ Determines if the given item is contained within this specifier.
+ """
+
+ @abc.abstractmethod
+ def filter(self, iterable, prereleases=None):
+ """
+ Takes an iterable of items and filters them so that only items which
+ are contained within this specifier are allowed in it.
+ """
+
+
+class _IndividualSpecifier(BaseSpecifier):
+
+ _operators = {}
+
+ def __init__(self, spec="", prereleases=None):
+ match = self._regex.search(spec)
+ if not match:
+ raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec))
+
+ self._spec = (
+ match.group("operator").strip(),
+ match.group("version").strip(),
+ )
+
+ # Store whether or not this Specifier should accept prereleases
+ self._prereleases = prereleases
+
+ def __repr__(self):
+ pre = (
+ ", prereleases={0!r}".format(self.prereleases)
+ if self._prereleases is not None
+ else ""
+ )
+
+ return "<{0}({1!r}{2})>".format(
+ self.__class__.__name__,
+ str(self),
+ pre,
+ )
+
+ def __str__(self):
+ return "{0}{1}".format(*self._spec)
+
+ def __hash__(self):
+ return hash(self._spec)
+
+ def __eq__(self, other):
+ if isinstance(other, string_types):
+ try:
+ other = self.__class__(other)
+ except InvalidSpecifier:
+ return NotImplemented
+ elif not isinstance(other, self.__class__):
+ return NotImplemented
+
+ return self._spec == other._spec
+
+ def __ne__(self, other):
+ if isinstance(other, string_types):
+ try:
+ other = self.__class__(other)
+ except InvalidSpecifier:
+ return NotImplemented
+ elif not isinstance(other, self.__class__):
+ return NotImplemented
+
+ return self._spec != other._spec
+
+ def _get_operator(self, op):
+ return getattr(self, "_compare_{0}".format(self._operators[op]))
+
+ def _coerce_version(self, version):
+ if not isinstance(version, (LegacyVersion, Version)):
+ version = parse(version)
+ return version
+
+ @property
+ def operator(self):
+ return self._spec[0]
+
+ @property
+ def version(self):
+ return self._spec[1]
+
+ @property
+ def prereleases(self):
+ return self._prereleases
+
+ @prereleases.setter
+ def prereleases(self, value):
+ self._prereleases = value
+
+ def __contains__(self, item):
+ return self.contains(item)
+
+ def contains(self, item, prereleases=None):
+ # Determine if prereleases are to be allowed or not.
+ if prereleases is None:
+ prereleases = self.prereleases
+
+ # Normalize item to a Version or LegacyVersion, this allows us to have
+ # a shortcut for ``"2.0" in Specifier(">=2")
+ item = self._coerce_version(item)
+
+ # Determine if we should be supporting prereleases in this specifier
+ # or not, if we do not support prereleases than we can short circuit
+ # logic if this version is a prereleases.
+ if item.is_prerelease and not prereleases:
+ return False
+
+ # Actually do the comparison to determine if this item is contained
+ # within this Specifier or not.
+ return self._get_operator(self.operator)(item, self.version)
+
+ def filter(self, iterable, prereleases=None):
+ yielded = False
+ found_prereleases = []
+
+ kw = {"prereleases": prereleases if prereleases is not None else True}
+
+ # Attempt to iterate over all the values in the iterable and if any of
+ # them match, yield them.
+ for version in iterable:
+ parsed_version = self._coerce_version(version)
+
+ if self.contains(parsed_version, **kw):
+ # If our version is a prerelease, and we were not set to allow
+ # prereleases, then we'll store it for later incase nothing
+ # else matches this specifier.
+ if (parsed_version.is_prerelease and not
+ (prereleases or self.prereleases)):
+ found_prereleases.append(version)
+ # Either this is not a prerelease, or we should have been
+ # accepting prereleases from the begining.
+ else:
+ yielded = True
+ yield version
+
+ # Now that we've iterated over everything, determine if we've yielded
+ # any values, and if we have not and we have any prereleases stored up
+ # then we will go ahead and yield the prereleases.
+ if not yielded and found_prereleases:
+ for version in found_prereleases:
+ yield version
+
+
+class LegacySpecifier(_IndividualSpecifier):
+
+ _regex_str = (
+ r"""
+ (?P<operator>(==|!=|<=|>=|<|>))
+ \s*
+ (?P<version>
+ [^,;\s)]* # Since this is a "legacy" specifier, and the version
+ # string can be just about anything, we match everything
+ # except for whitespace, a semi-colon for marker support,
+ # a closing paren since versions can be enclosed in
+ # them, and a comma since it's a version separator.
+ )
+ """
+ )
+
+ _regex = re.compile(
+ r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
+
+ _operators = {
+ "==": "equal",
+ "!=": "not_equal",
+ "<=": "less_than_equal",
+ ">=": "greater_than_equal",
+ "<": "less_than",
+ ">": "greater_than",
+ }
+
+ def _coerce_version(self, version):
+ if not isinstance(version, LegacyVersion):
+ version = LegacyVersion(str(version))
+ return version
+
+ def _compare_equal(self, prospective, spec):
+ return prospective == self._coerce_version(spec)
+
+ def _compare_not_equal(self, prospective, spec):
+ return prospective != self._coerce_version(spec)
+
+ def _compare_less_than_equal(self, prospective, spec):
+ return prospective <= self._coerce_version(spec)
+
+ def _compare_greater_than_equal(self, prospective, spec):
+ return prospective >= self._coerce_version(spec)
+
+ def _compare_less_than(self, prospective, spec):
+ return prospective < self._coerce_version(spec)
+
+ def _compare_greater_than(self, prospective, spec):
+ return prospective > self._coerce_version(spec)
+
+
+def _require_version_compare(fn):
+ @functools.wraps(fn)
+ def wrapped(self, prospective, spec):
+ if not isinstance(prospective, Version):
+ return False
+ return fn(self, prospective, spec)
+ return wrapped
+
+
+class Specifier(_IndividualSpecifier):
+
+ _regex_str = (
+ r"""
+ (?P<operator>(~=|==|!=|<=|>=|<|>|===))
+ (?P<version>
+ (?:
+ # The identity operators allow for an escape hatch that will
+ # do an exact string match of the version you wish to install.
+ # This will not be parsed by PEP 440 and we cannot determine
+ # any semantic meaning from it. This operator is discouraged
+ # but included entirely as an escape hatch.
+ (?<====) # Only match for the identity operator
+ \s*
+ [^\s]* # We just match everything, except for whitespace
+ # since we are only testing for strict identity.
+ )
+ |
+ (?:
+ # The (non)equality operators allow for wild card and local
+ # versions to be specified so we have to define these two
+ # operators separately to enable that.
+ (?<===|!=) # Only match for equals and not equals
+
+ \s*
+ v?
+ (?:[0-9]+!)? # epoch
+ [0-9]+(?:\.[0-9]+)* # release
+ (?: # pre release
+ [-_\.]?
+ (a|b|c|rc|alpha|beta|pre|preview)
+ [-_\.]?
+ [0-9]*
+ )?
+ (?: # post release
+ (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+ )?
+
+ # You cannot use a wild card and a dev or local version
+ # together so group them with a | and make them optional.
+ (?:
+ (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
+ (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
+ |
+ \.\* # Wild card syntax of .*
+ )?
+ )
+ |
+ (?:
+ # The compatible operator requires at least two digits in the
+ # release segment.
+ (?<=~=) # Only match for the compatible operator
+
+ \s*
+ v?
+ (?:[0-9]+!)? # epoch
+ [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *)
+ (?: # pre release
+ [-_\.]?
+ (a|b|c|rc|alpha|beta|pre|preview)
+ [-_\.]?
+ [0-9]*
+ )?
+ (?: # post release
+ (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+ )?
+ (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
+ )
+ |
+ (?:
+ # All other operators only allow a sub set of what the
+ # (non)equality operators do. Specifically they do not allow
+ # local versions to be specified nor do they allow the prefix
+ # matching wild cards.
+ (?<!==|!=|~=) # We have special cases for these
+ # operators so we want to make sure they
+ # don't match here.
+
+ \s*
+ v?
+ (?:[0-9]+!)? # epoch
+ [0-9]+(?:\.[0-9]+)* # release
+ (?: # pre release
+ [-_\.]?
+ (a|b|c|rc|alpha|beta|pre|preview)
+ [-_\.]?
+ [0-9]*
+ )?
+ (?: # post release
+ (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+ )?
+ (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
+ )
+ )
+ """
+ )
+
+ _regex = re.compile(
+ r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
+
+ _operators = {
+ "~=": "compatible",
+ "==": "equal",
+ "!=": "not_equal",
+ "<=": "less_than_equal",
+ ">=": "greater_than_equal",
+ "<": "less_than",
+ ">": "greater_than",
+ "===": "arbitrary",
+ }
+
+ @_require_version_compare
+ def _compare_compatible(self, prospective, spec):
+ # Compatible releases have an equivalent combination of >= and ==. That
+ # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
+ # implement this in terms of the other specifiers instead of
+ # implementing it ourselves. The only thing we need to do is construct
+ # the other specifiers.
+
+ # We want everything but the last item in the version, but we want to
+ # ignore post and dev releases and we want to treat the pre-release as
+ # it's own separate segment.
+ prefix = ".".join(
+ list(
+ itertools.takewhile(
+ lambda x: (not x.startswith("post") and not
+ x.startswith("dev")),
+ _version_split(spec),
+ )
+ )[:-1]
+ )
+
+ # Add the prefix notation to the end of our string
+ prefix += ".*"
+
+ return (self._get_operator(">=")(prospective, spec) and
+ self._get_operator("==")(prospective, prefix))
+
+ @_require_version_compare
+ def _compare_equal(self, prospective, spec):
+ # We need special logic to handle prefix matching
+ if spec.endswith(".*"):
+ # In the case of prefix matching we want to ignore local segment.
+ prospective = Version(prospective.public)
+ # Split the spec out by dots, and pretend that there is an implicit
+ # dot in between a release segment and a pre-release segment.
+ spec = _version_split(spec[:-2]) # Remove the trailing .*
+
+ # Split the prospective version out by dots, and pretend that there
+ # is an implicit dot in between a release segment and a pre-release
+ # segment.
+ prospective = _version_split(str(prospective))
+
+ # Shorten the prospective version to be the same length as the spec
+ # so that we can determine if the specifier is a prefix of the
+ # prospective version or not.
+ prospective = prospective[:len(spec)]
+
+ # Pad out our two sides with zeros so that they both equal the same
+ # length.
+ spec, prospective = _pad_version(spec, prospective)
+ else:
+ # Convert our spec string into a Version
+ spec = Version(spec)
+
+ # If the specifier does not have a local segment, then we want to
+ # act as if the prospective version also does not have a local
+ # segment.
+ if not spec.local:
+ prospective = Version(prospective.public)
+
+ return prospective == spec
+
+ @_require_version_compare
+ def _compare_not_equal(self, prospective, spec):
+ return not self._compare_equal(prospective, spec)
+
+ @_require_version_compare
+ def _compare_less_than_equal(self, prospective, spec):
+ return prospective <= Version(spec)
+
+ @_require_version_compare
+ def _compare_greater_than_equal(self, prospective, spec):
+ return prospective >= Version(spec)
+
+ @_require_version_compare
+ def _compare_less_than(self, prospective, spec):
+ # Convert our spec to a Version instance, since we'll want to work with
+ # it as a version.
+ spec = Version(spec)
+
+ # Check to see if the prospective version is less than the spec
+ # version. If it's not we can short circuit and just return False now
+ # instead of doing extra unneeded work.
+ if not prospective < spec:
+ return False
+
+ # This special case is here so that, unless the specifier itself
+ # includes is a pre-release version, that we do not accept pre-release
+ # versions for the version mentioned in the specifier (e.g. <3.1 should
+ # not match 3.1.dev0, but should match 3.0.dev0).
+ if not spec.is_prerelease and prospective.is_prerelease:
+ if Version(prospective.base_version) == Version(spec.base_version):
+ return False
+
+ # If we've gotten to here, it means that prospective version is both
+ # less than the spec version *and* it's not a pre-release of the same
+ # version in the spec.
+ return True
+
+ @_require_version_compare
+ def _compare_greater_than(self, prospective, spec):
+ # Convert our spec to a Version instance, since we'll want to work with
+ # it as a version.
+ spec = Version(spec)
+
+ # Check to see if the prospective version is greater than the spec
+ # version. If it's not we can short circuit and just return False now
+ # instead of doing extra unneeded work.
+ if not prospective > spec:
+ return False
+
+ # This special case is here so that, unless the specifier itself
+ # includes is a post-release version, that we do not accept
+ # post-release versions for the version mentioned in the specifier
+ # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
+ if not spec.is_postrelease and prospective.is_postrelease:
+ if Version(prospective.base_version) == Version(spec.base_version):
+ return False
+
+ # Ensure that we do not allow a local version of the version mentioned
+ # in the specifier, which is techincally greater than, to match.
+ if prospective.local is not None:
+ if Version(prospective.base_version) == Version(spec.base_version):
+ return False
+
+ # If we've gotten to here, it means that prospective version is both
+ # greater than the spec version *and* it's not a pre-release of the
+ # same version in the spec.
+ return True
+
+ def _compare_arbitrary(self, prospective, spec):
+ return str(prospective).lower() == str(spec).lower()
+
+ @property
+ def prereleases(self):
+ # If there is an explicit prereleases set for this, then we'll just
+ # blindly use that.
+ if self._prereleases is not None:
+ return self._prereleases
+
+ # Look at all of our specifiers and determine if they are inclusive
+ # operators, and if they are if they are including an explicit
+ # prerelease.
+ operator, version = self._spec
+ if operator in ["==", ">=", "<=", "~=", "==="]:
+ # The == specifier can include a trailing .*, if it does we
+ # want to remove before parsing.
+ if operator == "==" and version.endswith(".*"):
+ version = version[:-2]
+
+ # Parse the version, and if it is a pre-release than this
+ # specifier allows pre-releases.
+ if parse(version).is_prerelease:
+ return True
+
+ return False
+
+ @prereleases.setter
+ def prereleases(self, value):
+ self._prereleases = value
+
+
+_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
+
+
+def _version_split(version):
+ result = []
+ for item in version.split("."):
+ match = _prefix_regex.search(item)
+ if match:
+ result.extend(match.groups())
+ else:
+ result.append(item)
+ return result
+
+
+def _pad_version(left, right):
+ left_split, right_split = [], []
+
+ # Get the release segment of our versions
+ left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
+ right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
+
+ # Get the rest of our versions
+ left_split.append(left[len(left_split[0]):])
+ right_split.append(right[len(right_split[0]):])
+
+ # Insert our padding
+ left_split.insert(
+ 1,
+ ["0"] * max(0, len(right_split[0]) - len(left_split[0])),
+ )
+ right_split.insert(
+ 1,
+ ["0"] * max(0, len(left_split[0]) - len(right_split[0])),
+ )
+
+ return (
+ list(itertools.chain(*left_split)),
+ list(itertools.chain(*right_split)),
+ )
+
+
+class SpecifierSet(BaseSpecifier):
+
+ def __init__(self, specifiers="", prereleases=None):
+ # Split on , to break each indidivual specifier into it's own item, and
+ # strip each item to remove leading/trailing whitespace.
+ specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
+
+ # Parsed each individual specifier, attempting first to make it a
+ # Specifier and falling back to a LegacySpecifier.
+ parsed = set()
+ for specifier in specifiers:
+ try:
+ parsed.add(Specifier(specifier))
+ except InvalidSpecifier:
+ parsed.add(LegacySpecifier(specifier))
+
+ # Turn our parsed specifiers into a frozen set and save them for later.
+ self._specs = frozenset(parsed)
+
+ # Store our prereleases value so we can use it later to determine if
+ # we accept prereleases or not.
+ self._prereleases = prereleases
+
+ def __repr__(self):
+ pre = (
+ ", prereleases={0!r}".format(self.prereleases)
+ if self._prereleases is not None
+ else ""
+ )
+
+ return "<SpecifierSet({0!r}{1})>".format(str(self), pre)
+
+ def __str__(self):
+ return ",".join(sorted(str(s) for s in self._specs))
+
+ def __hash__(self):
+ return hash(self._specs)
+
+ def __and__(self, other):
+ if isinstance(other, string_types):
+ other = SpecifierSet(other)
+ elif not isinstance(other, SpecifierSet):
+ return NotImplemented
+
+ specifier = SpecifierSet()
+ specifier._specs = frozenset(self._specs | other._specs)
+
+ if self._prereleases is None and other._prereleases is not None:
+ specifier._prereleases = other._prereleases
+ elif self._prereleases is not None and other._prereleases is None:
+ specifier._prereleases = self._prereleases
+ elif self._prereleases == other._prereleases:
+ specifier._prereleases = self._prereleases
+ else:
+ raise ValueError(
+ "Cannot combine SpecifierSets with True and False prerelease "
+ "overrides."
+ )
+
+ return specifier
+
+ def __eq__(self, other):
+ if isinstance(other, string_types):
+ other = SpecifierSet(other)
+ elif isinstance(other, _IndividualSpecifier):
+ other = SpecifierSet(str(other))
+ elif not isinstance(other, SpecifierSet):
+ return NotImplemented
+
+ return self._specs == other._specs
+
+ def __ne__(self, other):
+ if isinstance(other, string_types):
+ other = SpecifierSet(other)
+ elif isinstance(other, _IndividualSpecifier):
+ other = SpecifierSet(str(other))
+ elif not isinstance(other, SpecifierSet):
+ return NotImplemented
+
+ return self._specs != other._specs
+
+ def __len__(self):
+ return len(self._specs)
+
+ def __iter__(self):
+ return iter(self._specs)
+
+ @property
+ def prereleases(self):
+ # If we have been given an explicit prerelease modifier, then we'll
+ # pass that through here.
+ if self._prereleases is not None:
+ return self._prereleases
+
+ # If we don't have any specifiers, and we don't have a forced value,
+ # then we'll just return None since we don't know if this should have
+ # pre-releases or not.
+ if not self._specs:
+ return None
+
+ # Otherwise we'll see if any of the given specifiers accept
+ # prereleases, if any of them do we'll return True, otherwise False.
+ return any(s.prereleases for s in self._specs)
+
+ @prereleases.setter
+ def prereleases(self, value):
+ self._prereleases = value
+
+ def __contains__(self, item):
+ return self.contains(item)
+
+ def contains(self, item, prereleases=None):
+ # Ensure that our item is a Version or LegacyVersion instance.
+ if not isinstance(item, (LegacyVersion, Version)):
+ item = parse(item)
+
+ # Determine if we're forcing a prerelease or not, if we're not forcing
+ # one for this particular filter call, then we'll use whatever the
+ # SpecifierSet thinks for whether or not we should support prereleases.
+ if prereleases is None:
+ prereleases = self.prereleases
+
+ # We can determine if we're going to allow pre-releases by looking to
+ # see if any of the underlying items supports them. If none of them do
+ # and this item is a pre-release then we do not allow it and we can
+ # short circuit that here.
+ # Note: This means that 1.0.dev1 would not be contained in something
+ # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0
+ if not prereleases and item.is_prerelease:
+ return False
+
+ # We simply dispatch to the underlying specs here to make sure that the
+ # given version is contained within all of them.
+ # Note: This use of all() here means that an empty set of specifiers
+ # will always return True, this is an explicit design decision.
+ return all(
+ s.contains(item, prereleases=prereleases)
+ for s in self._specs
+ )
+
+ def filter(self, iterable, prereleases=None):
+ # Determine if we're forcing a prerelease or not, if we're not forcing
+ # one for this particular filter call, then we'll use whatever the
+ # SpecifierSet thinks for whether or not we should support prereleases.
+ if prereleases is None:
+ prereleases = self.prereleases
+
+ # If we have any specifiers, then we want to wrap our iterable in the
+ # filter method for each one, this will act as a logical AND amongst
+ # each specifier.
+ if self._specs:
+ for spec in self._specs:
+ iterable = spec.filter(iterable, prereleases=bool(prereleases))
+ return iterable
+ # If we do not have any specifiers, then we need to have a rough filter
+ # which will filter out any pre-releases, unless there are no final
+ # releases, and which will filter out LegacyVersion in general.
+ else:
+ filtered = []
+ found_prereleases = []
+
+ for item in iterable:
+ # Ensure that we some kind of Version class for this item.
+ if not isinstance(item, (LegacyVersion, Version)):
+ parsed_version = parse(item)
+ else:
+ parsed_version = item
+
+ # Filter out any item which is parsed as a LegacyVersion
+ if isinstance(parsed_version, LegacyVersion):
+ continue
+
+ # Store any item which is a pre-release for later unless we've
+ # already found a final version or we are accepting prereleases
+ if parsed_version.is_prerelease and not prereleases:
+ if not filtered:
+ found_prereleases.append(item)
+ else:
+ filtered.append(item)
+
+ # If we've found no items except for pre-releases, then we'll go
+ # ahead and use the pre-releases
+ if not filtered and found_prereleases and prereleases is None:
+ return found_prereleases
+
+ return filtered
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/utils.py b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..942387cef5d75f299a769b1eb43b6c7679e7a3a0
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/utils.py
@@ -0,0 +1,14 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import re
+
+
+_canonicalize_regex = re.compile(r"[-_.]+")
+
+
+def canonicalize_name(name):
+ # This is taken from PEP 503.
+ return _canonicalize_regex.sub("-", name).lower()
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/version.py b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/version.py
new file mode 100644
index 0000000000000000000000000000000000000000..83b5ee8c5efadf22ce2f16ff08c8a8d75f1eb5df
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/version.py
@@ -0,0 +1,393 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import collections
+import itertools
+import re
+
+from ._structures import Infinity
+
+
+__all__ = [
+ "parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"
+]
+
+
+_Version = collections.namedtuple(
+ "_Version",
+ ["epoch", "release", "dev", "pre", "post", "local"],
+)
+
+
+def parse(version):
+ """
+ Parse the given version string and return either a :class:`Version` object
+ or a :class:`LegacyVersion` object depending on if the given version is
+ a valid PEP 440 version or a legacy version.
+ """
+ try:
+ return Version(version)
+ except InvalidVersion:
+ return LegacyVersion(version)
+
+
+class InvalidVersion(ValueError):
+ """
+ An invalid version was found, users should refer to PEP 440.
+ """
+
+
+class _BaseVersion(object):
+
+ def __hash__(self):
+ return hash(self._key)
+
+ def __lt__(self, other):
+ return self._compare(other, lambda s, o: s < o)
+
+ def __le__(self, other):
+ return self._compare(other, lambda s, o: s <= o)
+
+ def __eq__(self, other):
+ return self._compare(other, lambda s, o: s == o)
+
+ def __ge__(self, other):
+ return self._compare(other, lambda s, o: s >= o)
+
+ def __gt__(self, other):
+ return self._compare(other, lambda s, o: s > o)
+
+ def __ne__(self, other):
+ return self._compare(other, lambda s, o: s != o)
+
+ def _compare(self, other, method):
+ if not isinstance(other, _BaseVersion):
+ return NotImplemented
+
+ return method(self._key, other._key)
+
+
+class LegacyVersion(_BaseVersion):
+
+ def __init__(self, version):
+ self._version = str(version)
+ self._key = _legacy_cmpkey(self._version)
+
+ def __str__(self):
+ return self._version
+
+ def __repr__(self):
+ return "<LegacyVersion({0})>".format(repr(str(self)))
+
+ @property
+ def public(self):
+ return self._version
+
+ @property
+ def base_version(self):
+ return self._version
+
+ @property
+ def local(self):
+ return None
+
+ @property
+ def is_prerelease(self):
+ return False
+
+ @property
+ def is_postrelease(self):
+ return False
+
+
+_legacy_version_component_re = re.compile(
+ r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE,
+)
+
+_legacy_version_replacement_map = {
+ "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@",
+}
+
+
+def _parse_version_parts(s):
+ for part in _legacy_version_component_re.split(s):
+ part = _legacy_version_replacement_map.get(part, part)
+
+ if not part or part == ".":
+ continue
+
+ if part[:1] in "0123456789":
+ # pad for numeric comparison
+ yield part.zfill(8)
+ else:
+ yield "*" + part
+
+ # ensure that alpha/beta/candidate are before final
+ yield "*final"
+
+
+def _legacy_cmpkey(version):
+ # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch
+ # greater than or equal to 0. This will effectively put the LegacyVersion,
+ # which uses the defacto standard originally implemented by setuptools,
+ # as before all PEP 440 versions.
+ epoch = -1
+
+ # This scheme is taken from pkg_resources.parse_version setuptools prior to
+ # it's adoption of the packaging library.
+ parts = []
+ for part in _parse_version_parts(version.lower()):
+ if part.startswith("*"):
+ # remove "-" before a prerelease tag
+ if part < "*final":
+ while parts and parts[-1] == "*final-":
+ parts.pop()
+
+ # remove trailing zeros from each series of numeric parts
+ while parts and parts[-1] == "00000000":
+ parts.pop()
+
+ parts.append(part)
+ parts = tuple(parts)
+
+ return epoch, parts
+
+# Deliberately not anchored to the start and end of the string, to make it
+# easier for 3rd party code to reuse
+VERSION_PATTERN = r"""
+ v?
+ (?:
+ (?:(?P<epoch>[0-9]+)!)? # epoch
+ (?P<release>[0-9]+(?:\.[0-9]+)*) # release segment
+ (?P<pre> # pre-release
+ [-_\.]?
+ (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
+ [-_\.]?
+ (?P<pre_n>[0-9]+)?
+ )?
+ (?P<post> # post release
+ (?:-(?P<post_n1>[0-9]+))
+ |
+ (?:
+ [-_\.]?
+ (?P<post_l>post|rev|r)
+ [-_\.]?
+ (?P<post_n2>[0-9]+)?
+ )
+ )?
+ (?P<dev> # dev release
+ [-_\.]?
+ (?P<dev_l>dev)
+ [-_\.]?
+ (?P<dev_n>[0-9]+)?
+ )?
+ )
+ (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version
+"""
+
+
+class Version(_BaseVersion):
+
+ _regex = re.compile(
+ r"^\s*" + VERSION_PATTERN + r"\s*$",
+ re.VERBOSE | re.IGNORECASE,
+ )
+
+ def __init__(self, version):
+ # Validate the version and parse it into pieces
+ match = self._regex.search(version)
+ if not match:
+ raise InvalidVersion("Invalid version: '{0}'".format(version))
+
+ # Store the parsed out pieces of the version
+ self._version = _Version(
+ epoch=int(match.group("epoch")) if match.group("epoch") else 0,
+ release=tuple(int(i) for i in match.group("release").split(".")),
+ pre=_parse_letter_version(
+ match.group("pre_l"),
+ match.group("pre_n"),
+ ),
+ post=_parse_letter_version(
+ match.group("post_l"),
+ match.group("post_n1") or match.group("post_n2"),
+ ),
+ dev=_parse_letter_version(
+ match.group("dev_l"),
+ match.group("dev_n"),
+ ),
+ local=_parse_local_version(match.group("local")),
+ )
+
+ # Generate a key which will be used for sorting
+ self._key = _cmpkey(
+ self._version.epoch,
+ self._version.release,
+ self._version.pre,
+ self._version.post,
+ self._version.dev,
+ self._version.local,
+ )
+
+ def __repr__(self):
+ return "<Version({0})>".format(repr(str(self)))
+
+ def __str__(self):
+ parts = []
+
+ # Epoch
+ if self._version.epoch != 0:
+ parts.append("{0}!".format(self._version.epoch))
+
+ # Release segment
+ parts.append(".".join(str(x) for x in self._version.release))
+
+ # Pre-release
+ if self._version.pre is not None:
+ parts.append("".join(str(x) for x in self._version.pre))
+
+ # Post-release
+ if self._version.post is not None:
+ parts.append(".post{0}".format(self._version.post[1]))
+
+ # Development release
+ if self._version.dev is not None:
+ parts.append(".dev{0}".format(self._version.dev[1]))
+
+ # Local version segment
+ if self._version.local is not None:
+ parts.append(
+ "+{0}".format(".".join(str(x) for x in self._version.local))
+ )
+
+ return "".join(parts)
+
+ @property
+ def public(self):
+ return str(self).split("+", 1)[0]
+
+ @property
+ def base_version(self):
+ parts = []
+
+ # Epoch
+ if self._version.epoch != 0:
+ parts.append("{0}!".format(self._version.epoch))
+
+ # Release segment
+ parts.append(".".join(str(x) for x in self._version.release))
+
+ return "".join(parts)
+
+ @property
+ def local(self):
+ version_string = str(self)
+ if "+" in version_string:
+ return version_string.split("+", 1)[1]
+
+ @property
+ def is_prerelease(self):
+ return bool(self._version.dev or self._version.pre)
+
+ @property
+ def is_postrelease(self):
+ return bool(self._version.post)
+
+
+def _parse_letter_version(letter, number):
+ if letter:
+ # We consider there to be an implicit 0 in a pre-release if there is
+ # not a numeral associated with it.
+ if number is None:
+ number = 0
+
+ # We normalize any letters to their lower case form
+ letter = letter.lower()
+
+ # We consider some words to be alternate spellings of other words and
+ # in those cases we want to normalize the spellings to our preferred
+ # spelling.
+ if letter == "alpha":
+ letter = "a"
+ elif letter == "beta":
+ letter = "b"
+ elif letter in ["c", "pre", "preview"]:
+ letter = "rc"
+ elif letter in ["rev", "r"]:
+ letter = "post"
+
+ return letter, int(number)
+ if not letter and number:
+ # We assume if we are given a number, but we are not given a letter
+ # then this is using the implicit post release syntax (e.g. 1.0-1)
+ letter = "post"
+
+ return letter, int(number)
+
+
+_local_version_seperators = re.compile(r"[\._-]")
+
+
+def _parse_local_version(local):
+ """
+ Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
+ """
+ if local is not None:
+ return tuple(
+ part.lower() if not part.isdigit() else int(part)
+ for part in _local_version_seperators.split(local)
+ )
+
+
+def _cmpkey(epoch, release, pre, post, dev, local):
+ # When we compare a release version, we want to compare it with all of the
+ # trailing zeros removed. So we'll use a reverse the list, drop all the now
+ # leading zeros until we come to something non zero, then take the rest
+ # re-reverse it back into the correct order and make it a tuple and use
+ # that for our sorting key.
+ release = tuple(
+ reversed(list(
+ itertools.dropwhile(
+ lambda x: x == 0,
+ reversed(release),
+ )
+ ))
+ )
+
+ # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
+ # We'll do this by abusing the pre segment, but we _only_ want to do this
+ # if there is not a pre or a post segment. If we have one of those then
+ # the normal sorting rules will handle this case correctly.
+ if pre is None and post is None and dev is not None:
+ pre = -Infinity
+ # Versions without a pre-release (except as noted above) should sort after
+ # those with one.
+ elif pre is None:
+ pre = Infinity
+
+ # Versions without a post segment should sort before those with one.
+ if post is None:
+ post = -Infinity
+
+ # Versions without a development segment should sort after those with one.
+ if dev is None:
+ dev = Infinity
+
+ if local is None:
+ # Versions without a local segment should sort before those with one.
+ local = -Infinity
+ else:
+ # Versions with a local segment need that segment parsed to implement
+ # the sorting rules in PEP440.
+ # - Alpha numeric segments sort before numeric segments
+ # - Alpha numeric segments sort lexicographically
+ # - Numeric segments sort numerically
+ # - Shorter versions sort before longer versions when the prefixes
+ # match exactly
+ local = tuple(
+ (i, "") if isinstance(i, int) else (-Infinity, i)
+ for i in local
+ )
+
+ return epoch, release, pre, post, dev, local
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/pyparsing.py b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/pyparsing.py
new file mode 100644
index 0000000000000000000000000000000000000000..cf75e1e5fcbfe7eac41d2a9e446c5c980741087b
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/pyparsing.py
@@ -0,0 +1,5742 @@
+# module pyparsing.py
+#
+# Copyright (c) 2003-2018 Paul T. McGuire
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__doc__ = \
+"""
+pyparsing module - Classes and methods to define and execute parsing grammars
+=============================================================================
+
+The pyparsing module is an alternative approach to creating and executing simple grammars,
+vs. the traditional lex/yacc approach, or the use of regular expressions. With pyparsing, you
+don't need to learn a new syntax for defining grammars or matching expressions - the parsing module
+provides a library of classes that you use to construct the grammar directly in Python.
+
+Here is a program to parse "Hello, World!" (or any greeting of the form
+C{"<salutation>, <addressee>!"}), built up using L{Word}, L{Literal}, and L{And} elements
+(L{'+'<ParserElement.__add__>} operator gives L{And} expressions, strings are auto-converted to
+L{Literal} expressions)::
+
+ from pyparsing import Word, alphas
+
+ # define grammar of a greeting
+ greet = Word(alphas) + "," + Word(alphas) + "!"
+
+ hello = "Hello, World!"
+ print (hello, "->", greet.parseString(hello))
+
+The program outputs the following::
+
+ Hello, World! -> ['Hello', ',', 'World', '!']
+
+The Python representation of the grammar is quite readable, owing to the self-explanatory
+class names, and the use of '+', '|' and '^' operators.
+
+The L{ParseResults} object returned from L{ParserElement.parseString<ParserElement.parseString>} can be accessed as a nested list, a dictionary, or an
+object with named attributes.
+
+The pyparsing module handles some of the problems that are typically vexing when writing text parsers:
+ - extra or missing whitespace (the above program will also handle "Hello,World!", "Hello , World !", etc.)
+ - quoted strings
+ - embedded comments
+
+
+Getting Started -
+-----------------
+Visit the classes L{ParserElement} and L{ParseResults} to see the base classes that most other pyparsing
+classes inherit from. Use the docstrings for examples of how to:
+ - construct literal match expressions from L{Literal} and L{CaselessLiteral} classes
+ - construct character word-group expressions using the L{Word} class
+ - see how to create repetitive expressions using L{ZeroOrMore} and L{OneOrMore} classes
+ - use L{'+'<And>}, L{'|'<MatchFirst>}, L{'^'<Or>}, and L{'&'<Each>} operators to combine simple expressions into more complex ones
+ - associate names with your parsed results using L{ParserElement.setResultsName}
+ - find some helpful expression short-cuts like L{delimitedList} and L{oneOf}
+ - find more useful common expressions in the L{pyparsing_common} namespace class
+"""
+
+__version__ = "2.2.1"
+__versionTime__ = "18 Sep 2018 00:49 UTC"
+__author__ = "Paul McGuire <ptmcg@users.sourceforge.net>"
+
+import string
+from weakref import ref as wkref
+import copy
+import sys
+import warnings
+import re
+import sre_constants
+import collections
+import pprint
+import traceback
+import types
+from datetime import datetime
+
+try:
+ from _thread import RLock
+except ImportError:
+ from threading import RLock
+
+try:
+ # Python 3
+ from collections.abc import Iterable
+ from collections.abc import MutableMapping
+except ImportError:
+ # Python 2.7
+ from collections import Iterable
+ from collections import MutableMapping
+
+try:
+ from collections import OrderedDict as _OrderedDict
+except ImportError:
+ try:
+ from ordereddict import OrderedDict as _OrderedDict
+ except ImportError:
+ _OrderedDict = None
+
+#~ sys.stderr.write( "testing pyparsing module, version %s, %s\n" % (__version__,__versionTime__ ) )
+
+__all__ = [
+'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty',
+'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal',
+'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or',
+'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException',
+'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException',
+'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter',
+'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore',
+'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col',
+'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString',
+'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums',
+'htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno',
+'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral',
+'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables',
+'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity',
+'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd',
+'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute',
+'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation','locatedExpr', 'withClass',
+'CloseMatch', 'tokenMap', 'pyparsing_common',
+]
+
+system_version = tuple(sys.version_info)[:3]
+PY_3 = system_version[0] == 3
+if PY_3:
+ _MAX_INT = sys.maxsize
+ basestring = str
+ unichr = chr
+ _ustr = str
+
+ # build list of single arg builtins, that can be used as parse actions
+ singleArgBuiltins = [sum, len, sorted, reversed, list, tuple, set, any, all, min, max]
+
+else:
+ _MAX_INT = sys.maxint
+ range = xrange
+
+ def _ustr(obj):
+ """Drop-in replacement for str(obj) that tries to be Unicode friendly. It first tries
+ str(obj). If that fails with a UnicodeEncodeError, then it tries unicode(obj). It
+ then < returns the unicode object | encodes it with the default encoding | ... >.
+ """
+ if isinstance(obj,unicode):
+ return obj
+
+ try:
+ # If this works, then _ustr(obj) has the same behaviour as str(obj), so
+ # it won't break any existing code.
+ return str(obj)
+
+ except UnicodeEncodeError:
+ # Else encode it
+ ret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace')
+ xmlcharref = Regex(r'&#\d+;')
+ xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:])
+ return xmlcharref.transformString(ret)
+
+ # build list of single arg builtins, tolerant of Python version, that can be used as parse actions
+ singleArgBuiltins = []
+ import __builtin__
+ for fname in "sum len sorted reversed list tuple set any all min max".split():
+ try:
+ singleArgBuiltins.append(getattr(__builtin__,fname))
+ except AttributeError:
+ continue
+
+_generatorType = type((y for y in range(1)))
+
+def _xml_escape(data):
+ """Escape &, <, >, ", ', etc. in a string of data."""
+
+ # ampersand must be replaced first
+ from_symbols = '&><"\''
+ to_symbols = ('&'+s+';' for s in "amp gt lt quot apos".split())
+ for from_,to_ in zip(from_symbols, to_symbols):
+ data = data.replace(from_, to_)
+ return data
+
+class _Constants(object):
+ pass
+
+alphas = string.ascii_uppercase + string.ascii_lowercase
+nums = "0123456789"
+hexnums = nums + "ABCDEFabcdef"
+alphanums = alphas + nums
+_bslash = chr(92)
+printables = "".join(c for c in string.printable if c not in string.whitespace)
+
+class ParseBaseException(Exception):
+ """base exception class for all parsing runtime exceptions"""
+ # Performance tuning: we construct a *lot* of these, so keep this
+ # constructor as small and fast as possible
+ def __init__( self, pstr, loc=0, msg=None, elem=None ):
+ self.loc = loc
+ if msg is None:
+ self.msg = pstr
+ self.pstr = ""
+ else:
+ self.msg = msg
+ self.pstr = pstr
+ self.parserElement = elem
+ self.args = (pstr, loc, msg)
+
+ @classmethod
+ def _from_exception(cls, pe):
+ """
+ internal factory method to simplify creating one type of ParseException
+ from another - avoids having __init__ signature conflicts among subclasses
+ """
+ return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement)
+
+ def __getattr__( self, aname ):
+ """supported attributes by name are:
+ - lineno - returns the line number of the exception text
+ - col - returns the column number of the exception text
+ - line - returns the line containing the exception text
+ """
+ if( aname == "lineno" ):
+ return lineno( self.loc, self.pstr )
+ elif( aname in ("col", "column") ):
+ return col( self.loc, self.pstr )
+ elif( aname == "line" ):
+ return line( self.loc, self.pstr )
+ else:
+ raise AttributeError(aname)
+
+ def __str__( self ):
+ return "%s (at char %d), (line:%d, col:%d)" % \
+ ( self.msg, self.loc, self.lineno, self.column )
+ def __repr__( self ):
+ return _ustr(self)
+ def markInputline( self, markerString = ">!<" ):
+ """Extracts the exception line from the input string, and marks
+ the location of the exception with a special symbol.
+ """
+ line_str = self.line
+ line_column = self.column - 1
+ if markerString:
+ line_str = "".join((line_str[:line_column],
+ markerString, line_str[line_column:]))
+ return line_str.strip()
+ def __dir__(self):
+ return "lineno col line".split() + dir(type(self))
+
+class ParseException(ParseBaseException):
+ """
+ Exception thrown when parse expressions don't match class;
+ supported attributes by name are:
+ - lineno - returns the line number of the exception text
+ - col - returns the column number of the exception text
+ - line - returns the line containing the exception text
+
+ Example::
+ try:
+ Word(nums).setName("integer").parseString("ABC")
+ except ParseException as pe:
+ print(pe)
+ print("column: {}".format(pe.col))
+
+ prints::
+ Expected integer (at char 0), (line:1, col:1)
+ column: 1
+ """
+ pass
+
+class ParseFatalException(ParseBaseException):
+ """user-throwable exception thrown when inconsistent parse content
+ is found; stops all parsing immediately"""
+ pass
+
+class ParseSyntaxException(ParseFatalException):
+ """just like L{ParseFatalException}, but thrown internally when an
+ L{ErrorStop<And._ErrorStop>} ('-' operator) indicates that parsing is to stop
+ immediately because an unbacktrackable syntax error has been found"""
+ pass
+
+#~ class ReparseException(ParseBaseException):
+ #~ """Experimental class - parse actions can raise this exception to cause
+ #~ pyparsing to reparse the input string:
+ #~ - with a modified input string, and/or
+ #~ - with a modified start location
+ #~ Set the values of the ReparseException in the constructor, and raise the
+ #~ exception in a parse action to cause pyparsing to use the new string/location.
+ #~ Setting the values as None causes no change to be made.
+ #~ """
+ #~ def __init_( self, newstring, restartLoc ):
+ #~ self.newParseText = newstring
+ #~ self.reparseLoc = restartLoc
+
+class RecursiveGrammarException(Exception):
+ """exception thrown by L{ParserElement.validate} if the grammar could be improperly recursive"""
+ def __init__( self, parseElementList ):
+ self.parseElementTrace = parseElementList
+
+ def __str__( self ):
+ return "RecursiveGrammarException: %s" % self.parseElementTrace
+
+class _ParseResultsWithOffset(object):
+ def __init__(self,p1,p2):
+ self.tup = (p1,p2)
+ def __getitem__(self,i):
+ return self.tup[i]
+ def __repr__(self):
+ return repr(self.tup[0])
+ def setOffset(self,i):
+ self.tup = (self.tup[0],i)
+
+class ParseResults(object):
+ """
+ Structured parse results, to provide multiple means of access to the parsed data:
+ - as a list (C{len(results)})
+ - by list index (C{results[0], results[1]}, etc.)
+ - by attribute (C{results.<resultsName>} - see L{ParserElement.setResultsName})
+
+ Example::
+ integer = Word(nums)
+ date_str = (integer.setResultsName("year") + '/'
+ + integer.setResultsName("month") + '/'
+ + integer.setResultsName("day"))
+ # equivalent form:
+ # date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+ # parseString returns a ParseResults object
+ result = date_str.parseString("1999/12/31")
+
+ def test(s, fn=repr):
+ print("%s -> %s" % (s, fn(eval(s))))
+ test("list(result)")
+ test("result[0]")
+ test("result['month']")
+ test("result.day")
+ test("'month' in result")
+ test("'minutes' in result")
+ test("result.dump()", str)
+ prints::
+ list(result) -> ['1999', '/', '12', '/', '31']
+ result[0] -> '1999'
+ result['month'] -> '12'
+ result.day -> '31'
+ 'month' in result -> True
+ 'minutes' in result -> False
+ result.dump() -> ['1999', '/', '12', '/', '31']
+ - day: 31
+ - month: 12
+ - year: 1999
+ """
+ def __new__(cls, toklist=None, name=None, asList=True, modal=True ):
+ if isinstance(toklist, cls):
+ return toklist
+ retobj = object.__new__(cls)
+ retobj.__doinit = True
+ return retobj
+
+ # Performance tuning: we construct a *lot* of these, so keep this
+ # constructor as small and fast as possible
+ def __init__( self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance ):
+ if self.__doinit:
+ self.__doinit = False
+ self.__name = None
+ self.__parent = None
+ self.__accumNames = {}
+ self.__asList = asList
+ self.__modal = modal
+ if toklist is None:
+ toklist = []
+ if isinstance(toklist, list):
+ self.__toklist = toklist[:]
+ elif isinstance(toklist, _generatorType):
+ self.__toklist = list(toklist)
+ else:
+ self.__toklist = [toklist]
+ self.__tokdict = dict()
+
+ if name is not None and name:
+ if not modal:
+ self.__accumNames[name] = 0
+ if isinstance(name,int):
+ name = _ustr(name) # will always return a str, but use _ustr for consistency
+ self.__name = name
+ if not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None,'',[])):
+ if isinstance(toklist,basestring):
+ toklist = [ toklist ]
+ if asList:
+ if isinstance(toklist,ParseResults):
+ self[name] = _ParseResultsWithOffset(toklist.copy(),0)
+ else:
+ self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]),0)
+ self[name].__name = name
+ else:
+ try:
+ self[name] = toklist[0]
+ except (KeyError,TypeError,IndexError):
+ self[name] = toklist
+
+ def __getitem__( self, i ):
+ if isinstance( i, (int,slice) ):
+ return self.__toklist[i]
+ else:
+ if i not in self.__accumNames:
+ return self.__tokdict[i][-1][0]
+ else:
+ return ParseResults([ v[0] for v in self.__tokdict[i] ])
+
+ def __setitem__( self, k, v, isinstance=isinstance ):
+ if isinstance(v,_ParseResultsWithOffset):
+ self.__tokdict[k] = self.__tokdict.get(k,list()) + [v]
+ sub = v[0]
+ elif isinstance(k,(int,slice)):
+ self.__toklist[k] = v
+ sub = v
+ else:
+ self.__tokdict[k] = self.__tokdict.get(k,list()) + [_ParseResultsWithOffset(v,0)]
+ sub = v
+ if isinstance(sub,ParseResults):
+ sub.__parent = wkref(self)
+
+ def __delitem__( self, i ):
+ if isinstance(i,(int,slice)):
+ mylen = len( self.__toklist )
+ del self.__toklist[i]
+
+ # convert int to slice
+ if isinstance(i, int):
+ if i < 0:
+ i += mylen
+ i = slice(i, i+1)
+ # get removed indices
+ removed = list(range(*i.indices(mylen)))
+ removed.reverse()
+ # fixup indices in token dictionary
+ for name,occurrences in self.__tokdict.items():
+ for j in removed:
+ for k, (value, position) in enumerate(occurrences):
+ occurrences[k] = _ParseResultsWithOffset(value, position - (position > j))
+ else:
+ del self.__tokdict[i]
+
+ def __contains__( self, k ):
+ return k in self.__tokdict
+
+ def __len__( self ): return len( self.__toklist )
+ def __bool__(self): return ( not not self.__toklist )
+ __nonzero__ = __bool__
+ def __iter__( self ): return iter( self.__toklist )
+ def __reversed__( self ): return iter( self.__toklist[::-1] )
+ def _iterkeys( self ):
+ if hasattr(self.__tokdict, "iterkeys"):
+ return self.__tokdict.iterkeys()
+ else:
+ return iter(self.__tokdict)
+
+ def _itervalues( self ):
+ return (self[k] for k in self._iterkeys())
+
+ def _iteritems( self ):
+ return ((k, self[k]) for k in self._iterkeys())
+
+ if PY_3:
+ keys = _iterkeys
+ """Returns an iterator of all named result keys (Python 3.x only)."""
+
+ values = _itervalues
+ """Returns an iterator of all named result values (Python 3.x only)."""
+
+ items = _iteritems
+ """Returns an iterator of all named result key-value tuples (Python 3.x only)."""
+
+ else:
+ iterkeys = _iterkeys
+ """Returns an iterator of all named result keys (Python 2.x only)."""
+
+ itervalues = _itervalues
+ """Returns an iterator of all named result values (Python 2.x only)."""
+
+ iteritems = _iteritems
+ """Returns an iterator of all named result key-value tuples (Python 2.x only)."""
+
+ def keys( self ):
+ """Returns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x)."""
+ return list(self.iterkeys())
+
+ def values( self ):
+ """Returns all named result values (as a list in Python 2.x, as an iterator in Python 3.x)."""
+ return list(self.itervalues())
+
+ def items( self ):
+ """Returns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x)."""
+ return list(self.iteritems())
+
+ def haskeys( self ):
+ """Since keys() returns an iterator, this method is helpful in bypassing
+ code that looks for the existence of any defined results names."""
+ return bool(self.__tokdict)
+
+ def pop( self, *args, **kwargs):
+ """
+ Removes and returns item at specified index (default=C{last}).
+ Supports both C{list} and C{dict} semantics for C{pop()}. If passed no
+ argument or an integer argument, it will use C{list} semantics
+ and pop tokens from the list of parsed tokens. If passed a
+ non-integer argument (most likely a string), it will use C{dict}
+ semantics and pop the corresponding value from any defined
+ results names. A second default return value argument is
+ supported, just as in C{dict.pop()}.
+
+ Example::
+ def remove_first(tokens):
+ tokens.pop(0)
+ print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
+ print(OneOrMore(Word(nums)).addParseAction(remove_first).parseString("0 123 321")) # -> ['123', '321']
+
+ label = Word(alphas)
+ patt = label("LABEL") + OneOrMore(Word(nums))
+ print(patt.parseString("AAB 123 321").dump())
+
+ # Use pop() in a parse action to remove named result (note that corresponding value is not
+ # removed from list form of results)
+ def remove_LABEL(tokens):
+ tokens.pop("LABEL")
+ return tokens
+ patt.addParseAction(remove_LABEL)
+ print(patt.parseString("AAB 123 321").dump())
+ prints::
+ ['AAB', '123', '321']
+ - LABEL: AAB
+
+ ['AAB', '123', '321']
+ """
+ if not args:
+ args = [-1]
+ for k,v in kwargs.items():
+ if k == 'default':
+ args = (args[0], v)
+ else:
+ raise TypeError("pop() got an unexpected keyword argument '%s'" % k)
+ if (isinstance(args[0], int) or
+ len(args) == 1 or
+ args[0] in self):
+ index = args[0]
+ ret = self[index]
+ del self[index]
+ return ret
+ else:
+ defaultvalue = args[1]
+ return defaultvalue
+
+ def get(self, key, defaultValue=None):
+ """
+ Returns named result matching the given key, or if there is no
+ such name, then returns the given C{defaultValue} or C{None} if no
+ C{defaultValue} is specified.
+
+ Similar to C{dict.get()}.
+
+ Example::
+ integer = Word(nums)
+ date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+ result = date_str.parseString("1999/12/31")
+ print(result.get("year")) # -> '1999'
+ print(result.get("hour", "not specified")) # -> 'not specified'
+ print(result.get("hour")) # -> None
+ """
+ if key in self:
+ return self[key]
+ else:
+ return defaultValue
+
+ def insert( self, index, insStr ):
+ """
+ Inserts new element at location index in the list of parsed tokens.
+
+ Similar to C{list.insert()}.
+
+ Example::
+ print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
+
+ # use a parse action to insert the parse location in the front of the parsed results
+ def insert_locn(locn, tokens):
+ tokens.insert(0, locn)
+ print(OneOrMore(Word(nums)).addParseAction(insert_locn).parseString("0 123 321")) # -> [0, '0', '123', '321']
+ """
+ self.__toklist.insert(index, insStr)
+ # fixup indices in token dictionary
+ for name,occurrences in self.__tokdict.items():
+ for k, (value, position) in enumerate(occurrences):
+ occurrences[k] = _ParseResultsWithOffset(value, position + (position > index))
+
+ def append( self, item ):
+ """
+ Add single element to end of ParseResults list of elements.
+
+ Example::
+ print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
+
+ # use a parse action to compute the sum of the parsed integers, and add it to the end
+ def append_sum(tokens):
+ tokens.append(sum(map(int, tokens)))
+ print(OneOrMore(Word(nums)).addParseAction(append_sum).parseString("0 123 321")) # -> ['0', '123', '321', 444]
+ """
+ self.__toklist.append(item)
+
+ def extend( self, itemseq ):
+ """
+ Add sequence of elements to end of ParseResults list of elements.
+
+ Example::
+ patt = OneOrMore(Word(alphas))
+
+ # use a parse action to append the reverse of the matched strings, to make a palindrome
+ def make_palindrome(tokens):
+ tokens.extend(reversed([t[::-1] for t in tokens]))
+ return ''.join(tokens)
+ print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl'
+ """
+ if isinstance(itemseq, ParseResults):
+ self += itemseq
+ else:
+ self.__toklist.extend(itemseq)
+
+ def clear( self ):
+ """
+ Clear all elements and results names.
+ """
+ del self.__toklist[:]
+ self.__tokdict.clear()
+
+ def __getattr__( self, name ):
+ try:
+ return self[name]
+ except KeyError:
+ return ""
+
+ if name in self.__tokdict:
+ if name not in self.__accumNames:
+ return self.__tokdict[name][-1][0]
+ else:
+ return ParseResults([ v[0] for v in self.__tokdict[name] ])
+ else:
+ return ""
+
+ def __add__( self, other ):
+ ret = self.copy()
+ ret += other
+ return ret
+
+ def __iadd__( self, other ):
+ if other.__tokdict:
+ offset = len(self.__toklist)
+ addoffset = lambda a: offset if a<0 else a+offset
+ otheritems = other.__tokdict.items()
+ otherdictitems = [(k, _ParseResultsWithOffset(v[0],addoffset(v[1])) )
+ for (k,vlist) in otheritems for v in vlist]
+ for k,v in otherdictitems:
+ self[k] = v
+ if isinstance(v[0],ParseResults):
+ v[0].__parent = wkref(self)
+
+ self.__toklist += other.__toklist
+ self.__accumNames.update( other.__accumNames )
+ return self
+
+ def __radd__(self, other):
+ if isinstance(other,int) and other == 0:
+ # useful for merging many ParseResults using sum() builtin
+ return self.copy()
+ else:
+ # this may raise a TypeError - so be it
+ return other + self
+
+ def __repr__( self ):
+ return "(%s, %s)" % ( repr( self.__toklist ), repr( self.__tokdict ) )
+
+ def __str__( self ):
+ return '[' + ', '.join(_ustr(i) if isinstance(i, ParseResults) else repr(i) for i in self.__toklist) + ']'
+
+ def _asStringList( self, sep='' ):
+ out = []
+ for item in self.__toklist:
+ if out and sep:
+ out.append(sep)
+ if isinstance( item, ParseResults ):
+ out += item._asStringList()
+ else:
+ out.append( _ustr(item) )
+ return out
+
+ def asList( self ):
+ """
+ Returns the parse results as a nested list of matching tokens, all converted to strings.
+
+ Example::
+ patt = OneOrMore(Word(alphas))
+ result = patt.parseString("sldkj lsdkj sldkj")
+ # even though the result prints in string-like form, it is actually a pyparsing ParseResults
+ print(type(result), result) # -> <class 'pyparsing.ParseResults'> ['sldkj', 'lsdkj', 'sldkj']
+
+ # Use asList() to create an actual list
+ result_list = result.asList()
+ print(type(result_list), result_list) # -> <class 'list'> ['sldkj', 'lsdkj', 'sldkj']
+ """
+ return [res.asList() if isinstance(res,ParseResults) else res for res in self.__toklist]
+
+ def asDict( self ):
+ """
+ Returns the named parse results as a nested dictionary.
+
+ Example::
+ integer = Word(nums)
+ date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+ result = date_str.parseString('12/31/1999')
+ print(type(result), repr(result)) # -> <class 'pyparsing.ParseResults'> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]})
+
+ result_dict = result.asDict()
+ print(type(result_dict), repr(result_dict)) # -> <class 'dict'> {'day': '1999', 'year': '12', 'month': '31'}
+
+ # even though a ParseResults supports dict-like access, sometime you just need to have a dict
+ import json
+ print(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializable
+ print(json.dumps(result.asDict())) # -> {"month": "31", "day": "1999", "year": "12"}
+ """
+ if PY_3:
+ item_fn = self.items
+ else:
+ item_fn = self.iteritems
+
+ def toItem(obj):
+ if isinstance(obj, ParseResults):
+ if obj.haskeys():
+ return obj.asDict()
+ else:
+ return [toItem(v) for v in obj]
+ else:
+ return obj
+
+ return dict((k,toItem(v)) for k,v in item_fn())
+
+ def copy( self ):
+ """
+ Returns a new copy of a C{ParseResults} object.
+ """
+ ret = ParseResults( self.__toklist )
+ ret.__tokdict = self.__tokdict.copy()
+ ret.__parent = self.__parent
+ ret.__accumNames.update( self.__accumNames )
+ ret.__name = self.__name
+ return ret
+
+ def asXML( self, doctag=None, namedItemsOnly=False, indent="", formatted=True ):
+ """
+ (Deprecated) Returns the parse results as XML. Tags are created for tokens and lists that have defined results names.
+ """
+ nl = "\n"
+ out = []
+ namedItems = dict((v[1],k) for (k,vlist) in self.__tokdict.items()
+ for v in vlist)
+ nextLevelIndent = indent + " "
+
+ # collapse out indents if formatting is not desired
+ if not formatted:
+ indent = ""
+ nextLevelIndent = ""
+ nl = ""
+
+ selfTag = None
+ if doctag is not None:
+ selfTag = doctag
+ else:
+ if self.__name:
+ selfTag = self.__name
+
+ if not selfTag:
+ if namedItemsOnly:
+ return ""
+ else:
+ selfTag = "ITEM"
+
+ out += [ nl, indent, "<", selfTag, ">" ]
+
+ for i,res in enumerate(self.__toklist):
+ if isinstance(res,ParseResults):
+ if i in namedItems:
+ out += [ res.asXML(namedItems[i],
+ namedItemsOnly and doctag is None,
+ nextLevelIndent,
+ formatted)]
+ else:
+ out += [ res.asXML(None,
+ namedItemsOnly and doctag is None,
+ nextLevelIndent,
+ formatted)]
+ else:
+ # individual token, see if there is a name for it
+ resTag = None
+ if i in namedItems:
+ resTag = namedItems[i]
+ if not resTag:
+ if namedItemsOnly:
+ continue
+ else:
+ resTag = "ITEM"
+ xmlBodyText = _xml_escape(_ustr(res))
+ out += [ nl, nextLevelIndent, "<", resTag, ">",
+ xmlBodyText,
+ "</", resTag, ">" ]
+
+ out += [ nl, indent, "</", selfTag, ">" ]
+ return "".join(out)
+
+ def __lookup(self,sub):
+ for k,vlist in self.__tokdict.items():
+ for v,loc in vlist:
+ if sub is v:
+ return k
+ return None
+
+ def getName(self):
+ r"""
+ Returns the results name for this token expression. Useful when several
+ different expressions might match at a particular location.
+
+ Example::
+ integer = Word(nums)
+ ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d")
+ house_number_expr = Suppress('#') + Word(nums, alphanums)
+ user_data = (Group(house_number_expr)("house_number")
+ | Group(ssn_expr)("ssn")
+ | Group(integer)("age"))
+ user_info = OneOrMore(user_data)
+
+ result = user_info.parseString("22 111-22-3333 #221B")
+ for item in result:
+ print(item.getName(), ':', item[0])
+ prints::
+ age : 22
+ ssn : 111-22-3333
+ house_number : 221B
+ """
+ if self.__name:
+ return self.__name
+ elif self.__parent:
+ par = self.__parent()
+ if par:
+ return par.__lookup(self)
+ else:
+ return None
+ elif (len(self) == 1 and
+ len(self.__tokdict) == 1 and
+ next(iter(self.__tokdict.values()))[0][1] in (0,-1)):
+ return next(iter(self.__tokdict.keys()))
+ else:
+ return None
+
+ def dump(self, indent='', depth=0, full=True):
+ """
+ Diagnostic method for listing out the contents of a C{ParseResults}.
+ Accepts an optional C{indent} argument so that this string can be embedded
+ in a nested display of other data.
+
+ Example::
+ integer = Word(nums)
+ date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+ result = date_str.parseString('12/31/1999')
+ print(result.dump())
+ prints::
+ ['12', '/', '31', '/', '1999']
+ - day: 1999
+ - month: 31
+ - year: 12
+ """
+ out = []
+ NL = '\n'
+ out.append( indent+_ustr(self.asList()) )
+ if full:
+ if self.haskeys():
+ items = sorted((str(k), v) for k,v in self.items())
+ for k,v in items:
+ if out:
+ out.append(NL)
+ out.append( "%s%s- %s: " % (indent,(' '*depth), k) )
+ if isinstance(v,ParseResults):
+ if v:
+ out.append( v.dump(indent,depth+1) )
+ else:
+ out.append(_ustr(v))
+ else:
+ out.append(repr(v))
+ elif any(isinstance(vv,ParseResults) for vv in self):
+ v = self
+ for i,vv in enumerate(v):
+ if isinstance(vv,ParseResults):
+ out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),vv.dump(indent,depth+1) ))
+ else:
+ out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),_ustr(vv)))
+
+ return "".join(out)
+
+ def pprint(self, *args, **kwargs):
+ """
+ Pretty-printer for parsed results as a list, using the C{pprint} module.
+ Accepts additional positional or keyword args as defined for the
+ C{pprint.pprint} method. (U{http://docs.python.org/3/library/pprint.html#pprint.pprint})
+
+ Example::
+ ident = Word(alphas, alphanums)
+ num = Word(nums)
+ func = Forward()
+ term = ident | num | Group('(' + func + ')')
+ func <<= ident + Group(Optional(delimitedList(term)))
+ result = func.parseString("fna a,b,(fnb c,d,200),100")
+ result.pprint(width=40)
+ prints::
+ ['fna',
+ ['a',
+ 'b',
+ ['(', 'fnb', ['c', 'd', '200'], ')'],
+ '100']]
+ """
+ pprint.pprint(self.asList(), *args, **kwargs)
+
+ # add support for pickle protocol
+ def __getstate__(self):
+ return ( self.__toklist,
+ ( self.__tokdict.copy(),
+ self.__parent is not None and self.__parent() or None,
+ self.__accumNames,
+ self.__name ) )
+
+ def __setstate__(self,state):
+ self.__toklist = state[0]
+ (self.__tokdict,
+ par,
+ inAccumNames,
+ self.__name) = state[1]
+ self.__accumNames = {}
+ self.__accumNames.update(inAccumNames)
+ if par is not None:
+ self.__parent = wkref(par)
+ else:
+ self.__parent = None
+
+ def __getnewargs__(self):
+ return self.__toklist, self.__name, self.__asList, self.__modal
+
+ def __dir__(self):
+ return (dir(type(self)) + list(self.keys()))
+
+MutableMapping.register(ParseResults)
+
+def col (loc,strg):
+ """Returns current column within a string, counting newlines as line separators.
+ The first column is number 1.
+
+ Note: the default parsing behavior is to expand tabs in the input string
+ before starting the parsing process. See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information
+ on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
+ consistent view of the parsed string, the parse location, and line and column
+ positions within the parsed string.
+ """
+ s = strg
+ return 1 if 0<loc<len(s) and s[loc-1] == '\n' else loc - s.rfind("\n", 0, loc)
+
+def lineno(loc,strg):
+ """Returns current line number within a string, counting newlines as line separators.
+ The first line is number 1.
+
+ Note: the default parsing behavior is to expand tabs in the input string
+ before starting the parsing process. See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information
+ on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
+ consistent view of the parsed string, the parse location, and line and column
+ positions within the parsed string.
+ """
+ return strg.count("\n",0,loc) + 1
+
+def line( loc, strg ):
+ """Returns the line of text containing loc within a string, counting newlines as line separators.
+ """
+ lastCR = strg.rfind("\n", 0, loc)
+ nextCR = strg.find("\n", loc)
+ if nextCR >= 0:
+ return strg[lastCR+1:nextCR]
+ else:
+ return strg[lastCR+1:]
+
+def _defaultStartDebugAction( instring, loc, expr ):
+ print (("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % ( lineno(loc,instring), col(loc,instring) )))
+
+def _defaultSuccessDebugAction( instring, startloc, endloc, expr, toks ):
+ print ("Matched " + _ustr(expr) + " -> " + str(toks.asList()))
+
+def _defaultExceptionDebugAction( instring, loc, expr, exc ):
+ print ("Exception raised:" + _ustr(exc))
+
+def nullDebugAction(*args):
+ """'Do-nothing' debug action, to suppress debugging output during parsing."""
+ pass
+
+# Only works on Python 3.x - nonlocal is toxic to Python 2 installs
+#~ 'decorator to trim function calls to match the arity of the target'
+#~ def _trim_arity(func, maxargs=3):
+ #~ if func in singleArgBuiltins:
+ #~ return lambda s,l,t: func(t)
+ #~ limit = 0
+ #~ foundArity = False
+ #~ def wrapper(*args):
+ #~ nonlocal limit,foundArity
+ #~ while 1:
+ #~ try:
+ #~ ret = func(*args[limit:])
+ #~ foundArity = True
+ #~ return ret
+ #~ except TypeError:
+ #~ if limit == maxargs or foundArity:
+ #~ raise
+ #~ limit += 1
+ #~ continue
+ #~ return wrapper
+
+# this version is Python 2.x-3.x cross-compatible
+'decorator to trim function calls to match the arity of the target'
+def _trim_arity(func, maxargs=2):
+ if func in singleArgBuiltins:
+ return lambda s,l,t: func(t)
+ limit = [0]
+ foundArity = [False]
+
+ # traceback return data structure changed in Py3.5 - normalize back to plain tuples
+ if system_version[:2] >= (3,5):
+ def extract_stack(limit=0):
+ # special handling for Python 3.5.0 - extra deep call stack by 1
+ offset = -3 if system_version == (3,5,0) else -2
+ frame_summary = traceback.extract_stack(limit=-offset+limit-1)[offset]
+ return [frame_summary[:2]]
+ def extract_tb(tb, limit=0):
+ frames = traceback.extract_tb(tb, limit=limit)
+ frame_summary = frames[-1]
+ return [frame_summary[:2]]
+ else:
+ extract_stack = traceback.extract_stack
+ extract_tb = traceback.extract_tb
+
+ # synthesize what would be returned by traceback.extract_stack at the call to
+ # user's parse action 'func', so that we don't incur call penalty at parse time
+
+ LINE_DIFF = 6
+ # IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND
+ # THE CALL TO FUNC INSIDE WRAPPER, LINE_DIFF MUST BE MODIFIED!!!!
+ this_line = extract_stack(limit=2)[-1]
+ pa_call_line_synth = (this_line[0], this_line[1]+LINE_DIFF)
+
+ def wrapper(*args):
+ while 1:
+ try:
+ ret = func(*args[limit[0]:])
+ foundArity[0] = True
+ return ret
+ except TypeError:
+ # re-raise TypeErrors if they did not come from our arity testing
+ if foundArity[0]:
+ raise
+ else:
+ try:
+ tb = sys.exc_info()[-1]
+ if not extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth:
+ raise
+ finally:
+ del tb
+
+ if limit[0] <= maxargs:
+ limit[0] += 1
+ continue
+ raise
+
+ # copy func name to wrapper for sensible debug output
+ func_name = "<parse action>"
+ try:
+ func_name = getattr(func, '__name__',
+ getattr(func, '__class__').__name__)
+ except Exception:
+ func_name = str(func)
+ wrapper.__name__ = func_name
+
+ return wrapper
+
+class ParserElement(object):
+ """Abstract base level parser element class."""
+ DEFAULT_WHITE_CHARS = " \n\t\r"
+ verbose_stacktrace = False
+
+ @staticmethod
+ def setDefaultWhitespaceChars( chars ):
+ r"""
+ Overrides the default whitespace chars
+
+ Example::
+ # default whitespace chars are space, <TAB> and newline
+ OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def', 'ghi', 'jkl']
+
+ # change to just treat newline as significant
+ ParserElement.setDefaultWhitespaceChars(" \t")
+ OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def']
+ """
+ ParserElement.DEFAULT_WHITE_CHARS = chars
+
+ @staticmethod
+ def inlineLiteralsUsing(cls):
+ """
+ Set class to be used for inclusion of string literals into a parser.
+
+ Example::
+ # default literal class used is Literal
+ integer = Word(nums)
+ date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+ date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31']
+
+
+ # change to Suppress
+ ParserElement.inlineLiteralsUsing(Suppress)
+ date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+ date_str.parseString("1999/12/31") # -> ['1999', '12', '31']
+ """
+ ParserElement._literalStringClass = cls
+
+ def __init__( self, savelist=False ):
+ self.parseAction = list()
+ self.failAction = None
+ #~ self.name = "<unknown>" # don't define self.name, let subclasses try/except upcall
+ self.strRepr = None
+ self.resultsName = None
+ self.saveAsList = savelist
+ self.skipWhitespace = True
+ self.whiteChars = ParserElement.DEFAULT_WHITE_CHARS
+ self.copyDefaultWhiteChars = True
+ self.mayReturnEmpty = False # used when checking for left-recursion
+ self.keepTabs = False
+ self.ignoreExprs = list()
+ self.debug = False
+ self.streamlined = False
+ self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index
+ self.errmsg = ""
+ self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all)
+ self.debugActions = ( None, None, None ) #custom debug actions
+ self.re = None
+ self.callPreparse = True # used to avoid redundant calls to preParse
+ self.callDuringTry = False
+
+ def copy( self ):
+ """
+ Make a copy of this C{ParserElement}. Useful for defining different parse actions
+ for the same parsing pattern, using copies of the original parse element.
+
+ Example::
+ integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
+ integerK = integer.copy().addParseAction(lambda toks: toks[0]*1024) + Suppress("K")
+ integerM = integer.copy().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M")
+
+ print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M"))
+ prints::
+ [5120, 100, 655360, 268435456]
+ Equivalent form of C{expr.copy()} is just C{expr()}::
+ integerM = integer().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M")
+ """
+ cpy = copy.copy( self )
+ cpy.parseAction = self.parseAction[:]
+ cpy.ignoreExprs = self.ignoreExprs[:]
+ if self.copyDefaultWhiteChars:
+ cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS
+ return cpy
+
+ def setName( self, name ):
+ """
+ Define name for this expression, makes debugging and exception messages clearer.
+
+ Example::
+ Word(nums).parseString("ABC") # -> Exception: Expected W:(0123...) (at char 0), (line:1, col:1)
+ Word(nums).setName("integer").parseString("ABC") # -> Exception: Expected integer (at char 0), (line:1, col:1)
+ """
+ self.name = name
+ self.errmsg = "Expected " + self.name
+ if hasattr(self,"exception"):
+ self.exception.msg = self.errmsg
+ return self
+
+ def setResultsName( self, name, listAllMatches=False ):
+ """
+ Define name for referencing matching tokens as a nested attribute
+ of the returned parse results.
+ NOTE: this returns a *copy* of the original C{ParserElement} object;
+ this is so that the client can define a basic element, such as an
+ integer, and reference it in multiple places with different names.
+
+ You can also set results names using the abbreviated syntax,
+ C{expr("name")} in place of C{expr.setResultsName("name")} -
+ see L{I{__call__}<__call__>}.
+
+ Example::
+ date_str = (integer.setResultsName("year") + '/'
+ + integer.setResultsName("month") + '/'
+ + integer.setResultsName("day"))
+
+ # equivalent form:
+ date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+ """
+ newself = self.copy()
+ if name.endswith("*"):
+ name = name[:-1]
+ listAllMatches=True
+ newself.resultsName = name
+ newself.modalResults = not listAllMatches
+ return newself
+
+ def setBreak(self,breakFlag = True):
+ """Method to invoke the Python pdb debugger when this element is
+ about to be parsed. Set C{breakFlag} to True to enable, False to
+ disable.
+ """
+ if breakFlag:
+ _parseMethod = self._parse
+ def breaker(instring, loc, doActions=True, callPreParse=True):
+ import pdb
+ pdb.set_trace()
+ return _parseMethod( instring, loc, doActions, callPreParse )
+ breaker._originalParseMethod = _parseMethod
+ self._parse = breaker
+ else:
+ if hasattr(self._parse,"_originalParseMethod"):
+ self._parse = self._parse._originalParseMethod
+ return self
+
+ def setParseAction( self, *fns, **kwargs ):
+ """
+ Define one or more actions to perform when successfully matching parse element definition.
+ Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)},
+ C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where:
+ - s = the original string being parsed (see note below)
+ - loc = the location of the matching substring
+ - toks = a list of the matched tokens, packaged as a C{L{ParseResults}} object
+ If the functions in fns modify the tokens, they can return them as the return
+ value from fn, and the modified list of tokens will replace the original.
+ Otherwise, fn does not need to return any value.
+
+ Optional keyword arguments:
+ - callDuringTry = (default=C{False}) indicate if parse action should be run during lookaheads and alternate testing
+
+ Note: the default parsing behavior is to expand tabs in the input string
+ before starting the parsing process. See L{I{parseString}<parseString>} for more information
+ on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
+ consistent view of the parsed string, the parse location, and line and column
+ positions within the parsed string.
+
+ Example::
+ integer = Word(nums)
+ date_str = integer + '/' + integer + '/' + integer
+
+ date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31']
+
+ # use parse action to convert to ints at parse time
+ integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
+ date_str = integer + '/' + integer + '/' + integer
+
+ # note that integer fields are now ints, not strings
+ date_str.parseString("1999/12/31") # -> [1999, '/', 12, '/', 31]
+ """
+ self.parseAction = list(map(_trim_arity, list(fns)))
+ self.callDuringTry = kwargs.get("callDuringTry", False)
+ return self
+
+ def addParseAction( self, *fns, **kwargs ):
+ """
+ Add one or more parse actions to expression's list of parse actions. See L{I{setParseAction}<setParseAction>}.
+
+ See examples in L{I{copy}<copy>}.
+ """
+ self.parseAction += list(map(_trim_arity, list(fns)))
+ self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
+ return self
+
+ def addCondition(self, *fns, **kwargs):
+ """Add a boolean predicate function to expression's list of parse actions. See
+ L{I{setParseAction}<setParseAction>} for function call signatures. Unlike C{setParseAction},
+ functions passed to C{addCondition} need to return boolean success/fail of the condition.
+
+ Optional keyword arguments:
+ - message = define a custom message to be used in the raised exception
+ - fatal = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException
+
+ Example::
+ integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
+ year_int = integer.copy()
+ year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later")
+ date_str = year_int + '/' + integer + '/' + integer
+
+ result = date_str.parseString("1999/12/31") # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1)
+ """
+ msg = kwargs.get("message", "failed user-defined condition")
+ exc_type = ParseFatalException if kwargs.get("fatal", False) else ParseException
+ for fn in fns:
+ def pa(s,l,t):
+ if not bool(_trim_arity(fn)(s,l,t)):
+ raise exc_type(s,l,msg)
+ self.parseAction.append(pa)
+ self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
+ return self
+
+ def setFailAction( self, fn ):
+ """Define action to perform if parsing fails at this expression.
+ Fail acton fn is a callable function that takes the arguments
+ C{fn(s,loc,expr,err)} where:
+ - s = string being parsed
+ - loc = location where expression match was attempted and failed
+ - expr = the parse expression that failed
+ - err = the exception thrown
+ The function returns no value. It may throw C{L{ParseFatalException}}
+ if it is desired to stop parsing immediately."""
+ self.failAction = fn
+ return self
+
+ def _skipIgnorables( self, instring, loc ):
+ exprsFound = True
+ while exprsFound:
+ exprsFound = False
+ for e in self.ignoreExprs:
+ try:
+ while 1:
+ loc,dummy = e._parse( instring, loc )
+ exprsFound = True
+ except ParseException:
+ pass
+ return loc
+
+ def preParse( self, instring, loc ):
+ if self.ignoreExprs:
+ loc = self._skipIgnorables( instring, loc )
+
+ if self.skipWhitespace:
+ wt = self.whiteChars
+ instrlen = len(instring)
+ while loc < instrlen and instring[loc] in wt:
+ loc += 1
+
+ return loc
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ return loc, []
+
+ def postParse( self, instring, loc, tokenlist ):
+ return tokenlist
+
+ #~ @profile
+ def _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ):
+ debugging = ( self.debug ) #and doActions )
+
+ if debugging or self.failAction:
+ #~ print ("Match",self,"at loc",loc,"(%d,%d)" % ( lineno(loc,instring), col(loc,instring) ))
+ if (self.debugActions[0] ):
+ self.debugActions[0]( instring, loc, self )
+ if callPreParse and self.callPreparse:
+ preloc = self.preParse( instring, loc )
+ else:
+ preloc = loc
+ tokensStart = preloc
+ try:
+ try:
+ loc,tokens = self.parseImpl( instring, preloc, doActions )
+ except IndexError:
+ raise ParseException( instring, len(instring), self.errmsg, self )
+ except ParseBaseException as err:
+ #~ print ("Exception raised:", err)
+ if self.debugActions[2]:
+ self.debugActions[2]( instring, tokensStart, self, err )
+ if self.failAction:
+ self.failAction( instring, tokensStart, self, err )
+ raise
+ else:
+ if callPreParse and self.callPreparse:
+ preloc = self.preParse( instring, loc )
+ else:
+ preloc = loc
+ tokensStart = preloc
+ if self.mayIndexError or preloc >= len(instring):
+ try:
+ loc,tokens = self.parseImpl( instring, preloc, doActions )
+ except IndexError:
+ raise ParseException( instring, len(instring), self.errmsg, self )
+ else:
+ loc,tokens = self.parseImpl( instring, preloc, doActions )
+
+ tokens = self.postParse( instring, loc, tokens )
+
+ retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults )
+ if self.parseAction and (doActions or self.callDuringTry):
+ if debugging:
+ try:
+ for fn in self.parseAction:
+ tokens = fn( instring, tokensStart, retTokens )
+ if tokens is not None:
+ retTokens = ParseResults( tokens,
+ self.resultsName,
+ asList=self.saveAsList and isinstance(tokens,(ParseResults,list)),
+ modal=self.modalResults )
+ except ParseBaseException as err:
+ #~ print "Exception raised in user parse action:", err
+ if (self.debugActions[2] ):
+ self.debugActions[2]( instring, tokensStart, self, err )
+ raise
+ else:
+ for fn in self.parseAction:
+ tokens = fn( instring, tokensStart, retTokens )
+ if tokens is not None:
+ retTokens = ParseResults( tokens,
+ self.resultsName,
+ asList=self.saveAsList and isinstance(tokens,(ParseResults,list)),
+ modal=self.modalResults )
+ if debugging:
+ #~ print ("Matched",self,"->",retTokens.asList())
+ if (self.debugActions[1] ):
+ self.debugActions[1]( instring, tokensStart, loc, self, retTokens )
+
+ return loc, retTokens
+
+ def tryParse( self, instring, loc ):
+ try:
+ return self._parse( instring, loc, doActions=False )[0]
+ except ParseFatalException:
+ raise ParseException( instring, loc, self.errmsg, self)
+
+ def canParseNext(self, instring, loc):
+ try:
+ self.tryParse(instring, loc)
+ except (ParseException, IndexError):
+ return False
+ else:
+ return True
+
+ class _UnboundedCache(object):
+ def __init__(self):
+ cache = {}
+ self.not_in_cache = not_in_cache = object()
+
+ def get(self, key):
+ return cache.get(key, not_in_cache)
+
+ def set(self, key, value):
+ cache[key] = value
+
+ def clear(self):
+ cache.clear()
+
+ def cache_len(self):
+ return len(cache)
+
+ self.get = types.MethodType(get, self)
+ self.set = types.MethodType(set, self)
+ self.clear = types.MethodType(clear, self)
+ self.__len__ = types.MethodType(cache_len, self)
+
+ if _OrderedDict is not None:
+ class _FifoCache(object):
+ def __init__(self, size):
+ self.not_in_cache = not_in_cache = object()
+
+ cache = _OrderedDict()
+
+ def get(self, key):
+ return cache.get(key, not_in_cache)
+
+ def set(self, key, value):
+ cache[key] = value
+ while len(cache) > size:
+ try:
+ cache.popitem(False)
+ except KeyError:
+ pass
+
+ def clear(self):
+ cache.clear()
+
+ def cache_len(self):
+ return len(cache)
+
+ self.get = types.MethodType(get, self)
+ self.set = types.MethodType(set, self)
+ self.clear = types.MethodType(clear, self)
+ self.__len__ = types.MethodType(cache_len, self)
+
+ else:
+ class _FifoCache(object):
+ def __init__(self, size):
+ self.not_in_cache = not_in_cache = object()
+
+ cache = {}
+ key_fifo = collections.deque([], size)
+
+ def get(self, key):
+ return cache.get(key, not_in_cache)
+
+ def set(self, key, value):
+ cache[key] = value
+ while len(key_fifo) > size:
+ cache.pop(key_fifo.popleft(), None)
+ key_fifo.append(key)
+
+ def clear(self):
+ cache.clear()
+ key_fifo.clear()
+
+ def cache_len(self):
+ return len(cache)
+
+ self.get = types.MethodType(get, self)
+ self.set = types.MethodType(set, self)
+ self.clear = types.MethodType(clear, self)
+ self.__len__ = types.MethodType(cache_len, self)
+
+ # argument cache for optimizing repeated calls when backtracking through recursive expressions
+ packrat_cache = {} # this is set later by enabledPackrat(); this is here so that resetCache() doesn't fail
+ packrat_cache_lock = RLock()
+ packrat_cache_stats = [0, 0]
+
+ # this method gets repeatedly called during backtracking with the same arguments -
+ # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression
+ def _parseCache( self, instring, loc, doActions=True, callPreParse=True ):
+ HIT, MISS = 0, 1
+ lookup = (self, instring, loc, callPreParse, doActions)
+ with ParserElement.packrat_cache_lock:
+ cache = ParserElement.packrat_cache
+ value = cache.get(lookup)
+ if value is cache.not_in_cache:
+ ParserElement.packrat_cache_stats[MISS] += 1
+ try:
+ value = self._parseNoCache(instring, loc, doActions, callPreParse)
+ except ParseBaseException as pe:
+ # cache a copy of the exception, without the traceback
+ cache.set(lookup, pe.__class__(*pe.args))
+ raise
+ else:
+ cache.set(lookup, (value[0], value[1].copy()))
+ return value
+ else:
+ ParserElement.packrat_cache_stats[HIT] += 1
+ if isinstance(value, Exception):
+ raise value
+ return (value[0], value[1].copy())
+
+ _parse = _parseNoCache
+
+ @staticmethod
+ def resetCache():
+ ParserElement.packrat_cache.clear()
+ ParserElement.packrat_cache_stats[:] = [0] * len(ParserElement.packrat_cache_stats)
+
    # Class-level flag: packrat memoization stays off until enablePackrat() is called.
    _packratEnabled = False
    @staticmethod
    def enablePackrat(cache_size_limit=128):
        """Enables "packrat" parsing, which adds memoizing to the parsing logic.
        Repeated parse attempts at the same string location (which happens
        often in many complex grammars) can immediately return a cached value,
        instead of re-executing parsing/validating code. Memoizing is done of
        both valid results and parsing exceptions.

        Parameters:
         - cache_size_limit - (default=C{128}) - if an integer value is provided
           will limit the size of the packrat cache; if None is passed, then
           the cache size will be unbounded; if 0 is passed, the cache will
           be effectively disabled.

        This speedup may break existing programs that use parse actions that
        have side-effects. For this reason, packrat parsing is disabled when
        you first import pyparsing. To activate the packrat feature, your
        program must call the class method C{ParserElement.enablePackrat()}. If
        your program uses C{psyco} to "compile as you go", you must call
        C{enablePackrat} before calling C{psyco.full()}. If you do not do this,
        Python will crash. For best results, call C{enablePackrat()} immediately
        after importing pyparsing.

        Example::
            import pyparsing
            pyparsing.ParserElement.enablePackrat()
        """
        # Idempotent: once enabled, subsequent calls are no-ops.
        if not ParserElement._packratEnabled:
            ParserElement._packratEnabled = True
            # Select cache implementation from the requested size limit.
            if cache_size_limit is None:
                ParserElement.packrat_cache = ParserElement._UnboundedCache()
            else:
                ParserElement.packrat_cache = ParserElement._FifoCache(cache_size_limit)
            # Rebind the parse entry point to the memoizing wrapper.
            ParserElement._parse = ParserElement._parseCache
+
    def parseString( self, instring, parseAll=False ):
        """
        Execute the parse expression with the given string.
        This is the main interface to the client code, once the complete
        expression has been built.

        If you want the grammar to require that the entire input string be
        successfully parsed, then set C{parseAll} to True (equivalent to ending
        the grammar with C{L{StringEnd()}}).

        Note: C{parseString} implicitly calls C{expandtabs()} on the input string,
        in order to report proper column numbers in parse actions.
        If the input string contains tabs and
        the grammar uses parse actions that use the C{loc} argument to index into the
        string being parsed, you can ensure you have a consistent view of the input
        string by:
         - calling C{parseWithTabs} on your grammar before calling C{parseString}
           (see L{I{parseWithTabs}<parseWithTabs>})
         - define your parse action using the full C{(s,loc,toks)} signature, and
           reference the input string using the parse action's C{s} argument
         - explicitly expand the tabs in your input string before calling
           C{parseString}

        Example::
            Word('a').parseString('aaaaabaaa')  # -> ['aaaaa']
            Word('a').parseString('aaaaabaaa', parseAll=True)  # -> Exception: Expected end of text
        """
        # Start with an empty packrat cache - any cached entries would be keyed
        # to a previously parsed string.
        ParserElement.resetCache()
        if not self.streamlined:
            self.streamline()
            #~ self.saveAsList = True
        for e in self.ignoreExprs:
            e.streamline()
        if not self.keepTabs:
            instring = instring.expandtabs()
        try:
            loc, tokens = self._parse( instring, 0 )
            if parseAll:
                # Skip trailing ignorables/whitespace, then demand end-of-text.
                loc = self.preParse( instring, loc )
                se = Empty() + StringEnd()
                se._parse( instring, loc )
        except ParseBaseException as exc:
            if ParserElement.verbose_stacktrace:
                raise
            else:
                # catch and re-raise exception from here, clears out pyparsing internal stack trace
                raise exc
        else:
            return tokens
+
    def scanString( self, instring, maxMatches=_MAX_INT, overlap=False ):
        """
        Scan the input string for expression matches. Each match will return the
        matching tokens, start location, and end location. May be called with optional
        C{maxMatches} argument, to clip scanning after 'n' matches are found. If
        C{overlap} is specified, then overlapping matches will be reported.

        Note that the start and end locations are reported relative to the string
        being parsed. See L{I{parseString}<parseString>} for more information on parsing
        strings with embedded tabs.

        Example::
            source = "sldjf123lsdjjkf345sldkjf879lkjsfd987"
            print(source)
            for tokens,start,end in Word(alphas).scanString(source):
                print(' '*start + '^'*(end-start))
                print(' '*start + tokens[0])

        prints::

            sldjf123lsdjjkf345sldkjf879lkjsfd987
            ^^^^^
            sldjf
                    ^^^^^^^
                    lsdjjkf
                              ^^^^^^
                              sldkjf
                                       ^^^^^^
                                       lkjsfd
        """
        if not self.streamlined:
            self.streamline()
        for e in self.ignoreExprs:
            e.streamline()

        if not self.keepTabs:
            instring = _ustr(instring).expandtabs()
        instrlen = len(instring)
        loc = 0
        # Hoist bound-method lookups out of the scan loop; they are invoked
        # once per candidate position.
        preparseFn = self.preParse
        parseFn = self._parse
        ParserElement.resetCache()
        matches = 0
        try:
            while loc <= instrlen and matches < maxMatches:
                try:
                    preloc = preparseFn( instring, loc )
                    nextLoc,tokens = parseFn( instring, preloc, callPreParse=False )
                except ParseException:
                    # No match at this position - advance one char past the
                    # preparse point and try again.
                    loc = preloc+1
                else:
                    if nextLoc > loc:
                        matches += 1
                        yield tokens, preloc, nextLoc
                        if overlap:
                            # NOTE(review): 'nextloc' (lowercase) holds the
                            # preparse result, but the branch assigns 'nextLoc'
                            # (the match end). This mirrors upstream pyparsing,
                            # so it is preserved - confirm intent before changing.
                            nextloc = preparseFn( instring, loc )
                            if nextloc > loc:
                                loc = nextLoc
                            else:
                                loc += 1
                        else:
                            loc = nextLoc
                    else:
                        # Zero-width match - step forward to avoid looping forever.
                        loc = preloc+1
        except ParseBaseException as exc:
            if ParserElement.verbose_stacktrace:
                raise
            else:
                # catch and re-raise exception from here, clears out pyparsing internal stack trace
                raise exc
+
    def transformString( self, instring ):
        """
        Extension to C{L{scanString}}, to modify matching text with modified tokens that may
        be returned from a parse action. To use C{transformString}, define a grammar and
        attach a parse action to it that modifies the returned token list.
        Invoking C{transformString()} on a target string will then scan for matches,
        and replace the matched text patterns according to the logic in the parse
        action. C{transformString()} returns the resulting transformed string.

        Example::
            wd = Word(alphas)
            wd.setParseAction(lambda toks: toks[0].title())

            print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york."))
        Prints::
            Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York.
        """
        out = []
        # End position of the previous match; text between matches is copied through.
        lastE = 0
        # force preservation of <TAB>s, to minimize unwanted transformation of string, and to
        # keep string locs straight between transformString and scanString
        self.keepTabs = True
        try:
            for t,s,e in self.scanString( instring ):
                # Unmatched text before this match, verbatim.
                out.append( instring[lastE:s] )
                if t:
                    # Replacement may be a ParseResults, a list, or a scalar.
                    if isinstance(t,ParseResults):
                        out += t.asList()
                    elif isinstance(t,list):
                        out += t
                    else:
                        out.append(t)
                lastE = e
            # Trailing text after the final match.
            out.append(instring[lastE:])
            # Drop empty fragments, then flatten and join into a single string.
            out = [o for o in out if o]
            return "".join(map(_ustr,_flatten(out)))
        except ParseBaseException as exc:
            if ParserElement.verbose_stacktrace:
                raise
            else:
                # catch and re-raise exception from here, clears out pyparsing internal stack trace
                raise exc
+
    def searchString( self, instring, maxMatches=_MAX_INT ):
        """
        Another extension to C{L{scanString}}, simplifying the access to the tokens found
        to match the given parse expression. May be called with optional
        C{maxMatches} argument, to clip searching after 'n' matches are found.

        Example::
            # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters
            cap_word = Word(alphas.upper(), alphas.lower())

            print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity"))

            # the sum() builtin can be used to merge results into a single ParseResults object
            print(sum(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity")))
        prints::
            [['More'], ['Iron'], ['Lead'], ['Gold'], ['I'], ['Electricity']]
            ['More', 'Iron', 'Lead', 'Gold', 'I', 'Electricity']
        """
        try:
            # Collect just the token lists from scanString, discarding locations.
            return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ])
        except ParseBaseException as exc:
            if ParserElement.verbose_stacktrace:
                raise
            else:
                # catch and re-raise exception from here, clears out pyparsing internal stack trace
                raise exc
+
+ def split(self, instring, maxsplit=_MAX_INT, includeSeparators=False):
+ """
+ Generator method to split a string using the given expression as a separator.
+ May be called with optional C{maxsplit} argument, to limit the number of splits;
+ and the optional C{includeSeparators} argument (default=C{False}), if the separating
+ matching text should be included in the split results.
+
+ Example::
+ punc = oneOf(list(".,;:/-!?"))
+ print(list(punc.split("This, this?, this sentence, is badly punctuated!")))
+ prints::
+ ['This', ' this', '', ' this sentence', ' is badly punctuated', '']
+ """
+ splits = 0
+ last = 0
+ for t,s,e in self.scanString(instring, maxMatches=maxsplit):
+ yield instring[last:s]
+ if includeSeparators:
+ yield t[0]
+ last = e
+ yield instring[last:]
+
    def __add__(self, other ):
        """
        Implementation of + operator - returns C{L{And}}. Adding strings to a ParserElement
        converts them to L{Literal}s by default.

        Example::
            greet = Word(alphas) + "," + Word(alphas) + "!"
            hello = "Hello, World!"
            print (hello, "->", greet.parseString(hello))
        Prints::
            Hello, World! -> ['Hello', ',', 'World', '!']
        """
        # Promote plain strings to the configured literal class (Literal by default).
        if isinstance( other, basestring ):
            other = ParserElement._literalStringClass( other )
        if not isinstance( other, ParserElement ):
            # Incompatible operand: warn and return None rather than raising.
            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                    SyntaxWarning, stacklevel=2)
            return None
        return And( [ self, other ] )

    def __radd__(self, other ):
        """
        Implementation of + operator when left operand is not a C{L{ParserElement}}
        """
        if isinstance( other, basestring ):
            other = ParserElement._literalStringClass( other )
        if not isinstance( other, ParserElement ):
            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                    SyntaxWarning, stacklevel=2)
            return None
        # Delegate to __add__ with the operands in source order.
        return other + self

    def __sub__(self, other):
        """
        Implementation of - operator, returns C{L{And}} with error stop
        """
        if isinstance( other, basestring ):
            other = ParserElement._literalStringClass( other )
        if not isinstance( other, ParserElement ):
            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                    SyntaxWarning, stacklevel=2)
            return None
        # The _ErrorStop marker makes a failure after this point non-backtrackable.
        return self + And._ErrorStop() + other

    def __rsub__(self, other ):
        """
        Implementation of - operator when left operand is not a C{L{ParserElement}}
        """
        if isinstance( other, basestring ):
            other = ParserElement._literalStringClass( other )
        if not isinstance( other, ParserElement ):
            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                    SyntaxWarning, stacklevel=2)
            return None
        return other - self
+
+ def __mul__(self,other):
+ """
+ Implementation of * operator, allows use of C{expr * 3} in place of
+ C{expr + expr + expr}. Expressions may also me multiplied by a 2-integer
+ tuple, similar to C{{min,max}} multipliers in regular expressions. Tuples
+ may also include C{None} as in:
+ - C{expr*(n,None)} or C{expr*(n,)} is equivalent
+ to C{expr*n + L{ZeroOrMore}(expr)}
+ (read as "at least n instances of C{expr}")
+ - C{expr*(None,n)} is equivalent to C{expr*(0,n)}
+ (read as "0 to n instances of C{expr}")
+ - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)}
+ - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)}
+
+ Note that C{expr*(None,n)} does not raise an exception if
+ more than n exprs exist in the input stream; that is,
+ C{expr*(None,n)} does not enforce a maximum number of expr
+ occurrences. If this behavior is desired, then write
+ C{expr*(None,n) + ~expr}
+ """
+ if isinstance(other,int):
+ minElements, optElements = other,0
+ elif isinstance(other,tuple):
+ other = (other + (None, None))[:2]
+ if other[0] is None:
+ other = (0, other[1])
+ if isinstance(other[0],int) and other[1] is None:
+ if other[0] == 0:
+ return ZeroOrMore(self)
+ if other[0] == 1:
+ return OneOrMore(self)
+ else:
+ return self*other[0] + ZeroOrMore(self)
+ elif isinstance(other[0],int) and isinstance(other[1],int):
+ minElements, optElements = other
+ optElements -= minElements
+ else:
+ raise TypeError("cannot multiply 'ParserElement' and ('%s','%s') objects", type(other[0]),type(other[1]))
+ else:
+ raise TypeError("cannot multiply 'ParserElement' and '%s' objects", type(other))
+
+ if minElements < 0:
+ raise ValueError("cannot multiply ParserElement by negative value")
+ if optElements < 0:
+ raise ValueError("second tuple value must be greater or equal to first tuple value")
+ if minElements == optElements == 0:
+ raise ValueError("cannot multiply ParserElement by 0 or (0,0)")
+
+ if (optElements):
+ def makeOptionalList(n):
+ if n>1:
+ return Optional(self + makeOptionalList(n-1))
+ else:
+ return Optional(self)
+ if minElements:
+ if minElements == 1:
+ ret = self + makeOptionalList(optElements)
+ else:
+ ret = And([self]*minElements) + makeOptionalList(optElements)
+ else:
+ ret = makeOptionalList(optElements)
+ else:
+ if minElements == 1:
+ ret = self
+ else:
+ ret = And([self]*minElements)
+ return ret
+
+ def __rmul__(self, other):
+ return self.__mul__(other)
+
    def __or__(self, other ):
        """
        Implementation of | operator - returns C{L{MatchFirst}}
        """
        # Promote plain strings to the configured literal class (Literal by default).
        if isinstance( other, basestring ):
            other = ParserElement._literalStringClass( other )
        if not isinstance( other, ParserElement ):
            # Incompatible operand: warn and return None rather than raising.
            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                    SyntaxWarning, stacklevel=2)
            return None
        return MatchFirst( [ self, other ] )

    def __ror__(self, other ):
        """
        Implementation of | operator when left operand is not a C{L{ParserElement}}
        """
        if isinstance( other, basestring ):
            other = ParserElement._literalStringClass( other )
        if not isinstance( other, ParserElement ):
            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                    SyntaxWarning, stacklevel=2)
            return None
        return other | self

    def __xor__(self, other ):
        """
        Implementation of ^ operator - returns C{L{Or}} (longest-match alternation)
        """
        if isinstance( other, basestring ):
            other = ParserElement._literalStringClass( other )
        if not isinstance( other, ParserElement ):
            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                    SyntaxWarning, stacklevel=2)
            return None
        return Or( [ self, other ] )

    def __rxor__(self, other ):
        """
        Implementation of ^ operator when left operand is not a C{L{ParserElement}}
        """
        if isinstance( other, basestring ):
            other = ParserElement._literalStringClass( other )
        if not isinstance( other, ParserElement ):
            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                    SyntaxWarning, stacklevel=2)
            return None
        return other ^ self

    def __and__(self, other ):
        """
        Implementation of & operator - returns C{L{Each}} (all expressions, any order)
        """
        if isinstance( other, basestring ):
            other = ParserElement._literalStringClass( other )
        if not isinstance( other, ParserElement ):
            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                    SyntaxWarning, stacklevel=2)
            return None
        return Each( [ self, other ] )

    def __rand__(self, other ):
        """
        Implementation of & operator when left operand is not a C{L{ParserElement}}
        """
        if isinstance( other, basestring ):
            other = ParserElement._literalStringClass( other )
        if not isinstance( other, ParserElement ):
            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
                    SyntaxWarning, stacklevel=2)
            return None
        return other & self

    def __invert__( self ):
        """
        Implementation of ~ operator - returns C{L{NotAny}} (negative lookahead)
        """
        return NotAny( self )
+
    def __call__(self, name=None):
        """
        Shortcut for C{L{setResultsName}}, with C{listAllMatches=False}.

        If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will be
        passed as C{True}.

        If C{name} is omitted, same as calling C{L{copy}}.

        Example::
            # these are equivalent
            userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno")
            userdata = Word(alphas)("name") + Word(nums+"-")("socsecno")
        """
        if name is not None:
            # setResultsName itself handles the trailing-'*' convention.
            return self.setResultsName(name)
        else:
            # No name: behave like copy(), so expr() yields a fresh equivalent expression.
            return self.copy()
+
    def suppress( self ):
        """
        Suppresses the output of this C{ParserElement}; useful to keep punctuation from
        cluttering up returned output.
        """
        # Wrap rather than mutate: the original expression remains usable.
        return Suppress( self )

    def leaveWhitespace( self ):
        """
        Disables the skipping of whitespace before matching the characters in the
        C{ParserElement}'s defined pattern. This is normally only used internally by
        the pyparsing module, but may be needed in some whitespace-sensitive grammars.
        """
        self.skipWhitespace = False
        # Return self to allow fluent chaining.
        return self

    def setWhitespaceChars( self, chars ):
        """
        Overrides the default whitespace chars
        """
        self.skipWhitespace = True
        self.whiteChars = chars
        # Mark this element as having custom whitespace, so copies keep it.
        self.copyDefaultWhiteChars = False
        return self

    def parseWithTabs( self ):
        """
        Overrides default behavior to expand C{<TAB>}s to spaces before parsing the input string.
        Must be called before C{parseString} when the input grammar contains elements that
        match C{<TAB>} characters.
        """
        # keepTabs=True prevents parseString/scanString from calling expandtabs().
        self.keepTabs = True
        return self
+
    def ignore( self, other ):
        """
        Define expression to be ignored (e.g., comments) while doing pattern
        matching; may be called repeatedly, to define multiple comment or other
        ignorable patterns.

        Example::
            patt = OneOrMore(Word(alphas))
            patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj']

            patt.ignore(cStyleComment)
            patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd']
        """
        # Plain strings become suppressed literals.
        if isinstance(other, basestring):
            other = Suppress(other)

        if isinstance( other, Suppress ):
            # De-duplicate only for Suppress instances; other expressions are
            # always wrapped in a fresh Suppress(copy) and appended.
            if other not in self.ignoreExprs:
                self.ignoreExprs.append(other)
        else:
            self.ignoreExprs.append( Suppress( other.copy() ) )
        return self
+
    def setDebugActions( self, startAction, successAction, exceptionAction ):
        """
        Enable display of debugging messages while doing pattern matching.
        Each of the three callbacks may be None, in which case the module's
        default debug action is used for that event.
        """
        self.debugActions = (startAction or _defaultStartDebugAction,
                             successAction or _defaultSuccessDebugAction,
                             exceptionAction or _defaultExceptionDebugAction)
        self.debug = True
        return self
+
    def setDebug( self, flag=True ):
        """
        Enable display of debugging messages while doing pattern matching.
        Set C{flag} to True to enable, False to disable.

        Example::
            wd = Word(alphas).setName("alphaword")
            integer = Word(nums).setName("numword")
            term = wd | integer

            # turn on debugging for wd
            wd.setDebug()

            OneOrMore(term).parseString("abc 123 xyz 890")

        prints::
            Match alphaword at loc 0(1,1)
            Matched alphaword -> ['abc']
            Match alphaword at loc 3(1,4)
            Exception raised:Expected alphaword (at char 4), (line:1, col:5)
            Match alphaword at loc 7(1,8)
            Matched alphaword -> ['xyz']
            Match alphaword at loc 11(1,12)
            Exception raised:Expected alphaword (at char 12), (line:1, col:13)
            Match alphaword at loc 15(1,16)
            Exception raised:Expected alphaword (at char 15), (line:1, col:16)

        The output shown is that produced by the default debug actions - custom debug actions can be
        specified using L{setDebugActions}. Prior to attempting
        to match the C{wd} expression, the debugging message C{"Match <exprname> at loc <n>(<line>,<col>)"}
        is shown. Then if the parse succeeds, a C{"Matched"} message is shown, or an C{"Exception raised"}
        message is shown. Also note the use of L{setName} to assign a human-readable name to the expression,
        which makes debugging and exception messages easier to understand - for instance, the default
        name created for the C{Word} expression without calling C{setName} is C{"W:(ABCD...)"}.
        """
        if flag:
            # Enabling installs the module default actions (setDebugActions sets self.debug).
            self.setDebugActions( _defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction )
        else:
            # Disabling only clears the flag; any installed actions are left in place.
            self.debug = False
        return self
+
    def __str__( self ):
        # The element's display name (set by setName or generated by subclasses).
        return self.name

    def __repr__( self ):
        # repr mirrors str, routed through _ustr for py2/py3 unicode safety.
        return _ustr(self)

    def streamline( self ):
        # Base implementation just marks the element streamlined and drops any
        # cached string representation; container subclasses override this.
        self.streamlined = True
        self.strRepr = None
        return self

    def checkRecursion( self, parseElementList ):
        # Leaf elements cannot recurse; container subclasses override.
        pass

    def validate( self, validateTrace=[] ):
        """
        Check defined expressions for valid structure, check for infinite recursive definitions.
        """
        # NOTE(review): mutable default argument 'validateTrace=[]' - harmless
        # here since it is never mutated or read, but flagged for awareness.
        self.checkRecursion( [] )
+
    def parseFile( self, file_or_filename, parseAll=False ):
        """
        Execute the parse expression on the given file or filename.
        If a filename is specified (instead of a file object),
        the entire file is opened, read, and closed before parsing.
        """
        try:
            # Duck-typing: anything with .read() is treated as a file object.
            file_contents = file_or_filename.read()
        except AttributeError:
            # Otherwise treat it as a path; 'with' guarantees the file is closed.
            with open(file_or_filename, "r") as f:
                file_contents = f.read()
        try:
            return self.parseString(file_contents, parseAll)
        except ParseBaseException as exc:
            if ParserElement.verbose_stacktrace:
                raise
            else:
                # catch and re-raise exception from here, clears out pyparsing internal stack trace
                raise exc
+
    def __eq__(self,other):
        if isinstance(other, ParserElement):
            # Identity, or structural equality of all attributes.
            return self is other or vars(self) == vars(other)
        elif isinstance(other, basestring):
            # expr == "text" means "does expr match this string?"
            return self.matches(other)
        else:
            # NOTE(review): this compares the super() proxy OBJECT to 'other'
            # (no __eq__ call), so it is effectively always False for foreign
            # types; mirrors upstream pyparsing, preserved as-is.
            return super(ParserElement,self)==other

    def __ne__(self,other):
        # Defined explicitly for py2, which does not derive != from __eq__.
        return not (self == other)

    def __hash__(self):
        # Hash on identity: structurally equal elements may hash differently,
        # consistent with parse elements being used as distinct cache keys.
        return hash(id(self))

    def __req__(self,other):
        return self == other

    def __rne__(self,other):
        return not (self == other)
+
+ def matches(self, testString, parseAll=True):
+ """
+ Method for quick testing of a parser against a test string. Good for simple
+ inline microtests of sub expressions while building up larger parser.
+
+ Parameters:
+ - testString - to test against this expression for a match
+ - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests
+
+ Example::
+ expr = Word(nums)
+ assert expr.matches("100")
+ """
+ try:
+ self.parseString(_ustr(testString), parseAll=parseAll)
+ return True
+ except ParseBaseException:
+ return False
+
    def runTests(self, tests, parseAll=True, comment='#', fullDump=True, printResults=True, failureTests=False):
        """
        Execute the parse expression on a series of test strings, showing each
        test, the parsed results or where the parse failed. Quick and easy way to
        run a parse expression against a list of sample strings.

        Parameters:
         - tests - a list of separate test strings, or a multiline string of test strings
         - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests
         - comment - (default=C{'#'}) - expression for indicating embedded comments in the test
           string; pass None to disable comment filtering
         - fullDump - (default=C{True}) - dump results as list followed by results names in nested outline;
           if False, only dump nested list
         - printResults - (default=C{True}) prints test output to stdout
         - failureTests - (default=C{False}) indicates if these tests are expected to fail parsing

        Returns: a (success, results) tuple, where success indicates that all tests succeeded
        (or failed if C{failureTests} is True), and the results contain a list of lines of each
        test's output

        Example::
            number_expr = pyparsing_common.number.copy()

            result = number_expr.runTests('''
                # unsigned integer
                100
                # negative integer
                -100
                # float with scientific notation
                6.02e23
                # integer with scientific notation
                1e-12
                ''')
            print("Success" if result[0] else "Failed!")

            result = number_expr.runTests('''
                # stray character
                100Z
                # missing leading digit before '.'
                -.100
                # too many '.'
                3.14.159
                ''', failureTests=True)
            print("Success" if result[0] else "Failed!")
        prints::
            # unsigned integer
            100
            [100]

            # negative integer
            -100
            [-100]

            # float with scientific notation
            6.02e23
            [6.02e+23]

            # integer with scientific notation
            1e-12
            [1e-12]

            Success

            # stray character
            100Z
               ^
            FAIL: Expected end of text (at char 3), (line:1, col:4)

            # missing leading digit before '.'
            -.100
            ^
            FAIL: Expected {real number with scientific notation | real number | signed integer} (at char 0), (line:1, col:1)

            # too many '.'
            3.14.159
                ^
            FAIL: Expected end of text (at char 4), (line:1, col:5)

            Success

        Each test string must be on a single line. If you want to test a string that spans multiple
        lines, create a test like this::

            expr.runTest(r"this is a test\\n of strings that spans \\n 3 lines")

        (Note that this is a raw string literal, you must include the leading 'r'.)
        """
        # A multiline string is split into one test per (stripped) line.
        if isinstance(tests, basestring):
            tests = list(map(str.strip, tests.rstrip().splitlines()))
        if isinstance(comment, basestring):
            comment = Literal(comment)
        allResults = []
        comments = []
        success = True
        for t in tests:
            # Comment lines (and blank lines following comments) are gathered
            # and echoed above the next real test string.
            if comment is not None and comment.matches(t, False) or comments and not t:
                comments.append(t)
                continue
            if not t:
                continue
            out = ['\n'.join(comments), t]
            comments = []
            try:
                # Literal '\n' sequences in the test string become real newlines.
                t = t.replace(r'\n','\n')
                result = self.parseString(t, parseAll=parseAll)
                out.append(result.dump(full=fullDump))
                # A parse success counts as a pass only when failures were not expected.
                success = success and not failureTests
            except ParseBaseException as pe:
                fatal = "(FATAL)" if isinstance(pe, ParseFatalException) else ""
                if '\n' in t:
                    # Multi-line input: show the offending line, then a caret under the column.
                    out.append(line(pe.loc, t))
                    out.append(' '*(col(pe.loc,t)-1) + '^' + fatal)
                else:
                    out.append(' '*pe.loc + '^' + fatal)
                out.append("FAIL: " + str(pe))
                success = success and failureTests
                result = pe
            except Exception as exc:
                # Non-parse errors (e.g. from parse actions) are reported but do not abort the run.
                out.append("FAIL-EXCEPTION: " + str(exc))
                success = success and failureTests
                result = exc

            if printResults:
                if fullDump:
                    out.append('')
                print('\n'.join(out))

            allResults.append((t, result))

        return success, allResults
+
+
class Token(ParserElement):
    """
    Abstract C{ParserElement} subclass, for defining atomic matching patterns.
    Subclasses (Literal, Keyword, Word, ...) implement parseImpl themselves.
    """
    def __init__( self ):
        # Tokens never need list-saving behavior in the base class.
        super(Token,self).__init__( savelist=False )
+
+
class Empty(Token):
    """
    An empty token, will always match (consuming no input).
    """
    def __init__( self ):
        super(Empty,self).__init__()
        self.name = "Empty"
        self.mayReturnEmpty = True
        # Matching consumes nothing, so no index bounds check is needed.
        self.mayIndexError = False
+
+
class NoMatch(Token):
    """
    A token that will never match.
    """
    def __init__( self ):
        super(NoMatch,self).__init__()
        self.name = "NoMatch"
        self.mayReturnEmpty = True
        self.mayIndexError = False
        self.errmsg = "Unmatchable token"

    def parseImpl( self, instring, loc, doActions=True ):
        # Unconditional failure, by design.
        raise ParseException(instring, loc, self.errmsg, self)
+
+
class Literal(Token):
    """
    Token to exactly match a specified string.

    Example::
        Literal('blah').parseString('blah')  # -> ['blah']
        Literal('blah').parseString('blahfooblah')  # -> ['blah']
        Literal('blah').parseString('bla')  # -> Exception: Expected "blah"

    For case-insensitive matching, use L{CaselessLiteral}.

    For keyword matching (force word break before and after the matched string),
    use L{Keyword} or L{CaselessKeyword}.
    """
    def __init__( self, matchString ):
        super(Literal,self).__init__()
        self.match = matchString
        self.matchLen = len(matchString)
        try:
            # Cached for the fast-path single-character comparison in parseImpl.
            self.firstMatchChar = matchString[0]
        except IndexError:
            # Empty match string: degrade this instance into an Empty token.
            warnings.warn("null string passed to Literal; use Empty() instead",
                            SyntaxWarning, stacklevel=2)
            self.__class__ = Empty
        self.name = '"%s"' % _ustr(self.match)
        self.errmsg = "Expected " + self.name
        self.mayReturnEmpty = False
        self.mayIndexError = False

    # Performance tuning: this routine gets called a *lot*
    # if this is a single character match string and the first character matches,
    # short-circuit as quickly as possible, and avoid calling startswith
    #~ @profile
    def parseImpl( self, instring, loc, doActions=True ):
        if (instring[loc] == self.firstMatchChar and
            (self.matchLen==1 or instring.startswith(self.match,loc)) ):
            return loc+self.matchLen, self.match
        raise ParseException(instring, loc, self.errmsg, self)
# Short alias used internally when building composite expressions.
_L = Literal
# Strings combined with parse elements (e.g. expr + "text") become Literals.
ParserElement._literalStringClass = Literal
+
class Keyword(Token):
    """
    Token to exactly match a specified string as a keyword, that is, it must be
    immediately followed by a non-keyword character. Compare with C{L{Literal}}:
     - C{Literal("if")} will match the leading C{'if'} in C{'ifAndOnlyIf'}.
     - C{Keyword("if")} will not; it will only match the leading C{'if'} in C{'if x=1'}, or C{'if(y==2)'}
    Accepts two optional constructor arguments in addition to the keyword string:
     - C{identChars} is a string of characters that would be valid identifier characters,
       defaulting to all alphanumerics + "_" and "$"
     - C{caseless} allows case-insensitive matching, default is C{False}.

    Example::
        Keyword("start").parseString("start")  # -> ['start']
        Keyword("start").parseString("starting")  # -> Exception

    For case-insensitive matching, use L{CaselessKeyword}.
    """
    # Characters considered part of an identifier when testing word boundaries.
    DEFAULT_KEYWORD_CHARS = alphanums+"_$"

    def __init__( self, matchString, identChars=None, caseless=False ):
        super(Keyword,self).__init__()
        if identChars is None:
            identChars = Keyword.DEFAULT_KEYWORD_CHARS
        self.match = matchString
        self.matchLen = len(matchString)
        try:
            self.firstMatchChar = matchString[0]
        except IndexError:
            warnings.warn("null string passed to Keyword; use Empty() instead",
                            SyntaxWarning, stacklevel=2)
        self.name = '"%s"' % self.match
        self.errmsg = "Expected " + self.name
        self.mayReturnEmpty = False
        self.mayIndexError = False
        self.caseless = caseless
        if caseless:
            # Pre-uppercase both the keyword and the identifier set so the
            # caseless comparison in parseImpl needs only one .upper() call.
            self.caselessmatch = matchString.upper()
            identChars = identChars.upper()
        self.identChars = set(identChars)

    def parseImpl( self, instring, loc, doActions=True ):
        if self.caseless:
            # Caseless match plus word-boundary checks on both sides.
            if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and
                 (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) and
                 (loc == 0 or instring[loc-1].upper() not in self.identChars) ):
                return loc+self.matchLen, self.match
        else:
            # Exact match plus word-boundary checks on both sides.
            if (instring[loc] == self.firstMatchChar and
                (self.matchLen==1 or instring.startswith(self.match,loc)) and
                (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen] not in self.identChars) and
                (loc == 0 or instring[loc-1] not in self.identChars) ):
                return loc+self.matchLen, self.match
        raise ParseException(instring, loc, self.errmsg, self)

    def copy(self):
        # NOTE(review): the copy intentionally(?) resets identChars to the class
        # default rather than preserving a custom set - mirrors upstream pyparsing.
        c = super(Keyword,self).copy()
        c.identChars = Keyword.DEFAULT_KEYWORD_CHARS
        return c

    @staticmethod
    def setDefaultKeywordChars( chars ):
        """Overrides the default Keyword chars
        """
        Keyword.DEFAULT_KEYWORD_CHARS = chars
+
class CaselessLiteral(Literal):
    """
    Token to match a specified string, ignoring case of letters.
    Note: the matched results will always be in the case of the given
    match string, NOT the case of the input text.

    Example::
        OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD', 'CMD']

    (Contrast with example for L{CaselessKeyword}.)
    """
    def __init__( self, matchString ):
        # Store the match string uppercased so parseImpl can compare with a
        # single .upper() on the input slice.
        super(CaselessLiteral,self).__init__( matchString.upper() )
        # Preserve the defining literal.
        self.returnString = matchString
        self.name = "'%s'" % self.returnString
        self.errmsg = "Expected " + self.name

    def parseImpl( self, instring, loc, doActions=True ):
        if instring[ loc:loc+self.matchLen ].upper() == self.match:
            # Return the original-case defining string, not the input text.
            return loc+self.matchLen, self.returnString
        raise ParseException(instring, loc, self.errmsg, self)
+
class CaselessKeyword(Keyword):
    """
    Caseless version of L{Keyword}.

    Example::
        OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD']

    (Contrast with example for L{CaselessLiteral}.)
    """
    def __init__( self, matchString, identChars=None ):
        super(CaselessKeyword,self).__init__( matchString, identChars, caseless=True )

    def parseImpl( self, instring, loc, doActions=True ):
        # Mirror the caseless branch of Keyword.parseImpl: besides a caseless
        # match and a word boundary AFTER the keyword, also require a boundary
        # BEFORE it. (fix: the preceding-character check was missing here, so
        # e.g. scanning "xCMD" could match at loc 1 where Keyword would not.)
        if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and
             (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) and
             (loc == 0 or instring[loc-1].upper() not in self.identChars) ):
            return loc+self.matchLen, self.match
        raise ParseException(instring, loc, self.errmsg, self)
+
class CloseMatch(Token):
    """
    A variation on L{Literal} which matches "close" matches, that is,
    strings with at most 'n' mismatching characters. C{CloseMatch} takes parameters:
     - C{match_string} - string to be matched
     - C{maxMismatches} - (C{default=1}) maximum number of mismatches allowed to count as a match

    The results from a successful parse will contain the matched text from the input string and the following named results:
     - C{mismatches} - a list of the positions within the match_string where mismatches were found
     - C{original} - the original match_string used to compare against the input string

    If C{mismatches} is an empty list, then the match was an exact match.

    Example::
        patt = CloseMatch("ATCATCGAATGGA")
        patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']})
        patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1)

        # exact match
        patt.parseString("ATCATCGAATGGA") # -> (['ATCATCGAATGGA'], {'mismatches': [[]], 'original': ['ATCATCGAATGGA']})

        # close match allowing up to 2 mismatches
        patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2)
        patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']})
    """
    def __init__(self, match_string, maxMismatches=1):
        super(CloseMatch,self).__init__()
        self.name = match_string
        self.match_string = match_string
        self.maxMismatches = maxMismatches
        self.errmsg = "Expected %r (with up to %d mismatches)" % (self.match_string, self.maxMismatches)
        self.mayIndexError = False
        self.mayReturnEmpty = False

    def parseImpl( self, instring, loc, doActions=True ):
        start = loc
        instrlen = len(instring)
        maxloc = start + len(self.match_string)

        # Only attempt the comparison if enough input remains.
        if maxloc <= instrlen:
            match_string = self.match_string
            match_stringloc = 0
            mismatches = []
            maxMismatches = self.maxMismatches

            # Compare character-by-character, recording mismatch positions;
            # bail out via 'break' once the budget is exceeded.
            for match_stringloc,s_m in enumerate(zip(instring[loc:maxloc], self.match_string)):
                src,mat = s_m
                if src != mat:
                    mismatches.append(match_stringloc)
                    if len(mismatches) > maxMismatches:
                        break
            else:
                # for/else: loop completed without break => acceptable match.
                loc = match_stringloc + 1
                results = ParseResults([instring[start:loc]])
                results['original'] = self.match_string
                results['mismatches'] = mismatches
                return loc, results

        raise ParseException(instring, loc, self.errmsg, self)
+
+
+class Word(Token):
+    """
+    Token for matching words composed of allowed character sets.
+    Defined with string containing all allowed initial characters,
+    an optional string containing allowed body characters (if omitted,
+    defaults to the initial character set), and an optional minimum,
+    maximum, and/or exact length.  The default value for C{min} is 1 (a
+    minimum value < 1 is not valid); the default values for C{max} and C{exact}
+    are 0, meaning no maximum or exact length restriction. An optional
+    C{excludeChars} parameter can list characters that might be found in
+    the input C{bodyChars} string; useful to define a word of all printables
+    except for one or two characters, for instance.
+
+    L{srange} is useful for defining custom character set strings for defining
+    C{Word} expressions, using range notation from regular expression character sets.
+
+    A common mistake is to use C{Word} to match a specific literal string, as in
+    C{Word("Address")}. Remember that C{Word} uses the string argument to define
+    I{sets} of matchable characters. This expression would match "Add", "AAA",
+    "dAred", or any other word made up of the characters 'A', 'd', 'r', 'e', and 's'.
+    To match an exact literal string, use L{Literal} or L{Keyword}.
+
+    pyparsing includes helper strings for building Words:
+     - L{alphas}
+     - L{nums}
+     - L{alphanums}
+     - L{hexnums}
+     - L{alphas8bit} (alphabetic characters in ASCII range 128-255 - accented, tilded, umlauted, etc.)
+     - L{punc8bit} (non-alphabetic characters in ASCII range 128-255 - currency, symbols, superscripts, diacriticals, etc.)
+     - L{printables} (any non-whitespace character)
+
+    Example::
+        # a word composed of digits
+        integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9"))
+
+        # a word with a leading capital, and zero or more lowercase
+        capital_word = Word(alphas.upper(), alphas.lower())
+
+        # hostnames are alphanumeric, with leading alpha, and '-'
+        hostname = Word(alphas, alphanums+'-')
+
+        # roman numeral (not a strict parser, accepts invalid mix of characters)
+        roman = Word("IVXLCDM")
+
+        # any string of non-whitespace characters, except for ','
+        csv_value = Word(printables, excludeChars=",")
+    """
+    def __init__( self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None ):
+        super(Word,self).__init__()
+        # remove excluded characters from both character sets before storing
+        if excludeChars:
+            initChars = ''.join(c for c in initChars if c not in excludeChars)
+            if bodyChars:
+                bodyChars = ''.join(c for c in bodyChars if c not in excludeChars)
+        # keep both the original strings (for regex/repr) and set versions (for fast lookup)
+        self.initCharsOrig = initChars
+        self.initChars = set(initChars)
+        if bodyChars :
+            self.bodyCharsOrig = bodyChars
+            self.bodyChars = set(bodyChars)
+        else:
+            # body defaults to the initial character set
+            self.bodyCharsOrig = initChars
+            self.bodyChars = set(initChars)
+
+        self.maxSpecified = max > 0
+
+        if min < 1:
+            raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted")
+
+        self.minLen = min
+
+        if max > 0:
+            self.maxLen = max
+        else:
+            self.maxLen = _MAX_INT
+
+        # exact overrides both min and max
+        if exact > 0:
+            self.maxLen = exact
+            self.minLen = exact
+
+        self.name = _ustr(self)
+        self.errmsg = "Expected " + self.name
+        self.mayIndexError = False
+        self.asKeyword = asKeyword
+
+        # Fast path: when there are no length constraints and no space chars,
+        # compile an equivalent regular expression; parseImpl uses it if present.
+        if ' ' not in self.initCharsOrig+self.bodyCharsOrig and (min==1 and max==0 and exact==0):
+            if self.bodyCharsOrig == self.initCharsOrig:
+                self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig)
+            elif len(self.initCharsOrig) == 1:
+                self.reString = "%s[%s]*" % \
+                                      (re.escape(self.initCharsOrig),
+                                      _escapeRegexRangeChars(self.bodyCharsOrig),)
+            else:
+                self.reString = "[%s][%s]*" % \
+                                      (_escapeRegexRangeChars(self.initCharsOrig),
+                                      _escapeRegexRangeChars(self.bodyCharsOrig),)
+            if self.asKeyword:
+                self.reString = r"\b"+self.reString+r"\b"
+            try:
+                self.re = re.compile( self.reString )
+            except Exception:
+                # fall back to the character-by-character parse on any regex failure
+                self.re = None
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        # regex fast path, if one was compiled in __init__
+        if self.re:
+            result = self.re.match(instring,loc)
+            if not result:
+                raise ParseException(instring, loc, self.errmsg, self)
+
+            loc = result.end()
+            return loc, result.group()
+
+        # slow path: first char must be in initChars
+        if not(instring[ loc ] in self.initChars):
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        start = loc
+        loc += 1
+        instrlen = len(instring)
+        bodychars = self.bodyChars
+        # never scan past maxLen characters or the end of the input
+        maxloc = start + self.maxLen
+        maxloc = min( maxloc, instrlen )
+        while loc < maxloc and instring[loc] in bodychars:
+            loc += 1
+
+        throwException = False
+        if loc - start < self.minLen:
+            throwException = True
+        # with an explicit max, reject if the word actually continues past it
+        if self.maxSpecified and loc < instrlen and instring[loc] in bodychars:
+            throwException = True
+        # asKeyword requires word-char boundaries on both sides of the match
+        if self.asKeyword:
+            if (start>0 and instring[start-1] in bodychars) or (loc<instrlen and instring[loc] in bodychars):
+                throwException = True
+
+        if throwException:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        return loc, instring[start:loc]
+
+    def __str__( self ):
+        # prefer an explicitly assigned name, if the base class has one
+        try:
+            return super(Word,self).__str__()
+        except Exception:
+            pass
+
+
+        if self.strRepr is None:
+
+            # abbreviate long character sets for readability
+            def charsAsStr(s):
+                if len(s)>4:
+                    return s[:4]+"..."
+                else:
+                    return s
+
+            if ( self.initCharsOrig != self.bodyCharsOrig ):
+                self.strRepr = "W:(%s,%s)" % ( charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig) )
+            else:
+                self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig)
+
+        return self.strRepr
+
+
+class Regex(Token):
+    r"""
+    Token for matching strings that match a given regular expression.
+    Defined with string specifying the regular expression in a form recognized by the inbuilt Python re module.
+    If the given regex contains named groups (defined using C{(?P<name>...)}), these will be preserved as
+    named parse results.
+
+    Example::
+        realnum = Regex(r"[+-]?\d+\.\d*")
+        date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)')
+        # ref: http://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression
+        roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})")
+    """
+    # type of a compiled pattern, used to accept pre-compiled RE objects below
+    compiledREtype = type(re.compile("[A-Z]"))
+    def __init__( self, pattern, flags=0):
+        """The parameters C{pattern} and C{flags} are passed to the C{re.compile()} function as-is. See the Python C{re} module for an explanation of the acceptable patterns and flags."""
+        super(Regex,self).__init__()
+
+        if isinstance(pattern, basestring):
+            if not pattern:
+                warnings.warn("null string passed to Regex; use Empty() instead",
+                        SyntaxWarning, stacklevel=2)
+
+            self.pattern = pattern
+            self.flags = flags
+
+            try:
+                self.re = re.compile(self.pattern, self.flags)
+                self.reString = self.pattern
+            except sre_constants.error:
+                # warn with the offending pattern, then propagate the compile error
+                warnings.warn("invalid pattern (%s) passed to Regex" % pattern,
+                    SyntaxWarning, stacklevel=2)
+                raise
+
+        elif isinstance(pattern, Regex.compiledREtype):
+            # accept an already-compiled RE; flags argument is kept but unused by re
+            self.re = pattern
+            self.pattern = \
+                self.reString = str(pattern)
+            self.flags = flags
+
+        else:
+            raise ValueError("Regex may only be constructed with a string or a compiled RE object")
+
+        self.name = _ustr(self)
+        self.errmsg = "Expected " + self.name
+        self.mayIndexError = False
+        self.mayReturnEmpty = True
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        result = self.re.match(instring,loc)
+        if not result:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        loc = result.end()
+        # expose any named groups as named parse results
+        d = result.groupdict()
+        ret = ParseResults(result.group())
+        if d:
+            for k in d:
+                ret[k] = d[k]
+        return loc,ret
+
+    def __str__( self ):
+        try:
+            return super(Regex,self).__str__()
+        except Exception:
+            pass
+
+        if self.strRepr is None:
+            self.strRepr = "Re:(%s)" % repr(self.pattern)
+
+        return self.strRepr
+
+
+class QuotedString(Token):
+    r"""
+    Token for matching strings that are delimited by quoting characters.
+
+    Defined with the following parameters:
+        - quoteChar - string of one or more characters defining the quote delimiting string
+        - escChar - character to escape quotes, typically backslash (default=C{None})
+        - escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=C{None})
+        - multiline - boolean indicating whether quotes can span multiple lines (default=C{False})
+        - unquoteResults - boolean indicating whether the matched text should be unquoted (default=C{True})
+        - endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=C{None} => same as quoteChar)
+        - convertWhitespaceEscapes - convert escaped whitespace (C{'\t'}, C{'\n'}, etc.) to actual whitespace (default=C{True})
+
+    Example::
+        qs = QuotedString('"')
+        print(qs.searchString('lsjdf "This is the quote" sldjf'))
+        complex_qs = QuotedString('{{', endQuoteChar='}}')
+        print(complex_qs.searchString('lsjdf {{This is the "quote"}} sldjf'))
+        sql_qs = QuotedString('"', escQuote='""')
+        print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf'))
+    prints::
+        [['This is the quote']]
+        [['This is the "quote"']]
+        [['This is the quote with "embedded" quotes']]
+    """
+    def __init__( self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None, convertWhitespaceEscapes=True):
+        super(QuotedString,self).__init__()
+
+        # remove white space from quote chars - wont work anyway
+        quoteChar = quoteChar.strip()
+        if not quoteChar:
+            warnings.warn("quoteChar cannot be the empty string",SyntaxWarning,stacklevel=2)
+            raise SyntaxError()
+
+        if endQuoteChar is None:
+            endQuoteChar = quoteChar
+        else:
+            endQuoteChar = endQuoteChar.strip()
+            if not endQuoteChar:
+                warnings.warn("endQuoteChar cannot be the empty string",SyntaxWarning,stacklevel=2)
+                raise SyntaxError()
+
+        self.quoteChar = quoteChar
+        self.quoteCharLen = len(quoteChar)
+        # first char of the opening quote, used as a cheap pre-check in parseImpl
+        self.firstQuoteChar = quoteChar[0]
+        self.endQuoteChar = endQuoteChar
+        self.endQuoteCharLen = len(endQuoteChar)
+        self.escChar = escChar
+        self.escQuote = escQuote
+        self.unquoteResults = unquoteResults
+        self.convertWhitespaceEscapes = convertWhitespaceEscapes
+
+        # Build a regex of the form: <open>(?: <body alternatives> )*<close>.
+        # The body starts as "any char that is not the end-quote's first char
+        # or the escape char" (newlines also excluded unless multiline).
+        if multiline:
+            self.flags = re.MULTILINE | re.DOTALL
+            self.pattern = r'%s(?:[^%s%s]' % \
+                ( re.escape(self.quoteChar),
+                  _escapeRegexRangeChars(self.endQuoteChar[0]),
+                  (escChar is not None and _escapeRegexRangeChars(escChar) or '') )
+        else:
+            self.flags = 0
+            self.pattern = r'%s(?:[^%s\n\r%s]' % \
+                ( re.escape(self.quoteChar),
+                  _escapeRegexRangeChars(self.endQuoteChar[0]),
+                  (escChar is not None and _escapeRegexRangeChars(escChar) or '') )
+        # multi-char end quote: allow any proper prefix of it followed by a
+        # non-continuation char (longest prefixes tried first)
+        if len(self.endQuoteChar) > 1:
+            self.pattern += (
+                '|(?:' + ')|(?:'.join("%s[^%s]" % (re.escape(self.endQuoteChar[:i]),
+                                               _escapeRegexRangeChars(self.endQuoteChar[i]))
+                                    for i in range(len(self.endQuoteChar)-1,0,-1)) + ')'
+                )
+        # escaped-quote sequence (e.g. SQL "") and escChar+anything alternatives
+        if escQuote:
+            self.pattern += (r'|(?:%s)' % re.escape(escQuote))
+        if escChar:
+            self.pattern += (r'|(?:%s.)' % re.escape(escChar))
+            # pattern used later to strip the escape char during unquoting
+            self.escCharReplacePattern = re.escape(self.escChar)+"(.)"
+        self.pattern += (r')*%s' % re.escape(self.endQuoteChar))
+
+        try:
+            self.re = re.compile(self.pattern, self.flags)
+            self.reString = self.pattern
+        except sre_constants.error:
+            warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern,
+                SyntaxWarning, stacklevel=2)
+            raise
+
+        self.name = _ustr(self)
+        self.errmsg = "Expected " + self.name
+        self.mayIndexError = False
+        self.mayReturnEmpty = True
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        # cheap first-char check before running the full regex
+        result = instring[loc] == self.firstQuoteChar and self.re.match(instring,loc) or None
+        if not result:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        loc = result.end()
+        ret = result.group()
+
+        if self.unquoteResults:
+
+            # strip off quotes
+            ret = ret[self.quoteCharLen:-self.endQuoteCharLen]
+
+            if isinstance(ret,basestring):
+                # replace escaped whitespace
+                if '\\' in ret and self.convertWhitespaceEscapes:
+                    ws_map = {
+                        r'\t' : '\t',
+                        r'\n' : '\n',
+                        r'\f' : '\f',
+                        r'\r' : '\r',
+                    }
+                    for wslit,wschar in ws_map.items():
+                        ret = ret.replace(wslit, wschar)
+
+                # replace escaped characters
+                if self.escChar:
+                    ret = re.sub(self.escCharReplacePattern, r"\g<1>", ret)
+
+                # replace escaped quotes
+                if self.escQuote:
+                    ret = ret.replace(self.escQuote, self.endQuoteChar)
+
+        return loc, ret
+
+    def __str__( self ):
+        try:
+            return super(QuotedString,self).__str__()
+        except Exception:
+            pass
+
+        if self.strRepr is None:
+            self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar)
+
+        return self.strRepr
+
+
+class CharsNotIn(Token):
+ """
+ Token for matching words composed of characters I{not} in a given set (will
+ include whitespace in matched characters if not listed in the provided exclusion set - see example).
+ Defined with string containing all disallowed characters, and an optional
+ minimum, maximum, and/or exact length. The default value for C{min} is 1 (a
+ minimum value < 1 is not valid); the default values for C{max} and C{exact}
+ are 0, meaning no maximum or exact length restriction.
+
+ Example::
+ # define a comma-separated-value as anything that is not a ','
+ csv_value = CharsNotIn(',')
+ print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213"))
+ prints::
+ ['dkls', 'lsdkjf', 's12 34', '@!#', '213']
+ """
+ def __init__( self, notChars, min=1, max=0, exact=0 ):
+ super(CharsNotIn,self).__init__()
+ self.skipWhitespace = False
+ self.notChars = notChars
+
+ if min < 1:
+ raise ValueError("cannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permitted")
+
+ self.minLen = min
+
+ if max > 0:
+ self.maxLen = max
+ else:
+ self.maxLen = _MAX_INT
+
+ if exact > 0:
+ self.maxLen = exact
+ self.minLen = exact
+
+ self.name = _ustr(self)
+ self.errmsg = "Expected " + self.name
+ self.mayReturnEmpty = ( self.minLen == 0 )
+ self.mayIndexError = False
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ if instring[loc] in self.notChars:
+ raise ParseException(instring, loc, self.errmsg, self)
+
+ start = loc
+ loc += 1
+ notchars = self.notChars
+ maxlen = min( start+self.maxLen, len(instring) )
+ while loc < maxlen and \
+ (instring[loc] not in notchars):
+ loc += 1
+
+ if loc - start < self.minLen:
+ raise ParseException(instring, loc, self.errmsg, self)
+
+ return loc, instring[start:loc]
+
+ def __str__( self ):
+ try:
+ return super(CharsNotIn, self).__str__()
+ except Exception:
+ pass
+
+ if self.strRepr is None:
+ if len(self.notChars) > 4:
+ self.strRepr = "!W:(%s...)" % self.notChars[:4]
+ else:
+ self.strRepr = "!W:(%s)" % self.notChars
+
+ return self.strRepr
+
+class White(Token):
+    """
+    Special matching class for matching whitespace.  Normally, whitespace is ignored
+    by pyparsing grammars.  This class is included when some whitespace structures
+    are significant.  Define with a string containing the whitespace characters to be
+    matched; default is C{" \\t\\r\\n"}.  Also takes optional C{min}, C{max}, and C{exact} arguments,
+    as defined for the C{L{Word}} class.
+    """
+    # printable labels for each whitespace character, used to build self.name
+    whiteStrs = {
+        " " : "<SPC>",
+        "\t": "<TAB>",
+        "\n": "<LF>",
+        "\r": "<CR>",
+        "\f": "<FF>",
+        }
+    def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0):
+        super(White,self).__init__()
+        self.matchWhite = ws
+        # exclude the matched characters from the auto-skipped whitespace set,
+        # otherwise the pre-parse would consume them before this token sees them
+        self.setWhitespaceChars( "".join(c for c in self.whiteChars if c not in self.matchWhite) )
+        #~ self.leaveWhitespace()
+        self.name = ("".join(White.whiteStrs[c] for c in self.matchWhite))
+        self.mayReturnEmpty = True
+        self.errmsg = "Expected " + self.name
+
+        self.minLen = min
+
+        if max > 0:
+            self.maxLen = max
+        else:
+            self.maxLen = _MAX_INT
+
+        # exact overrides both min and max
+        if exact > 0:
+            self.maxLen = exact
+            self.minLen = exact
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if not(instring[ loc ] in self.matchWhite):
+            raise ParseException(instring, loc, self.errmsg, self)
+        start = loc
+        loc += 1
+        # scan no further than maxLen characters or end of input
+        maxloc = start + self.maxLen
+        maxloc = min( maxloc, len(instring) )
+        while loc < maxloc and instring[loc] in self.matchWhite:
+            loc += 1
+
+        if loc - start < self.minLen:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        return loc, instring[start:loc]
+
+
+class _PositionToken(Token):
+    # Common base for zero-width tokens that assert a position in the input
+    # (line/string start/end, word boundaries) rather than consuming text.
+    def __init__( self ):
+        super(_PositionToken,self).__init__()
+        # report the concrete subclass name in error messages and repr
+        self.name=self.__class__.__name__
+        # position assertions match empty text and never index past the end
+        self.mayReturnEmpty = True
+        self.mayIndexError = False
+
+class GoToColumn(_PositionToken):
+    """
+    Token to advance to a specific column of input text; useful for tabular report scraping.
+    """
+    def __init__( self, colno ):
+        super(GoToColumn,self).__init__()
+        self.col = colno
+
+    def preParse( self, instring, loc ):
+        # skip ignorables and whitespace until the target column (or until a
+        # non-space character) is reached
+        if col(loc,instring) != self.col:
+            instrlen = len(instring)
+            if self.ignoreExprs:
+                loc = self._skipIgnorables( instring, loc )
+            while loc < instrlen and instring[loc].isspace() and col( loc, instring ) != self.col :
+                loc += 1
+        return loc
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        thiscol = col( loc, instring )
+        # already past the target column -> cannot back up, fail
+        if thiscol > self.col:
+            raise ParseException( instring, loc, "Text not in expected column", self )
+        # consume (and return) whatever text lies between here and the column
+        newloc = loc + self.col - thiscol
+        ret = instring[ loc: newloc ]
+        return newloc, ret
+
+
+class LineStart(_PositionToken):
+ """
+ Matches if current position is at the beginning of a line within the parse string
+
+ Example::
+
+ test = '''\
+ AAA this line
+ AAA and this line
+ AAA but not this one
+ B AAA and definitely not this one
+ '''
+
+ for t in (LineStart() + 'AAA' + restOfLine).searchString(test):
+ print(t)
+
+ Prints::
+ ['AAA', ' this line']
+ ['AAA', ' and this line']
+
+ """
+ def __init__( self ):
+ super(LineStart,self).__init__()
+ self.errmsg = "Expected start of line"
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ if col(loc, instring) == 1:
+ return loc, []
+ raise ParseException(instring, loc, self.errmsg, self)
+
+class LineEnd(_PositionToken):
+    """
+    Matches if current position is at the end of a line within the parse string
+    """
+    def __init__( self ):
+        super(LineEnd,self).__init__()
+        # keep "\n" out of the skipped-whitespace set so it is still visible here
+        self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") )
+        self.errmsg = "Expected end of line"
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if loc<len(instring):
+            if instring[loc] == "\n":
+                # consume the newline and return it as the matched token
+                return loc+1, "\n"
+            else:
+                raise ParseException(instring, loc, self.errmsg, self)
+        elif loc == len(instring):
+            # end of input also counts as end of line; returns loc+1 (one past
+            # the end) so a repeated LineEnd cannot match the same spot forever
+            return loc+1, []
+        else:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+class StringStart(_PositionToken):
+ """
+ Matches if current position is at the beginning of the parse string
+ """
+ def __init__( self ):
+ super(StringStart,self).__init__()
+ self.errmsg = "Expected start of text"
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ if loc != 0:
+ # see if entire string up to here is just whitespace and ignoreables
+ if loc != self.preParse( instring, 0 ):
+ raise ParseException(instring, loc, self.errmsg, self)
+ return loc, []
+
+class StringEnd(_PositionToken):
+    """
+    Matches if current position is at the end of the parse string
+    """
+    def __init__( self ):
+        super(StringEnd,self).__init__()
+        self.errmsg = "Expected end of text"
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if loc < len(instring):
+            raise ParseException(instring, loc, self.errmsg, self)
+        elif loc == len(instring):
+            # exactly at the end: succeed, advancing one past the end so a
+            # repeated StringEnd cannot match the same spot forever
+            return loc+1, []
+        elif loc > len(instring):
+            # already past the end (e.g. after a previous StringEnd): succeed
+            # without advancing further
+            return loc, []
+        else:
+            # unreachable: the three comparisons above are exhaustive
+            raise ParseException(instring, loc, self.errmsg, self)
+
+class WordStart(_PositionToken):
+ """
+ Matches if the current position is at the beginning of a Word, and
+ is not preceded by any character in a given set of C{wordChars}
+ (default=C{printables}). To emulate the C{\b} behavior of regular expressions,
+ use C{WordStart(alphanums)}. C{WordStart} will also match at the beginning of
+ the string being parsed, or at the beginning of a line.
+ """
+ def __init__(self, wordChars = printables):
+ super(WordStart,self).__init__()
+ self.wordChars = set(wordChars)
+ self.errmsg = "Not at the start of a word"
+
+ def parseImpl(self, instring, loc, doActions=True ):
+ if loc != 0:
+ if (instring[loc-1] in self.wordChars or
+ instring[loc] not in self.wordChars):
+ raise ParseException(instring, loc, self.errmsg, self)
+ return loc, []
+
+class WordEnd(_PositionToken):
+    """
+    Matches if the current position is at the end of a Word, and
+    is not followed by any character in a given set of C{wordChars}
+    (default=C{printables}). To emulate the C{\b} behavior of regular expressions,
+    use C{WordEnd(alphanums)}. C{WordEnd} will also match at the end of
+    the string being parsed, or at the end of a line.
+    """
+    def __init__(self, wordChars = printables):
+        super(WordEnd,self).__init__()
+        self.wordChars = set(wordChars)
+        # whitespace after the word must remain visible, not auto-skipped
+        self.skipWhitespace = False
+        self.errmsg = "Not at the end of a word"
+
+    def parseImpl(self, instring, loc, doActions=True ):
+        instrlen = len(instring)
+        # at end-of-string (loc >= instrlen) the match trivially succeeds
+        if instrlen>0 and loc<instrlen:
+            # fail if the next char continues a word, or the previous char
+            # was not a word char.
+            # NOTE(review): at loc == 0, instring[loc-1] wraps to the LAST
+            # character of the string — presumably WordEnd is never evaluated
+            # at position 0 in practice; verify against callers.
+            if (instring[loc] in self.wordChars or
+                    instring[loc-1] not in self.wordChars):
+                raise ParseException(instring, loc, self.errmsg, self)
+        return loc, []
+
+
+class ParseExpression(ParserElement):
+    """
+    Abstract subclass of ParserElement, for combining and post-processing parsed tokens.
+    """
+    def __init__( self, exprs, savelist = False ):
+        super(ParseExpression,self).__init__(savelist)
+        # a generator must be materialized before the type checks below
+        if isinstance( exprs, _generatorType ):
+            exprs = list(exprs)
+
+        if isinstance( exprs, basestring ):
+            # a single string becomes a one-element list of Literal/Keyword
+            self.exprs = [ ParserElement._literalStringClass( exprs ) ]
+        elif isinstance( exprs, Iterable ):
+            exprs = list(exprs)
+            # if sequence of strings provided, wrap with Literal
+            if all(isinstance(expr, basestring) for expr in exprs):
+                exprs = map(ParserElement._literalStringClass, exprs)
+            self.exprs = list(exprs)
+        else:
+            # last resort: try to iterate, else wrap the single expression
+            try:
+                self.exprs = list( exprs )
+            except TypeError:
+                self.exprs = [ exprs ]
+        self.callPreparse = False
+
+    def __getitem__( self, i ):
+        return self.exprs[i]
+
+    def append( self, other ):
+        # invalidate the cached repr whenever the expression list changes
+        self.exprs.append( other )
+        self.strRepr = None
+        return self
+
+    def leaveWhitespace( self ):
+        """Extends C{leaveWhitespace} defined in base class, and also invokes C{leaveWhitespace} on
+           all contained expressions."""
+        self.skipWhitespace = False
+        # copy children first so shared sub-expressions are not mutated
+        self.exprs = [ e.copy() for e in self.exprs ]
+        for e in self.exprs:
+            e.leaveWhitespace()
+        return self
+
+    def ignore( self, other ):
+        # propagate the (possibly wrapped) ignore expression to all children
+        if isinstance( other, Suppress ):
+            if other not in self.ignoreExprs:
+                super( ParseExpression, self).ignore( other )
+                for e in self.exprs:
+                    e.ignore( self.ignoreExprs[-1] )
+        else:
+            super( ParseExpression, self).ignore( other )
+            for e in self.exprs:
+                e.ignore( self.ignoreExprs[-1] )
+        return self
+
+    def __str__( self ):
+        try:
+            return super(ParseExpression,self).__str__()
+        except Exception:
+            pass
+
+        if self.strRepr is None:
+            self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.exprs) )
+        return self.strRepr
+
+    def streamline( self ):
+        super(ParseExpression,self).streamline()
+
+        for e in self.exprs:
+            e.streamline()
+
+        # collapse nested And's of the form And( And( And( a,b), c), d) to And( a,b,c,d )
+        # but only if there are no parse actions or resultsNames on the nested And's
+        # (likewise for Or's and MatchFirst's)
+        if ( len(self.exprs) == 2 ):
+            other = self.exprs[0]
+            # flatten a same-class first child into this expression's list
+            if ( isinstance( other, self.__class__ ) and
+                  not(other.parseAction) and
+                  other.resultsName is None and
+                  not other.debug ):
+                self.exprs = other.exprs[:] + [ self.exprs[1] ]
+                self.strRepr = None
+                self.mayReturnEmpty |= other.mayReturnEmpty
+                self.mayIndexError  |= other.mayIndexError
+
+            other = self.exprs[-1]
+            # and likewise for a same-class last child
+            if ( isinstance( other, self.__class__ ) and
+                  not(other.parseAction) and
+                  other.resultsName is None and
+                  not other.debug ):
+                self.exprs = self.exprs[:-1] + other.exprs[:]
+                self.strRepr = None
+                self.mayReturnEmpty |= other.mayReturnEmpty
+                self.mayIndexError  |= other.mayIndexError
+
+        self.errmsg = "Expected " + _ustr(self)
+
+        return self
+
+    def setResultsName( self, name, listAllMatches=False ):
+        ret = super(ParseExpression,self).setResultsName(name,listAllMatches)
+        return ret
+
+    def validate( self, validateTrace=[] ):
+        # recursively validate children, passing the path for cycle detection
+        tmp = validateTrace[:]+[self]
+        for e in self.exprs:
+            e.validate(tmp)
+        self.checkRecursion( [] )
+
+    def copy(self):
+        # deep-copy the child list so the clone can be modified independently
+        ret = super(ParseExpression,self).copy()
+        ret.exprs = [e.copy() for e in self.exprs]
+        return ret
+
+class And(ParseExpression):
+    """
+    Requires all given C{ParseExpression}s to be found in the given order.
+    Expressions may be separated by whitespace.
+    May be constructed using the C{'+'} operator.
+    May also be constructed using the C{'-'} operator, which will suppress backtracking.
+
+    Example::
+        integer = Word(nums)
+        name_expr = OneOrMore(Word(alphas))
+
+        expr = And([integer("id"),name_expr("name"),integer("age")])
+        # more easily written as:
+        expr = integer("id") + name_expr("name") + integer("age")
+    """
+
+    # marker inserted by the '-' operator; everything after it parses with
+    # backtracking suppressed (failures become ParseSyntaxExceptions)
+    class _ErrorStop(Empty):
+        def __init__(self, *args, **kwargs):
+            super(And._ErrorStop,self).__init__(*args, **kwargs)
+            self.name = '-'
+            self.leaveWhitespace()
+
+    def __init__( self, exprs, savelist = True ):
+        super(And,self).__init__(exprs, savelist)
+        # And can match empty only if every element can
+        self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs)
+        # adopt the leading element's whitespace-skipping behavior
+        self.setWhitespaceChars( self.exprs[0].whiteChars )
+        self.skipWhitespace = self.exprs[0].skipWhitespace
+        self.callPreparse = True
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        # pass False as last arg to _parse for first element, since we already
+        # pre-parsed the string as part of our And pre-parsing
+        loc, resultlist = self.exprs[0]._parse( instring, loc, doActions, callPreParse=False )
+        errorStop = False
+        for e in self.exprs[1:]:
+            if isinstance(e, And._ErrorStop):
+                errorStop = True
+                continue
+            if errorStop:
+                # past the '-' marker: convert ordinary parse failures into
+                # ParseSyntaxException so enclosing alternatives do not backtrack
+                try:
+                    loc, exprtokens = e._parse( instring, loc, doActions )
+                except ParseSyntaxException:
+                    raise
+                except ParseBaseException as pe:
+                    pe.__traceback__ = None
+                    raise ParseSyntaxException._from_exception(pe)
+                except IndexError:
+                    raise ParseSyntaxException(instring, len(instring), self.errmsg, self)
+            else:
+                loc, exprtokens = e._parse( instring, loc, doActions )
+            # accumulate only non-empty results (or those carrying named keys)
+            if exprtokens or exprtokens.haskeys():
+                resultlist += exprtokens
+        return loc, resultlist
+
+    def __iadd__(self, other ):
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        return self.append( other ) #And( [ self, other ] )
+
+    def checkRecursion( self, parseElementList ):
+        subRecCheckList = parseElementList[:] + [ self ]
+        for e in self.exprs:
+            e.checkRecursion( subRecCheckList )
+            # once an element must consume input, later elements cannot recurse
+            # back to this position, so stop checking
+            if not e.mayReturnEmpty:
+                break
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "{" + " ".join(_ustr(e) for e in self.exprs) + "}"
+
+        return self.strRepr
+
+
+class Or(ParseExpression):
+    """
+    Requires that at least one C{ParseExpression} is found.
+    If two expressions match, the expression that matches the longest string will be used.
+    May be constructed using the C{'^'} operator.
+
+    Example::
+        # construct Or using '^' operator
+
+        number = Word(nums) ^ Combine(Word(nums) + '.' + Word(nums))
+        print(number.searchString("123 3.1416 789"))
+    prints::
+        [['123'], ['3.1416'], ['789']]
+    """
+    def __init__( self, exprs, savelist = False ):
+        super(Or,self).__init__(exprs, savelist)
+        if self.exprs:
+            # Or can match empty if any alternative can
+            self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs)
+        else:
+            self.mayReturnEmpty = True
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        maxExcLoc = -1
+        maxException = None
+        matches = []
+        # Pass 1: try every alternative without running parse actions,
+        # recording how far each successful one reaches; remember the
+        # deepest failure for error reporting.
+        for e in self.exprs:
+            try:
+                loc2 = e.tryParse( instring, loc )
+            except ParseException as err:
+                err.__traceback__ = None
+                if err.loc > maxExcLoc:
+                    maxException = err
+                    maxExcLoc = err.loc
+            except IndexError:
+                if len(instring) > maxExcLoc:
+                    maxException = ParseException(instring,len(instring),e.errmsg,self)
+                    maxExcLoc = len(instring)
+            else:
+                # save match among all matches, to retry longest to shortest
+                matches.append((loc2, e))
+
+        if matches:
+            # Pass 2: re-parse candidates longest-first with actions enabled;
+            # a parse action may reject a match, so fall through to shorter ones
+            matches.sort(key=lambda x: -x[0])
+            for _,e in matches:
+                try:
+                    return e._parse( instring, loc, doActions )
+                except ParseException as err:
+                    err.__traceback__ = None
+                    if err.loc > maxExcLoc:
+                        maxException = err
+                        maxExcLoc = err.loc
+
+        if maxException is not None:
+            # report with this Or's own message rather than the child's
+            maxException.msg = self.errmsg
+            raise maxException
+        else:
+            raise ParseException(instring, loc, "no defined alternatives to match", self)
+
+
+    def __ixor__(self, other ):
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        return self.append( other ) #Or( [ self, other ] )
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "{" + " ^ ".join(_ustr(e) for e in self.exprs) + "}"
+
+        return self.strRepr
+
+    def checkRecursion( self, parseElementList ):
+        subRecCheckList = parseElementList[:] + [ self ]
+        for e in self.exprs:
+            e.checkRecursion( subRecCheckList )
+
+
+class MatchFirst(ParseExpression):
+    """
+    Requires that at least one C{ParseExpression} is found.
+    If two expressions match, the first one listed is the one that will match.
+    May be constructed using the C{'|'} operator.
+
+    Example::
+        # construct MatchFirst using '|' operator
+
+        # watch the order of expressions to match
+        number = Word(nums) | Combine(Word(nums) + '.' + Word(nums))
+        print(number.searchString("123 3.1416 789")) #  Fail! -> [['123'], ['3'], ['1416'], ['789']]
+
+        # put more selective expression first
+        number = Combine(Word(nums) + '.' + Word(nums)) | Word(nums)
+        print(number.searchString("123 3.1416 789")) #  Better -> [['123'], ['3.1416'], ['789']]
+    """
+    def __init__( self, exprs, savelist = False ):
+        super(MatchFirst,self).__init__(exprs, savelist)
+        if self.exprs:
+            # MatchFirst can match empty if any alternative can
+            self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs)
+        else:
+            self.mayReturnEmpty = True
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        maxExcLoc = -1
+        maxException = None
+        # try alternatives in listed order; return the first success,
+        # tracking the deepest failure location for error reporting
+        for e in self.exprs:
+            try:
+                ret = e._parse( instring, loc, doActions )
+                return ret
+            except ParseException as err:
+                if err.loc > maxExcLoc:
+                    maxException = err
+                    maxExcLoc = err.loc
+            except IndexError:
+                if len(instring) > maxExcLoc:
+                    maxException = ParseException(instring,len(instring),e.errmsg,self)
+                    maxExcLoc = len(instring)
+
+        # only got here if no expression matched, raise exception for match that made it the furthest
+        else:
+            if maxException is not None:
+                # report with this MatchFirst's own message, not the child's
+                maxException.msg = self.errmsg
+                raise maxException
+            else:
+                raise ParseException(instring, loc, "no defined alternatives to match", self)
+
+    def __ior__(self, other ):
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        return self.append( other ) #MatchFirst( [ self, other ] )
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "{" + " | ".join(_ustr(e) for e in self.exprs) + "}"
+
+        return self.strRepr
+
+    def checkRecursion( self, parseElementList ):
+        subRecCheckList = parseElementList[:] + [ self ]
+        for e in self.exprs:
+            e.checkRecursion( subRecCheckList )
+
+
+class Each(ParseExpression):
+ """
+ Requires all given C{ParseExpression}s to be found, but in any order.
+ Expressions may be separated by whitespace.
+ May be constructed using the C{'&'} operator.
+
+ Example::
+ color = oneOf("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN")
+ shape_type = oneOf("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON")
+ integer = Word(nums)
+ shape_attr = "shape:" + shape_type("shape")
+ posn_attr = "posn:" + Group(integer("x") + ',' + integer("y"))("posn")
+ color_attr = "color:" + color("color")
+ size_attr = "size:" + integer("size")
+
+ # use Each (using operator '&') to accept attributes in any order
+ # (shape and posn are required, color and size are optional)
+ shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr)
+
+ shape_spec.runTests('''
+ shape: SQUARE color: BLACK posn: 100, 120
+ shape: CIRCLE size: 50 color: BLUE posn: 50,80
+ color:GREEN size:20 shape:TRIANGLE posn:20,40
+ '''
+ )
+ prints::
+ shape: SQUARE color: BLACK posn: 100, 120
+ ['shape:', 'SQUARE', 'color:', 'BLACK', 'posn:', ['100', ',', '120']]
+ - color: BLACK
+ - posn: ['100', ',', '120']
+ - x: 100
+ - y: 120
+ - shape: SQUARE
+
+
+ shape: CIRCLE size: 50 color: BLUE posn: 50,80
+ ['shape:', 'CIRCLE', 'size:', '50', 'color:', 'BLUE', 'posn:', ['50', ',', '80']]
+ - color: BLUE
+ - posn: ['50', ',', '80']
+ - x: 50
+ - y: 80
+ - shape: CIRCLE
+ - size: 50
+
+
+ color: GREEN size: 20 shape: TRIANGLE posn: 20,40
+ ['color:', 'GREEN', 'size:', '20', 'shape:', 'TRIANGLE', 'posn:', ['20', ',', '40']]
+ - color: GREEN
+ - posn: ['20', ',', '40']
+ - x: 20
+ - y: 40
+ - shape: TRIANGLE
+ - size: 20
+ """
+ def __init__( self, exprs, savelist = True ):
+ super(Each,self).__init__(exprs, savelist)
+ self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs)
+ self.skipWhitespace = True
+ self.initExprGroups = True
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ if self.initExprGroups:
+ self.opt1map = dict((id(e.expr),e) for e in self.exprs if isinstance(e,Optional))
+ opt1 = [ e.expr for e in self.exprs if isinstance(e,Optional) ]
+ opt2 = [ e for e in self.exprs if e.mayReturnEmpty and not isinstance(e,Optional)]
+ self.optionals = opt1 + opt2
+ self.multioptionals = [ e.expr for e in self.exprs if isinstance(e,ZeroOrMore) ]
+ self.multirequired = [ e.expr for e in self.exprs if isinstance(e,OneOrMore) ]
+ self.required = [ e for e in self.exprs if not isinstance(e,(Optional,ZeroOrMore,OneOrMore)) ]
+ self.required += self.multirequired
+ self.initExprGroups = False
+ tmpLoc = loc
+ tmpReqd = self.required[:]
+ tmpOpt = self.optionals[:]
+ matchOrder = []
+
+ keepMatching = True
+ while keepMatching:
+ tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequired
+ failed = []
+ for e in tmpExprs:
+ try:
+ tmpLoc = e.tryParse( instring, tmpLoc )
+ except ParseException:
+ failed.append(e)
+ else:
+ matchOrder.append(self.opt1map.get(id(e),e))
+ if e in tmpReqd:
+ tmpReqd.remove(e)
+ elif e in tmpOpt:
+ tmpOpt.remove(e)
+ if len(failed) == len(tmpExprs):
+ keepMatching = False
+
+ if tmpReqd:
+ missing = ", ".join(_ustr(e) for e in tmpReqd)
+ raise ParseException(instring,loc,"Missing one or more required elements (%s)" % missing )
+
+ # add any unmatched Optionals, in case they have default values defined
+ matchOrder += [e for e in self.exprs if isinstance(e,Optional) and e.expr in tmpOpt]
+
+ resultlist = []
+ for e in matchOrder:
+ loc,results = e._parse(instring,loc,doActions)
+ resultlist.append(results)
+
+ finalResults = sum(resultlist, ParseResults([]))
+ return loc, finalResults
+
+ def __str__( self ):
+ if hasattr(self,"name"):
+ return self.name
+
+ if self.strRepr is None:
+ self.strRepr = "{" + " & ".join(_ustr(e) for e in self.exprs) + "}"
+
+ return self.strRepr
+
+ def checkRecursion( self, parseElementList ):
+ subRecCheckList = parseElementList[:] + [ self ]
+ for e in self.exprs:
+ e.checkRecursion( subRecCheckList )
+
+
+class ParseElementEnhance(ParserElement):
+ """
+ Abstract subclass of C{ParserElement}, for combining and post-processing parsed tokens.
+ """
+ def __init__( self, expr, savelist=False ):
+ super(ParseElementEnhance,self).__init__(savelist)
+ if isinstance( expr, basestring ):
+ if issubclass(ParserElement._literalStringClass, Token):
+ expr = ParserElement._literalStringClass(expr)
+ else:
+ expr = ParserElement._literalStringClass(Literal(expr))
+ self.expr = expr
+ self.strRepr = None
+ if expr is not None:
+ self.mayIndexError = expr.mayIndexError
+ self.mayReturnEmpty = expr.mayReturnEmpty
+ self.setWhitespaceChars( expr.whiteChars )
+ self.skipWhitespace = expr.skipWhitespace
+ self.saveAsList = expr.saveAsList
+ self.callPreparse = expr.callPreparse
+ self.ignoreExprs.extend(expr.ignoreExprs)
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ if self.expr is not None:
+ return self.expr._parse( instring, loc, doActions, callPreParse=False )
+ else:
+ raise ParseException("",loc,self.errmsg,self)
+
+ def leaveWhitespace( self ):
+ self.skipWhitespace = False
+ self.expr = self.expr.copy()
+ if self.expr is not None:
+ self.expr.leaveWhitespace()
+ return self
+
+ def ignore( self, other ):
+ if isinstance( other, Suppress ):
+ if other not in self.ignoreExprs:
+ super( ParseElementEnhance, self).ignore( other )
+ if self.expr is not None:
+ self.expr.ignore( self.ignoreExprs[-1] )
+ else:
+ super( ParseElementEnhance, self).ignore( other )
+ if self.expr is not None:
+ self.expr.ignore( self.ignoreExprs[-1] )
+ return self
+
+ def streamline( self ):
+ super(ParseElementEnhance,self).streamline()
+ if self.expr is not None:
+ self.expr.streamline()
+ return self
+
+ def checkRecursion( self, parseElementList ):
+ if self in parseElementList:
+ raise RecursiveGrammarException( parseElementList+[self] )
+ subRecCheckList = parseElementList[:] + [ self ]
+ if self.expr is not None:
+ self.expr.checkRecursion( subRecCheckList )
+
+ def validate( self, validateTrace=[] ):
+ tmp = validateTrace[:]+[self]
+ if self.expr is not None:
+ self.expr.validate(tmp)
+ self.checkRecursion( [] )
+
+ def __str__( self ):
+ try:
+ return super(ParseElementEnhance,self).__str__()
+ except Exception:
+ pass
+
+ if self.strRepr is None and self.expr is not None:
+ self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.expr) )
+ return self.strRepr
+
+
+class FollowedBy(ParseElementEnhance):
+ """
+ Lookahead matching of the given parse expression. C{FollowedBy}
+ does I{not} advance the parsing position within the input string, it only
+ verifies that the specified parse expression matches at the current
+ position. C{FollowedBy} always returns a null token list.
+
+ Example::
+ # use FollowedBy to match a label only if it is followed by a ':'
+ data_word = Word(alphas)
+ label = data_word + FollowedBy(':')
+ attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+
+ OneOrMore(attr_expr).parseString("shape: SQUARE color: BLACK posn: upper left").pprint()
+ prints::
+ [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']]
+ """
+ def __init__( self, expr ):
+ super(FollowedBy,self).__init__(expr)
+ self.mayReturnEmpty = True
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ self.expr.tryParse( instring, loc )
+ return loc, []
+
+
+class NotAny(ParseElementEnhance):
+ """
+ Lookahead to disallow matching with the given parse expression. C{NotAny}
+ does I{not} advance the parsing position within the input string, it only
+ verifies that the specified parse expression does I{not} match at the current
+ position. Also, C{NotAny} does I{not} skip over leading whitespace. C{NotAny}
+ always returns a null token list. May be constructed using the '~' operator.
+
+ Example::
+
+ """
+ def __init__( self, expr ):
+ super(NotAny,self).__init__(expr)
+ #~ self.leaveWhitespace()
+ self.skipWhitespace = False # do NOT use self.leaveWhitespace(), don't want to propagate to exprs
+ self.mayReturnEmpty = True
+ self.errmsg = "Found unwanted token, "+_ustr(self.expr)
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ if self.expr.canParseNext(instring, loc):
+ raise ParseException(instring, loc, self.errmsg, self)
+ return loc, []
+
+ def __str__( self ):
+ if hasattr(self,"name"):
+ return self.name
+
+ if self.strRepr is None:
+ self.strRepr = "~{" + _ustr(self.expr) + "}"
+
+ return self.strRepr
+
+class _MultipleMatch(ParseElementEnhance):
+ def __init__( self, expr, stopOn=None):
+ super(_MultipleMatch, self).__init__(expr)
+ self.saveAsList = True
+ ender = stopOn
+ if isinstance(ender, basestring):
+ ender = ParserElement._literalStringClass(ender)
+ self.not_ender = ~ender if ender is not None else None
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ self_expr_parse = self.expr._parse
+ self_skip_ignorables = self._skipIgnorables
+ check_ender = self.not_ender is not None
+ if check_ender:
+ try_not_ender = self.not_ender.tryParse
+
+ # must be at least one (but first see if we are the stopOn sentinel;
+ # if so, fail)
+ if check_ender:
+ try_not_ender(instring, loc)
+ loc, tokens = self_expr_parse( instring, loc, doActions, callPreParse=False )
+ try:
+ hasIgnoreExprs = (not not self.ignoreExprs)
+ while 1:
+ if check_ender:
+ try_not_ender(instring, loc)
+ if hasIgnoreExprs:
+ preloc = self_skip_ignorables( instring, loc )
+ else:
+ preloc = loc
+ loc, tmptokens = self_expr_parse( instring, preloc, doActions )
+ if tmptokens or tmptokens.haskeys():
+ tokens += tmptokens
+ except (ParseException,IndexError):
+ pass
+
+ return loc, tokens
+
+class OneOrMore(_MultipleMatch):
+ """
+ Repetition of one or more of the given expression.
+
+ Parameters:
+ - expr - expression that must match one or more times
+ - stopOn - (default=C{None}) - expression for a terminating sentinel
+ (only required if the sentinel would ordinarily match the repetition
+ expression)
+
+ Example::
+ data_word = Word(alphas)
+ label = data_word + FollowedBy(':')
+ attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join))
+
+ text = "shape: SQUARE posn: upper left color: BLACK"
+ OneOrMore(attr_expr).parseString(text).pprint() # Fail! read 'color' as data instead of next label -> [['shape', 'SQUARE color']]
+
+ # use stopOn attribute for OneOrMore to avoid reading label string as part of the data
+ attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+ OneOrMore(attr_expr).parseString(text).pprint() # Better -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']]
+
+ # could also be written as
+ (attr_expr * (1,)).parseString(text).pprint()
+ """
+
+ def __str__( self ):
+ if hasattr(self,"name"):
+ return self.name
+
+ if self.strRepr is None:
+ self.strRepr = "{" + _ustr(self.expr) + "}..."
+
+ return self.strRepr
+
+class ZeroOrMore(_MultipleMatch):
+ """
+ Optional repetition of zero or more of the given expression.
+
+ Parameters:
+ - expr - expression that must match zero or more times
+ - stopOn - (default=C{None}) - expression for a terminating sentinel
+ (only required if the sentinel would ordinarily match the repetition
+ expression)
+
+ Example: similar to L{OneOrMore}
+ """
+ def __init__( self, expr, stopOn=None):
+ super(ZeroOrMore,self).__init__(expr, stopOn=stopOn)
+ self.mayReturnEmpty = True
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ try:
+ return super(ZeroOrMore, self).parseImpl(instring, loc, doActions)
+ except (ParseException,IndexError):
+ return loc, []
+
+ def __str__( self ):
+ if hasattr(self,"name"):
+ return self.name
+
+ if self.strRepr is None:
+ self.strRepr = "[" + _ustr(self.expr) + "]..."
+
+ return self.strRepr
+
+class _NullToken(object):
+ def __bool__(self):
+ return False
+ __nonzero__ = __bool__
+ def __str__(self):
+ return ""
+
+_optionalNotMatched = _NullToken()
+class Optional(ParseElementEnhance):
+ """
+ Optional matching of the given expression.
+
+ Parameters:
+ - expr - expression that must match zero or more times
+ - default (optional) - value to be returned if the optional expression is not found.
+
+ Example::
+ # US postal code can be a 5-digit zip, plus optional 4-digit qualifier
+ zip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4)))
+ zip.runTests('''
+ # traditional ZIP code
+ 12345
+
+ # ZIP+4 form
+ 12101-0001
+
+ # invalid ZIP
+ 98765-
+ ''')
+ prints::
+ # traditional ZIP code
+ 12345
+ ['12345']
+
+ # ZIP+4 form
+ 12101-0001
+ ['12101-0001']
+
+ # invalid ZIP
+ 98765-
+ ^
+ FAIL: Expected end of text (at char 5), (line:1, col:6)
+ """
+ def __init__( self, expr, default=_optionalNotMatched ):
+ super(Optional,self).__init__( expr, savelist=False )
+ self.saveAsList = self.expr.saveAsList
+ self.defaultValue = default
+ self.mayReturnEmpty = True
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ try:
+ loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False )
+ except (ParseException,IndexError):
+ if self.defaultValue is not _optionalNotMatched:
+ if self.expr.resultsName:
+ tokens = ParseResults([ self.defaultValue ])
+ tokens[self.expr.resultsName] = self.defaultValue
+ else:
+ tokens = [ self.defaultValue ]
+ else:
+ tokens = []
+ return loc, tokens
+
+ def __str__( self ):
+ if hasattr(self,"name"):
+ return self.name
+
+ if self.strRepr is None:
+ self.strRepr = "[" + _ustr(self.expr) + "]"
+
+ return self.strRepr
+
+class SkipTo(ParseElementEnhance):
+ """
+ Token for skipping over all undefined text until the matched expression is found.
+
+ Parameters:
+ - expr - target expression marking the end of the data to be skipped
+ - include - (default=C{False}) if True, the target expression is also parsed
+ (the skipped text and target expression are returned as a 2-element list).
+ - ignore - (default=C{None}) used to define grammars (typically quoted strings and
+ comments) that might contain false matches to the target expression
+ - failOn - (default=C{None}) define expressions that are not allowed to be
+ included in the skipped test; if found before the target expression is found,
+ the SkipTo is not a match
+
+ Example::
+ report = '''
+ Outstanding Issues Report - 1 Jan 2000
+
+ # | Severity | Description | Days Open
+ -----+----------+-------------------------------------------+-----------
+ 101 | Critical | Intermittent system crash | 6
+ 94 | Cosmetic | Spelling error on Login ('log|n') | 14
+ 79 | Minor | System slow when running too many reports | 47
+ '''
+ integer = Word(nums)
+ SEP = Suppress('|')
+ # use SkipTo to simply match everything up until the next SEP
+ # - ignore quoted strings, so that a '|' character inside a quoted string does not match
+ # - parse action will call token.strip() for each matched token, i.e., the description body
+ string_data = SkipTo(SEP, ignore=quotedString)
+ string_data.setParseAction(tokenMap(str.strip))
+ ticket_expr = (integer("issue_num") + SEP
+ + string_data("sev") + SEP
+ + string_data("desc") + SEP
+ + integer("days_open"))
+
+ for tkt in ticket_expr.searchString(report):
+ print tkt.dump()
+ prints::
+ ['101', 'Critical', 'Intermittent system crash', '6']
+ - days_open: 6
+ - desc: Intermittent system crash
+ - issue_num: 101
+ - sev: Critical
+ ['94', 'Cosmetic', "Spelling error on Login ('log|n')", '14']
+ - days_open: 14
+ - desc: Spelling error on Login ('log|n')
+ - issue_num: 94
+ - sev: Cosmetic
+ ['79', 'Minor', 'System slow when running too many reports', '47']
+ - days_open: 47
+ - desc: System slow when running too many reports
+ - issue_num: 79
+ - sev: Minor
+ """
+ def __init__( self, other, include=False, ignore=None, failOn=None ):
+ super( SkipTo, self ).__init__( other )
+ self.ignoreExpr = ignore
+ self.mayReturnEmpty = True
+ self.mayIndexError = False
+ self.includeMatch = include
+ self.asList = False
+ if isinstance(failOn, basestring):
+ self.failOn = ParserElement._literalStringClass(failOn)
+ else:
+ self.failOn = failOn
+ self.errmsg = "No match found for "+_ustr(self.expr)
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ startloc = loc
+ instrlen = len(instring)
+ expr = self.expr
+ expr_parse = self.expr._parse
+ self_failOn_canParseNext = self.failOn.canParseNext if self.failOn is not None else None
+ self_ignoreExpr_tryParse = self.ignoreExpr.tryParse if self.ignoreExpr is not None else None
+
+ tmploc = loc
+ while tmploc <= instrlen:
+ if self_failOn_canParseNext is not None:
+ # break if failOn expression matches
+ if self_failOn_canParseNext(instring, tmploc):
+ break
+
+ if self_ignoreExpr_tryParse is not None:
+ # advance past ignore expressions
+ while 1:
+ try:
+ tmploc = self_ignoreExpr_tryParse(instring, tmploc)
+ except ParseBaseException:
+ break
+
+ try:
+ expr_parse(instring, tmploc, doActions=False, callPreParse=False)
+ except (ParseException, IndexError):
+ # no match, advance loc in string
+ tmploc += 1
+ else:
+ # matched skipto expr, done
+ break
+
+ else:
+ # ran off the end of the input string without matching skipto expr, fail
+ raise ParseException(instring, loc, self.errmsg, self)
+
+ # build up return values
+ loc = tmploc
+ skiptext = instring[startloc:loc]
+ skipresult = ParseResults(skiptext)
+
+ if self.includeMatch:
+ loc, mat = expr_parse(instring,loc,doActions,callPreParse=False)
+ skipresult += mat
+
+ return loc, skipresult
+
+class Forward(ParseElementEnhance):
+ """
+ Forward declaration of an expression to be defined later -
+ used for recursive grammars, such as algebraic infix notation.
+ When the expression is known, it is assigned to the C{Forward} variable using the '<<' operator.
+
+ Note: take care when assigning to C{Forward} not to overlook precedence of operators.
+ Specifically, '|' has a lower precedence than '<<', so that::
+ fwdExpr << a | b | c
+ will actually be evaluated as::
+ (fwdExpr << a) | b | c
+ thereby leaving b and c out as parseable alternatives. It is recommended that you
+ explicitly group the values inserted into the C{Forward}::
+ fwdExpr << (a | b | c)
+ Converting to use the '<<=' operator instead will avoid this problem.
+
+ See L{ParseResults.pprint} for an example of a recursive parser created using
+ C{Forward}.
+ """
+ def __init__( self, other=None ):
+ super(Forward,self).__init__( other, savelist=False )
+
+ def __lshift__( self, other ):
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass(other)
+ self.expr = other
+ self.strRepr = None
+ self.mayIndexError = self.expr.mayIndexError
+ self.mayReturnEmpty = self.expr.mayReturnEmpty
+ self.setWhitespaceChars( self.expr.whiteChars )
+ self.skipWhitespace = self.expr.skipWhitespace
+ self.saveAsList = self.expr.saveAsList
+ self.ignoreExprs.extend(self.expr.ignoreExprs)
+ return self
+
+ def __ilshift__(self, other):
+ return self << other
+
+ def leaveWhitespace( self ):
+ self.skipWhitespace = False
+ return self
+
+ def streamline( self ):
+ if not self.streamlined:
+ self.streamlined = True
+ if self.expr is not None:
+ self.expr.streamline()
+ return self
+
+ def validate( self, validateTrace=[] ):
+ if self not in validateTrace:
+ tmp = validateTrace[:]+[self]
+ if self.expr is not None:
+ self.expr.validate(tmp)
+ self.checkRecursion([])
+
+ def __str__( self ):
+ if hasattr(self,"name"):
+ return self.name
+ return self.__class__.__name__ + ": ..."
+
+ # stubbed out for now - creates awful memory and perf issues
+ self._revertClass = self.__class__
+ self.__class__ = _ForwardNoRecurse
+ try:
+ if self.expr is not None:
+ retString = _ustr(self.expr)
+ else:
+ retString = "None"
+ finally:
+ self.__class__ = self._revertClass
+ return self.__class__.__name__ + ": " + retString
+
+ def copy(self):
+ if self.expr is not None:
+ return super(Forward,self).copy()
+ else:
+ ret = Forward()
+ ret <<= self
+ return ret
+
+class _ForwardNoRecurse(Forward):
+ def __str__( self ):
+ return "..."
+
+class TokenConverter(ParseElementEnhance):
+ """
+ Abstract subclass of C{ParseExpression}, for converting parsed results.
+ """
+ def __init__( self, expr, savelist=False ):
+ super(TokenConverter,self).__init__( expr )#, savelist )
+ self.saveAsList = False
+
+class Combine(TokenConverter):
+ """
+ Converter to concatenate all matching tokens to a single string.
+ By default, the matching patterns must also be contiguous in the input string;
+ this can be disabled by specifying C{'adjacent=False'} in the constructor.
+
+ Example::
+ real = Word(nums) + '.' + Word(nums)
+ print(real.parseString('3.1416')) # -> ['3', '.', '1416']
+ # will also erroneously match the following
+ print(real.parseString('3. 1416')) # -> ['3', '.', '1416']
+
+ real = Combine(Word(nums) + '.' + Word(nums))
+ print(real.parseString('3.1416')) # -> ['3.1416']
+ # no match when there are internal spaces
+ print(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...)
+ """
+ def __init__( self, expr, joinString="", adjacent=True ):
+ super(Combine,self).__init__( expr )
+ # suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself
+ if adjacent:
+ self.leaveWhitespace()
+ self.adjacent = adjacent
+ self.skipWhitespace = True
+ self.joinString = joinString
+ self.callPreparse = True
+
+ def ignore( self, other ):
+ if self.adjacent:
+ ParserElement.ignore(self, other)
+ else:
+ super( Combine, self).ignore( other )
+ return self
+
+ def postParse( self, instring, loc, tokenlist ):
+ retToks = tokenlist.copy()
+ del retToks[:]
+ retToks += ParseResults([ "".join(tokenlist._asStringList(self.joinString)) ], modal=self.modalResults)
+
+ if self.resultsName and retToks.haskeys():
+ return [ retToks ]
+ else:
+ return retToks
+
+class Group(TokenConverter):
+ """
+ Converter to return the matched tokens as a list - useful for returning tokens of C{L{ZeroOrMore}} and C{L{OneOrMore}} expressions.
+
+ Example::
+ ident = Word(alphas)
+ num = Word(nums)
+ term = ident | num
+ func = ident + Optional(delimitedList(term))
+ print(func.parseString("fn a,b,100")) # -> ['fn', 'a', 'b', '100']
+
+ func = ident + Group(Optional(delimitedList(term)))
+ print(func.parseString("fn a,b,100")) # -> ['fn', ['a', 'b', '100']]
+ """
+ def __init__( self, expr ):
+ super(Group,self).__init__( expr )
+ self.saveAsList = True
+
+ def postParse( self, instring, loc, tokenlist ):
+ return [ tokenlist ]
+
+class Dict(TokenConverter):
+ """
+ Converter to return a repetitive expression as a list, but also as a dictionary.
+ Each element can also be referenced using the first token in the expression as its key.
+ Useful for tabular report scraping when the first column can be used as a item key.
+
+ Example::
+ data_word = Word(alphas)
+ label = data_word + FollowedBy(':')
+ attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join))
+
+ text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
+ attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+
+ # print attributes as plain groups
+ print(OneOrMore(attr_expr).parseString(text).dump())
+
+ # instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign names
+ result = Dict(OneOrMore(Group(attr_expr))).parseString(text)
+ print(result.dump())
+
+ # access named fields as dict entries, or output as dict
+ print(result['shape'])
+ print(result.asDict())
+ prints::
+ ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap']
+
+ [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]
+ - color: light blue
+ - posn: upper left
+ - shape: SQUARE
+ - texture: burlap
+ SQUARE
+ {'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'}
+ See more examples at L{ParseResults} of accessing fields by results name.
+ """
+ def __init__( self, expr ):
+ super(Dict,self).__init__( expr )
+ self.saveAsList = True
+
+ def postParse( self, instring, loc, tokenlist ):
+ for i,tok in enumerate(tokenlist):
+ if len(tok) == 0:
+ continue
+ ikey = tok[0]
+ if isinstance(ikey,int):
+ ikey = _ustr(tok[0]).strip()
+ if len(tok)==1:
+ tokenlist[ikey] = _ParseResultsWithOffset("",i)
+ elif len(tok)==2 and not isinstance(tok[1],ParseResults):
+ tokenlist[ikey] = _ParseResultsWithOffset(tok[1],i)
+ else:
+ dictvalue = tok.copy() #ParseResults(i)
+ del dictvalue[0]
+ if len(dictvalue)!= 1 or (isinstance(dictvalue,ParseResults) and dictvalue.haskeys()):
+ tokenlist[ikey] = _ParseResultsWithOffset(dictvalue,i)
+ else:
+ tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0],i)
+
+ if self.resultsName:
+ return [ tokenlist ]
+ else:
+ return tokenlist
+
+
+class Suppress(TokenConverter):
+ """
+ Converter for ignoring the results of a parsed expression.
+
+ Example::
+ source = "a, b, c,d"
+ wd = Word(alphas)
+ wd_list1 = wd + ZeroOrMore(',' + wd)
+ print(wd_list1.parseString(source))
+
+ # often, delimiters that are useful during parsing are just in the
+ # way afterward - use Suppress to keep them out of the parsed output
+ wd_list2 = wd + ZeroOrMore(Suppress(',') + wd)
+ print(wd_list2.parseString(source))
+ prints::
+ ['a', ',', 'b', ',', 'c', ',', 'd']
+ ['a', 'b', 'c', 'd']
+ (See also L{delimitedList}.)
+ """
+ def postParse( self, instring, loc, tokenlist ):
+ return []
+
+ def suppress( self ):
+ return self
+
+
+class OnlyOnce(object):
+ """
+ Wrapper for parse actions, to ensure they are only called once.
+ """
+ def __init__(self, methodCall):
+ self.callable = _trim_arity(methodCall)
+ self.called = False
+ def __call__(self,s,l,t):
+ if not self.called:
+ results = self.callable(s,l,t)
+ self.called = True
+ return results
+ raise ParseException(s,l,"")
+ def reset(self):
+ self.called = False
+
+def traceParseAction(f):
+ """
+ Decorator for debugging parse actions.
+
+ When the parse action is called, this decorator will print C{">> entering I{method-name}(line:I{current_source_line}, I{parse_location}, I{matched_tokens})".}
+ When the parse action completes, the decorator will print C{"<<"} followed by the returned value, or any exception that the parse action raised.
+
+ Example::
+ wd = Word(alphas)
+
+ @traceParseAction
+ def remove_duplicate_chars(tokens):
+ return ''.join(sorted(set(''.join(tokens))))
+
+ wds = OneOrMore(wd).setParseAction(remove_duplicate_chars)
+ print(wds.parseString("slkdjs sld sldd sdlf sdljf"))
+ prints::
+ >>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {}))
+ <<leaving remove_duplicate_chars (ret: 'dfjkls')
+ ['dfjkls']
+ """
+ f = _trim_arity(f)
+ def z(*paArgs):
+ thisFunc = f.__name__
+ s,l,t = paArgs[-3:]
+ if len(paArgs)>3:
+ thisFunc = paArgs[0].__class__.__name__ + '.' + thisFunc
+ sys.stderr.write( ">>entering %s(line: '%s', %d, %r)\n" % (thisFunc,line(l,s),l,t) )
+ try:
+ ret = f(*paArgs)
+ except Exception as exc:
+ sys.stderr.write( "<<leaving %s (exception: %s)\n" % (thisFunc,exc) )
+ raise
+ sys.stderr.write( "<<leaving %s (ret: %r)\n" % (thisFunc,ret) )
+ return ret
+ try:
+ z.__name__ = f.__name__
+ except AttributeError:
+ pass
+ return z
+
+#
+# global helpers
+#
+def delimitedList( expr, delim=",", combine=False ):
+ """
+ Helper to define a delimited list of expressions - the delimiter defaults to ','.
+ By default, the list elements and delimiters can have intervening whitespace, and
+ comments, but this can be overridden by passing C{combine=True} in the constructor.
+ If C{combine} is set to C{True}, the matching tokens are returned as a single token
+ string, with the delimiters included; otherwise, the matching tokens are returned
+ as a list of tokens, with the delimiters suppressed.
+
+ Example::
+ delimitedList(Word(alphas)).parseString("aa,bb,cc") # -> ['aa', 'bb', 'cc']
+ delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE']
+ """
+ dlName = _ustr(expr)+" ["+_ustr(delim)+" "+_ustr(expr)+"]..."
+ if combine:
+ return Combine( expr + ZeroOrMore( delim + expr ) ).setName(dlName)
+ else:
+ return ( expr + ZeroOrMore( Suppress( delim ) + expr ) ).setName(dlName)
+
+def countedArray( expr, intExpr=None ):
+ """
+ Helper to define a counted list of expressions.
+ This helper defines a pattern of the form::
+ integer expr expr expr...
+ where the leading integer tells how many expr expressions follow.
+ The matched tokens returns the array of expr tokens as a list - the leading count token is suppressed.
+
+ If C{intExpr} is specified, it should be a pyparsing expression that produces an integer value.
+
+ Example::
+ countedArray(Word(alphas)).parseString('2 ab cd ef') # -> ['ab', 'cd']
+
+ # in this parser, the leading integer value is given in binary,
+ # '10' indicating that 2 values are in the array
+ binaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2))
+ countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef') # -> ['ab', 'cd']
+ """
+ arrayExpr = Forward()
+ def countFieldParseAction(s,l,t):
+ n = t[0]
+ arrayExpr << (n and Group(And([expr]*n)) or Group(empty))
+ return []
+ if intExpr is None:
+ intExpr = Word(nums).setParseAction(lambda t:int(t[0]))
+ else:
+ intExpr = intExpr.copy()
+ intExpr.setName("arrayLen")
+ intExpr.addParseAction(countFieldParseAction, callDuringTry=True)
+ return ( intExpr + arrayExpr ).setName('(len) ' + _ustr(expr) + '...')
+
+def _flatten(L):
+ ret = []
+ for i in L:
+ if isinstance(i,list):
+ ret.extend(_flatten(i))
+ else:
+ ret.append(i)
+ return ret
+
+def matchPreviousLiteral(expr):
+ """
+ Helper to define an expression that is indirectly defined from
+ the tokens matched in a previous expression, that is, it looks
+ for a 'repeat' of a previous expression. For example::
+ first = Word(nums)
+ second = matchPreviousLiteral(first)
+ matchExpr = first + ":" + second
+ will match C{"1:1"}, but not C{"1:2"}. Because this matches a
+ previous literal, will also match the leading C{"1:1"} in C{"1:10"}.
+ If this is not desired, use C{matchPreviousExpr}.
+ Do I{not} use with packrat parsing enabled.
+ """
+ rep = Forward()
+ def copyTokenToRepeater(s,l,t):
+ if t:
+ if len(t) == 1:
+ rep << t[0]
+ else:
+ # flatten t tokens
+ tflat = _flatten(t.asList())
+ rep << And(Literal(tt) for tt in tflat)
+ else:
+ rep << Empty()
+ expr.addParseAction(copyTokenToRepeater, callDuringTry=True)
+ rep.setName('(prev) ' + _ustr(expr))
+ return rep
+
+def matchPreviousExpr(expr):
+ """
+ Helper to define an expression that is indirectly defined from
+ the tokens matched in a previous expression, that is, it looks
+ for a 'repeat' of a previous expression. For example::
+ first = Word(nums)
+ second = matchPreviousExpr(first)
+ matchExpr = first + ":" + second
+ will match C{"1:1"}, but not C{"1:2"}. Because this matches by
+ expressions, will I{not} match the leading C{"1:1"} in C{"1:10"};
+ the expressions are evaluated first, and then compared, so
+ C{"1"} is compared with C{"10"}.
+ Do I{not} use with packrat parsing enabled.
+ """
+ rep = Forward()
+ e2 = expr.copy()
+ rep <<= e2
+ def copyTokenToRepeater(s,l,t):
+ matchTokens = _flatten(t.asList())
+ def mustMatchTheseTokens(s,l,t):
+ theseTokens = _flatten(t.asList())
+ if theseTokens != matchTokens:
+ raise ParseException("",0,"")
+ rep.setParseAction( mustMatchTheseTokens, callDuringTry=True )
+ expr.addParseAction(copyTokenToRepeater, callDuringTry=True)
+ rep.setName('(prev) ' + _ustr(expr))
+ return rep
+
+def _escapeRegexRangeChars(s):
+ #~ escape these chars: ^-]
+ for c in r"\^-]":
+ s = s.replace(c,_bslash+c)
+ s = s.replace("\n",r"\n")
+ s = s.replace("\t",r"\t")
+ return _ustr(s)
+
def oneOf( strs, caseless=False, useRegex=True ):
    """
    Helper to quickly define a set of alternative Literals, and makes sure to do
    longest-first testing when there is a conflict, regardless of the input order,
    but returns a C{L{MatchFirst}} for best performance.

    Parameters:
     - strs - a string of space-delimited literals, or a collection of string literals
     - caseless - (default=C{False}) - treat all literals as caseless
     - useRegex - (default=C{True}) - as an optimization, will generate a Regex
       object; otherwise, will generate a C{MatchFirst} object (if C{caseless=True}, or
       if creating a C{Regex} raises an exception)

    Example::
        comp_oper = oneOf("< = > <= >= !=")
        var = Word(alphas)
        number = Word(nums)
        term = var | number
        comparison_expr = term + comp_oper + term
        print(comparison_expr.searchString("B = 12 AA=23 B<=AA AA>12"))
    prints::
        [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']]
    """
    # comparison helpers: isequal detects duplicates, masks detects when one
    # symbol is a prefix of another (and would shadow it in a MatchFirst)
    if caseless:
        isequal = ( lambda a,b: a.upper() == b.upper() )
        masks = ( lambda a,b: b.upper().startswith(a.upper()) )
        parseElementClass = CaselessLiteral
    else:
        isequal = ( lambda a,b: a == b )
        masks = ( lambda a,b: b.startswith(a) )
        parseElementClass = Literal

    symbols = []
    if isinstance(strs,basestring):
        symbols = strs.split()
    elif isinstance(strs, Iterable):
        symbols = list(strs)
    else:
        warnings.warn("Invalid argument to oneOf, expected string or iterable",
                SyntaxWarning, stacklevel=2)
    if not symbols:
        return NoMatch()

    # remove duplicates, and reorder so that a longer symbol is always tried
    # before any of its prefixes (longest-first testing)
    i = 0
    while i < len(symbols)-1:
        cur = symbols[i]
        for j,other in enumerate(symbols[i+1:]):
            if ( isequal(other, cur) ):
                # exact duplicate - drop the later occurrence
                del symbols[i+j+1]
                break
            elif ( masks(cur, other) ):
                # cur is a prefix of other - move other in front of cur
                del symbols[i+j+1]
                symbols.insert(i,other)
                cur = other
                break
        else:
            # no change made at position i - advance
            i += 1

    if not caseless and useRegex:
        #~ print (strs,"->", "|".join( [ _escapeRegexChars(sym) for sym in symbols] ))
        try:
            # if every symbol is a single character, a character class is enough
            if len(symbols)==len("".join(symbols)):
                return Regex( "[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols) ).setName(' | '.join(symbols))
            else:
                return Regex( "|".join(re.escape(sym) for sym in symbols) ).setName(' | '.join(symbols))
        except Exception:
            warnings.warn("Exception creating Regex for oneOf, building MatchFirst",
                    SyntaxWarning, stacklevel=2)


    # last resort, just use MatchFirst
    return MatchFirst(parseElementClass(sym) for sym in symbols).setName(' | '.join(symbols))
+
def dictOf( key, value ):
    """
    Helper to easily and clearly define a dictionary by specifying the respective patterns
    for the key and value. Takes care of defining the C{L{Dict}}, C{L{ZeroOrMore}}, and
    C{L{Group}} tokens in the proper order. The key pattern can include delimiting markers
    or punctuation, as long as they are suppressed, thereby leaving the significant key
    text. The value pattern can include named results, so that the C{Dict} results can
    include named token fields.

    Example::
        text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
        attr_label = label
        attr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)

        # similar to Dict, but simpler call format
        result = dictOf(attr_label, attr_value).parseString(text)
        print(result.dump())
        print(result['shape'])
        print(result.shape)  # object attribute access works too
        print(result.asDict())
    prints::
        [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]
        - color: light blue
        - posn: upper left
        - shape: SQUARE
        - texture: burlap
        SQUARE
        SQUARE
        {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'}
    """
    # each key/value pair becomes one grouped sub-list; Dict then indexes them by key
    key_value_pair = Group(key + value)
    return Dict(ZeroOrMore(key_value_pair))
+
def originalTextFor(expr, asString=True):
    """
    Helper to return the original, untokenized text for a given expression. Useful to
    restore the parsed fields of an HTML start tag into the raw tag text itself, or to
    revert separate tokens with intervening whitespace back to the original matching
    input text. By default, returns a string containing the original parsed text.

    If the optional C{asString} argument is passed as C{False}, then the return value is a
    C{L{ParseResults}} containing any results names that were originally matched, and a
    single token containing the original matched text from the input string. So if
    the expression passed to C{L{originalTextFor}} contains expressions with defined
    results names, you must set C{asString} to C{False} if you want to preserve those
    results name values.

    Example::
        src = "this is test <b> bold <i>text</i> </b> normal text "
        for tag in ("b","i"):
            opener,closer = makeHTMLTags(tag)
            patt = originalTextFor(opener + SkipTo(closer) + closer)
            print(patt.searchString(src)[0])
    prints::
        ['<b> bold <i>text</i> </b>']
        ['<i>text</i>']
    """
    # markers record the character offsets just before and just after expr's match
    locMarker = Empty().setParseAction(lambda s,loc,t: loc)
    endlocMarker = locMarker.copy()
    # don't skip leading whitespace before recording the end location
    endlocMarker.callPreparse = False
    matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end")
    if asString:
        # return just the slice of the original input between the two markers
        extractText = lambda s,l,t: s[t._original_start:t._original_end]
    else:
        def extractText(s,l,t):
            # keep ParseResults (with any results names) but collapse the token
            # list to the single original-text slice; pop() removes the marker names
            t[:] = [s[t.pop('_original_start'):t.pop('_original_end')]]
    matchExpr.setParseAction(extractText)
    matchExpr.ignoreExprs = expr.ignoreExprs
    return matchExpr
+
def ungroup(expr):
    """
    Helper to undo pyparsing's default grouping of And expressions, even
    if all but one are non-empty.
    """
    def unwrapFirst(t):
        return t[0]
    return TokenConverter(expr).setParseAction(unwrapFirst)
+
def locatedExpr(expr):
    """
    Helper to decorate a returned token with its starting and ending locations in the
    input string.  This helper adds the following results names:
     - locn_start = location where matched expression begins
     - locn_end = location where matched expression ends
     - value = the actual parsed results

    Be careful if the input text contains C{<TAB>} characters, you may want to call
    C{L{ParserElement.parseWithTabs}}

    Example::
        wd = Word(alphas)
        for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"):
            print(match)
    prints::
        [[0, 'ljsdf', 5]]
        [[8, 'lksdjjf', 15]]
        [[18, 'lkkjj', 23]]
    """
    # zero-width marker whose parse action simply reports the current location
    def reportLoc(s, l, t):
        return l
    startMarker = Empty().setParseAction(reportLoc)
    # end marker must not skip whitespace, or it would report past the match
    endMarker = startMarker.copy().leaveWhitespace()
    return Group(startMarker("locn_start") + expr("value") + endMarker("locn_end"))
+
+
# convenience constants for positional expressions
empty = Empty().setName("empty")
lineStart = LineStart().setName("lineStart")
lineEnd = LineEnd().setName("lineEnd")
stringStart = StringStart().setName("stringStart")
stringEnd = StringEnd().setName("stringEnd")

# building blocks for srange's "[...]" parser (below):
# backslash-escaped punctuation, e.g. r"\]" -> "]"
_escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1])
# hex escapes in either \xNN or legacy \0xNN form
_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16)))
# octal escapes of the form \0NNN
_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8)))
_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | CharsNotIn(r'\]', exact=1)
# a dash-separated pair of single chars, grouped so srange can expand the range
_charRange = Group(_singleChar + Suppress("-") + _singleChar)
# full bracket expression: optional leading '^' (captured as "negate") plus the body
_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]"
+
def srange(s):
    r"""
    Helper to easily define string ranges for use in Word construction. Borrows
    syntax from regexp '[]' string range definitions::
        srange("[0-9]")   -> "0123456789"
        srange("[a-z]")   -> "abcdefghijklmnopqrstuvwxyz"
        srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_"
    The input string must be enclosed in []'s, and the returned string is the
    expanded character set joined into a single string.
    The values enclosed in the []'s may be:
     - a single character
     - an escaped character with a leading backslash (such as C{\-} or C{\]})
     - an escaped hex character with a leading C{'\x'} (C{\x21}, which is a C{'!'} character)
       (C{\0x##} is also supported for backwards compatibility)
     - an escaped octal character with a leading C{'\0'} (C{\041}, which is a C{'!'} character)
     - a range of any of the above, separated by a dash (C{'a-z'}, etc.)
     - any combination of the above (C{'aeiouy'}, C{'a-zA-Z0-9_$'}, etc.)
    """
    def expandPart(part):
        # single characters come through as plain strings; ranges arrive as a
        # grouped ParseResults pair [lo, hi] to be expanded inclusively
        if not isinstance(part, ParseResults):
            return part
        lo, hi = ord(part[0]), ord(part[1])
        return ''.join(unichr(code) for code in range(lo, hi + 1))
    try:
        body = _reBracketExpr.parseString(s).body
        return "".join(expandPart(part) for part in body)
    except Exception:
        # malformed range spec - mirror historical behavior and return ""
        return ""
+
def matchOnlyAtCol(n):
    """
    Helper method for defining parse actions that require matching at a specific
    column in the input text.
    """
    def verifyCol(strg, locn, toks):
        # col() is 1-based; reject the match if it does not start at column n
        actualCol = col(locn, strg)
        if actualCol != n:
            raise ParseException(strg, locn, "matched token not at column %d" % n)
    return verifyCol
+
def replaceWith(replStr):
    """
    Helper method for common parse actions that simply return a literal value.
    Especially useful when used with C{L{transformString<ParserElement.transformString>}()}.

    Example::
        num = Word(nums).setParseAction(lambda toks: int(toks[0]))
        na = oneOf("N/A NA").setParseAction(replaceWith(math.nan))
        term = na | num

        OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234]
    """
    def replacer(s, l, t):
        # ignore the matched tokens entirely; always emit the fixed value
        return [replStr]
    return replacer
+
def removeQuotes(s,l,t):
    """
    Helper parse action for removing quotation marks from parsed quoted strings.

    Example::
        # by default, quotation marks are included in parsed results
        quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"]

        # use removeQuotes to strip quotation marks from parsed results
        quotedString.setParseAction(removeQuotes)
        quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"]
    """
    # drop exactly one character from each end of the first token (the quotes)
    quoted = t[0]
    return quoted[1:-1]
+
def tokenMap(func, *args):
    """
    Helper to define a parse action by mapping a function to all elements of a
    ParseResults list. If any additional args are passed, they are forwarded to the
    given function as additional arguments after the token, as in
    C{hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))}, which will
    convert the parsed data to an integer using base 16.

    Example (compare the last to example in L{ParserElement.transformString}::
        hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16))
        hex_ints.runTests('''
            00 11 22 aa FF 0a 0d 1a
            ''')

        upperword = Word(alphas).setParseAction(tokenMap(str.upper))
        OneOrMore(upperword).runTests('''
            my kingdom for a horse
            ''')

        wd = Word(alphas).setParseAction(tokenMap(str.title))
        OneOrMore(wd).setParseAction(' '.join).runTests('''
            now is the winter of our discontent made glorious summer by this sun of york
            ''')
    prints::
        00 11 22 aa FF 0a 0d 1a
        [0, 17, 34, 170, 255, 10, 13, 26]

        my kingdom for a horse
        ['MY', 'KINGDOM', 'FOR', 'A', 'HORSE']

        now is the winter of our discontent made glorious summer by this sun of york
        ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York']
    """
    def pa(s, l, t):
        # apply func to every token, threading through any extra bound args
        mapped = []
        for tokn in t:
            mapped.append(func(tokn, *args))
        return mapped

    # give the parse action a helpful name for diagnostics; fall back to str(func)
    # for callables with neither __name__ nor an accessible class name
    try:
        func_name = getattr(func, '__name__',
                            getattr(func, '__class__').__name__)
    except Exception:
        func_name = str(func)
    pa.__name__ = func_name

    return pa
+
# legacy single-token case normalizers, built on tokenMap; kept for backward compatibility
upcaseTokens = tokenMap(lambda t: _ustr(t).upper())
"""(Deprecated) Helper parse action to convert tokens to upper case. Deprecated in favor of L{pyparsing_common.upcaseTokens}"""

downcaseTokens = tokenMap(lambda t: _ustr(t).lower())
"""(Deprecated) Helper parse action to convert tokens to lower case. Deprecated in favor of L{pyparsing_common.downcaseTokens}"""
+
def _makeTags(tagStr, xml):
    """Internal helper to construct opening and closing tag expressions, given a tag name.

    tagStr may be a plain string (converted to a Keyword, caseless for HTML) or an
    existing expression whose .name supplies the results-name stem. Returns the
    (openTag, closeTag) pair; both carry a .tag attribute with the tag name.
    """
    if isinstance(tagStr,basestring):
        resname = tagStr
        # HTML tags match case-insensitively; XML tags must match exactly
        tagStr = Keyword(tagStr, caseless=not xml)
    else:
        resname = tagStr.name

    tagAttrName = Word(alphas,alphanums+"_-:")
    if (xml):
        # XML: attribute values must be double-quoted, '=' is mandatory
        tagAttrValue = dblQuotedString.copy().setParseAction( removeQuotes )
        openTag = Suppress("<") + tagStr("tag") + \
                Dict(ZeroOrMore(Group( tagAttrName + Suppress("=") + tagAttrValue ))) + \
                Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">")
    else:
        # HTML: values may be quoted or bare (any printable except '>'),
        # attribute names are lowercased, and '=value' is optional
        printablesLessRAbrack = "".join(c for c in printables if c not in ">")
        tagAttrValue = quotedString.copy().setParseAction( removeQuotes ) | Word(printablesLessRAbrack)
        openTag = Suppress("<") + tagStr("tag") + \
                Dict(ZeroOrMore(Group( tagAttrName.setParseAction(downcaseTokens) + \
                Optional( Suppress("=") + tagAttrValue ) ))) + \
                Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">")
    closeTag = Combine(_L("</") + tagStr + ">")

    # results names like "startDiv"/"endDiv"; ':' in namespaced tags becomes CamelCase
    openTag = openTag.setResultsName("start"+"".join(resname.replace(":"," ").title().split())).setName("<%s>" % resname)
    closeTag = closeTag.setResultsName("end"+"".join(resname.replace(":"," ").title().split())).setName("</%s>" % resname)
    openTag.tag = resname
    closeTag.tag = resname
    return openTag, closeTag
+
def makeHTMLTags(tagStr):
    """
    Helper to construct opening and closing tag expressions for HTML, given a tag name.
    Matches tags in either upper or lower case, attributes with namespaces and with
    quoted or unquoted values.

    Example::
        text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>'
        # makeHTMLTags returns pyparsing expressions for the opening and closing tags as a 2-tuple
        a,a_end = makeHTMLTags("A")
        link_expr = a + SkipTo(a_end)("link_text") + a_end

        for link in link_expr.searchString(text):
            # attributes in the <A> tag (like "href" shown here) are also accessible as named results
            print(link.link_text, '->', link.href)
    prints::
        pyparsing -> http://pyparsing.wikispaces.com
    """
    # delegate to the shared builder in HTML mode (caseless tags, lenient attributes)
    return _makeTags(tagStr, xml=False)
+
def makeXMLTags(tagStr):
    """
    Helper to construct opening and closing tag expressions for XML, given a tag name.
    Matches tags only in the given upper/lower case.

    Example: similar to L{makeHTMLTags}
    """
    # delegate to the shared builder in XML mode (case-sensitive, strict attributes)
    return _makeTags(tagStr, xml=True)
+
def withAttribute(*args,**attrDict):
    """
    Helper to create a validating parse action to be used with start tags created
    with C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tag
    with a required attribute value, to avoid false matches on common tags such as
    C{<TD>} or C{<DIV>}.

    Call C{withAttribute} with a series of attribute names and values. Specify the list
    of filter attributes names and values as:
     - keyword arguments, as in C{(align="right")}, or
     - as an explicit dict with C{**} operator, when an attribute name is also a Python
       reserved word, as in C{**{"class":"Customer", "align":"right"}}
     - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") )
    For attribute names with a namespace prefix, you must use the second form. Attribute
    names are matched insensitive to upper/lower case.

    If just testing for C{class} (with or without a namespace), use C{L{withClass}}.

    To verify that the attribute exists, but without specifying a value, pass
    C{withAttribute.ANY_VALUE} as the value.

    Example::
        html = '''
            <div>
            Some text
            <div type="grid">1 4 0 1 0</div>
            <div type="graph">1,3 2,3 1,1</div>
            <div>this has no type</div>
            </div>

        '''
        div,div_end = makeHTMLTags("div")

        # only match div tag having a type attribute with value "grid"
        div_grid = div().setParseAction(withAttribute(type="grid"))
        grid_expr = div_grid + SkipTo(div | div_end)("body")
        for grid_header in grid_expr.searchString(html):
            print(grid_header.body)

        # construct a match with any div tag having a type attribute, regardless of the value
        div_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE))
        div_expr = div_any_type + SkipTo(div | div_end)("body")
        for div_header in div_expr.searchString(html):
            print(div_header.body)
    prints::
        1 4 0 1 0

        1 4 0 1 0
        1,3 2,3 1,1
    """
    # positional tuples take precedence over keyword args; normalize to a
    # list of (name, value) pairs either way
    if args:
        attrs = args[:]
    else:
        attrs = attrDict.items()
    attrs = [(k,v) for k,v in attrs]
    def pa(s,l,tokens):
        for attrName,attrValue in attrs:
            if attrName not in tokens:
                raise ParseException(s,l,"no matching attribute " + attrName)
            # ANY_VALUE sentinel means "attribute must exist, value unchecked"
            if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue:
                raise ParseException(s,l,"attribute '%s' has value '%s', must be '%s'" %
                                        (attrName, tokens[attrName], attrValue))
    return pa
# unique sentinel object: compare-by-identity marker for "any value accepted"
withAttribute.ANY_VALUE = object()
+
def withClass(classname, namespace=''):
    """
    Simplified version of C{L{withAttribute}} when matching on a div class - made
    difficult because C{class} is a reserved word in Python.

    Example::
        html = '''
            <div>
            Some text
            <div class="grid">1 4 0 1 0</div>
            <div class="graph">1,3 2,3 1,1</div>
            <div>this &lt;div&gt; has no class</div>
            </div>

        '''
        div,div_end = makeHTMLTags("div")
        div_grid = div().setParseAction(withClass("grid"))

        grid_expr = div_grid + SkipTo(div | div_end)("body")
        for grid_header in grid_expr.searchString(html):
            print(grid_header.body)

        div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE))
        div_expr = div_any_type + SkipTo(div | div_end)("body")
        for div_header in div_expr.searchString(html):
            print(div_header.body)
    prints::
        1 4 0 1 0

        1 4 0 1 0
        1,3 2,3 1,1
    """
    # prefix the attribute name with the namespace, if one was given
    if namespace:
        attr_name = "%s:class" % namespace
    else:
        attr_name = "class"
    # 'class' is a Python keyword, so it must be passed via **kwargs
    return withAttribute(**{attr_name: classname})
+
# associativity markers for infixNotation operator definitions; each is a
# unique sentinel object compared by identity
opAssoc = _Constants()
opAssoc.LEFT = object()
opAssoc.RIGHT = object()
+
def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ):
    """
    Helper method for constructing grammars of expressions made up of
    operators working in a precedence hierarchy. Operators may be unary or
    binary, left- or right-associative. Parse actions can also be attached
    to operator expressions. The generated parser will also recognize the use
    of parentheses to override operator precedences (see example below).

    Note: if you define a deep operator list, you may see performance issues
    when using infixNotation. See L{ParserElement.enablePackrat} for a
    mechanism to potentially improve your parser performance.

    Parameters:
     - baseExpr - expression representing the most basic element for the nested
     - opList - list of tuples, one for each operator precedence level in the
       expression grammar; each tuple is of the form
       (opExpr, numTerms, rightLeftAssoc, parseAction), where:
        - opExpr is the pyparsing expression for the operator;
          may also be a string, which will be converted to a Literal;
          if numTerms is 3, opExpr is a tuple of two expressions, for the
          two operators separating the 3 terms
        - numTerms is the number of terms for this operator (must
          be 1, 2, or 3)
        - rightLeftAssoc is the indicator whether the operator is
          right or left associative, using the pyparsing-defined
          constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}.
        - parseAction is the parse action to be associated with
          expressions matching this operator expression (the
          parse action tuple member may be omitted); if the parse action
          is passed a tuple or list of functions, this is equivalent to
          calling C{setParseAction(*fn)} (L{ParserElement.setParseAction})
     - lpar - expression for matching left-parentheses (default=C{Suppress('(')})
     - rpar - expression for matching right-parentheses (default=C{Suppress(')')})

    Example::
        # simple example of four-function arithmetic with ints and variable names
        integer = pyparsing_common.signed_integer
        varname = pyparsing_common.identifier

        arith_expr = infixNotation(integer | varname,
            [
            ('-', 1, opAssoc.RIGHT),
            (oneOf('* /'), 2, opAssoc.LEFT),
            (oneOf('+ -'), 2, opAssoc.LEFT),
            ])

        arith_expr.runTests('''
            5+3*6
            (5+3)*6
            -2--11
            ''', fullDump=False)
    prints::
        5+3*6
        [[5, '+', [3, '*', 6]]]

        (5+3)*6
        [[[5, '+', 3], '*', 6]]

        -2--11
        [[['-', 2], '-', ['-', 11]]]
    """
    ret = Forward()
    # the tightest-binding level: a base element or a parenthesized full expression
    lastExpr = baseExpr | ( lpar + ret + rpar )
    # build each precedence level on top of the previous (tighter) one
    for i,operDef in enumerate(opList):
        # pad the tuple so a missing parseAction member becomes None
        opExpr,arity,rightLeftAssoc,pa = (operDef + (None,))[:4]
        termName = "%s term" % opExpr if arity < 3 else "%s%s term" % opExpr
        if arity == 3:
            if opExpr is None or len(opExpr) != 2:
                raise ValueError("if numterms=3, opExpr must be a tuple or list of two expressions")
            opExpr1, opExpr2 = opExpr
        thisExpr = Forward().setName(termName)
        if rightLeftAssoc == opAssoc.LEFT:
            if arity == 1:
                matchExpr = FollowedBy(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) )
            elif arity == 2:
                if opExpr is not None:
                    matchExpr = FollowedBy(lastExpr + opExpr + lastExpr) + Group( lastExpr + OneOrMore( opExpr + lastExpr ) )
                else:
                    # opExpr of None means implicit adjacency (juxtaposition)
                    matchExpr = FollowedBy(lastExpr+lastExpr) + Group( lastExpr + OneOrMore(lastExpr) )
            elif arity == 3:
                matchExpr = FollowedBy(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + \
                            Group( lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr )
            else:
                raise ValueError("operator must be unary (1), binary (2), or ternary (3)")
        elif rightLeftAssoc == opAssoc.RIGHT:
            if arity == 1:
                # try to avoid LR with this extra test
                if not isinstance(opExpr, Optional):
                    opExpr = Optional(opExpr)
                matchExpr = FollowedBy(opExpr.expr + thisExpr) + Group( opExpr + thisExpr )
            elif arity == 2:
                if opExpr is not None:
                    # right-assoc: recurse into thisExpr on the right-hand side
                    matchExpr = FollowedBy(lastExpr + opExpr + thisExpr) + Group( lastExpr + OneOrMore( opExpr + thisExpr ) )
                else:
                    matchExpr = FollowedBy(lastExpr + thisExpr) + Group( lastExpr + OneOrMore( thisExpr ) )
            elif arity == 3:
                matchExpr = FollowedBy(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + \
                            Group( lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr )
            else:
                raise ValueError("operator must be unary (1), binary (2), or ternary (3)")
        else:
            raise ValueError("operator must indicate right or left associativity")
        if pa:
            if isinstance(pa, (tuple, list)):
                matchExpr.setParseAction(*pa)
            else:
                matchExpr.setParseAction(pa)
        # this level matches either an operator expression or falls through
        # to the next-tighter level
        thisExpr <<= ( matchExpr.setName(termName) | lastExpr )
        lastExpr = thisExpr
    ret <<= lastExpr
    return ret
+
operatorPrecedence = infixNotation
"""(Deprecated) Former name of C{L{infixNotation}}, will be dropped in a future release."""

# predefined quoted-string expressions; each regex body excludes unescaped
# newlines and quote characters, allowing doubled-quote and backslash escapes
dblQuotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"').setName("string enclosed in double quotes")
sglQuotedString = Combine(Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("string enclosed in single quotes")
# note: '+' binds tighter than '|', so this is (dbl-body + '"') | (sgl-body + "'")
quotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"'|
                       Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("quotedString using single or double quotes")
unicodeString = Combine(_L('u') + quotedString.copy()).setName("unicode string literal")
+
def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()):
    """
    Helper method for defining nested lists enclosed in opening and closing
    delimiters ("(" and ")" are the default).

    Parameters:
     - opener - opening character for a nested list (default=C{"("}); can also be a pyparsing expression
     - closer - closing character for a nested list (default=C{")"}); can also be a pyparsing expression
     - content - expression for items within the nested lists (default=C{None})
     - ignoreExpr - expression for ignoring opening and closing delimiters (default=C{quotedString})

    If an expression is not provided for the content argument, the nested
    expression will capture all whitespace-delimited content between delimiters
    as a list of separate values.

    Use the C{ignoreExpr} argument to define expressions that may contain
    opening or closing characters that should not be treated as opening
    or closing characters for nesting, such as quotedString or a comment
    expression. Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}.
    The default is L{quotedString}, but if no expressions are to be ignored,
    then pass C{None} for this argument.

    Example::
        data_type = oneOf("void int short long char float double")
        decl_data_type = Combine(data_type + Optional(Word('*')))
        ident = Word(alphas+'_', alphanums+'_')
        number = pyparsing_common.number
        arg = Group(decl_data_type + ident)
        LPAR,RPAR = map(Suppress, "()")

        code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment))

        c_function = (decl_data_type("type")
                      + ident("name")
                      + LPAR + Optional(delimitedList(arg), [])("args") + RPAR
                      + code_body("body"))
        c_function.ignore(cStyleComment)

        source_code = '''
            int is_odd(int x) {
                return (x%2);
            }

            int dec_to_hex(char hchar) {
                if (hchar >= '0' && hchar <= '9') {
                    return (ord(hchar)-ord('0'));
                } else {
                    return (10+ord(hchar)-ord('A'));
                }
            }
        '''
        for func in c_function.searchString(source_code):
            print("%(name)s (%(type)s) args: %(args)s" % func)

    prints::
        is_odd (int) args: [['int', 'x']]
        dec_to_hex (int) args: [['char', 'hchar']]
    """
    # NOTE: the default ignoreExpr (quotedString.copy()) is evaluated once at
    # definition time and shared across calls that rely on the default
    if opener == closer:
        raise ValueError("opening and closing strings cannot be the same")
    if content is None:
        # no content expression given: synthesize one that grabs runs of
        # non-whitespace text, stopping at (un-ignored) delimiters
        if isinstance(opener,basestring) and isinstance(closer,basestring):
            if len(opener) == 1 and len(closer)==1:
                # single-char delimiters can simply be excluded character-wise
                if ignoreExpr is not None:
                    content = (Combine(OneOrMore(~ignoreExpr +
                                    CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS,exact=1))
                                ).setParseAction(lambda t:t[0].strip()))
                else:
                    content = (empty.copy()+CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS
                                ).setParseAction(lambda t:t[0].strip()))
            else:
                # multi-char delimiters require explicit negative lookahead
                if ignoreExpr is not None:
                    content = (Combine(OneOrMore(~ignoreExpr +
                                    ~Literal(opener) + ~Literal(closer) +
                                    CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1))
                                ).setParseAction(lambda t:t[0].strip()))
                else:
                    content = (Combine(OneOrMore(~Literal(opener) + ~Literal(closer) +
                                    CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1))
                                ).setParseAction(lambda t:t[0].strip()))
        else:
            raise ValueError("opening and closing arguments must be strings if no content expression is given")
    # recursive grammar: a group is opener, then any mix of ignored text,
    # nested groups, or content, then closer
    ret = Forward()
    if ignoreExpr is not None:
        ret <<= Group( Suppress(opener) + ZeroOrMore( ignoreExpr | ret | content ) + Suppress(closer) )
    else:
        ret <<= Group( Suppress(opener) + ZeroOrMore( ret | content ) + Suppress(closer) )
    ret.setName('nested %s%s expression' % (opener,closer))
    return ret
+
def indentedBlock(blockStatementExpr, indentStack, indent=True):
    """
    Helper method for defining space-delimited indentation blocks, such as
    those used to define block statements in Python source code.

    Parameters:
     - blockStatementExpr - expression defining syntax of statement that
       is repeated within the indented block
     - indentStack - list created by caller to manage indentation stack
       (multiple statementWithIndentedBlock expressions within a single grammar
       should share a common indentStack)
     - indent - boolean indicating whether block must be indented beyond the
       the current level; set to False for block of left-most statements
       (default=C{True})

    A valid block must contain at least one C{blockStatement}.

    Example::
        data = '''
        def A(z):
          A1
          B = 100
          G = A2
          A2
          A3
        B
        def BB(a,b,c):
          BB1
          def BBA():
            bba1
            bba2
            bba3
        C
        D
        def spam(x,y):
             def eggs(z):
                 pass
        '''


        indentStack = [1]
        stmt = Forward()

        identifier = Word(alphas, alphanums)
        funcDecl = ("def" + identifier + Group( "(" + Optional( delimitedList(identifier) ) + ")" ) + ":")
        func_body = indentedBlock(stmt, indentStack)
        funcDef = Group( funcDecl + func_body )

        rvalue = Forward()
        funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")")
        rvalue << (funcCall | identifier | Word(nums))
        assignment = Group(identifier + "=" + rvalue)
        stmt << ( funcDef | assignment | identifier )

        module_body = OneOrMore(stmt)

        parseTree = module_body.parseString(data)
        parseTree.pprint()
    prints::
        [['def',
          'A',
          ['(', 'z', ')'],
          ':',
          [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]],
         'B',
         ['def',
          'BB',
          ['(', 'a', 'b', 'c', ')'],
          ':',
          [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]],
         'C',
         'D',
         ['def',
          'spam',
          ['(', 'x', 'y', ')'],
          ':',
          [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]]
    """
    # parse actions below mutate the caller-supplied indentStack; its top entry
    # is always the column of the current indentation level
    def checkPeerIndent(s,l,t):
        if l >= len(s): return
        curCol = col(l,s)
        if curCol != indentStack[-1]:
            if curCol > indentStack[-1]:
                raise ParseFatalException(s,l,"illegal nesting")
            raise ParseException(s,l,"not a peer entry")

    def checkSubIndent(s,l,t):
        curCol = col(l,s)
        if curCol > indentStack[-1]:
            # deeper than current level: push a new indentation level
            indentStack.append( curCol )
        else:
            raise ParseException(s,l,"not a subentry")

    def checkUnindent(s,l,t):
        if l >= len(s): return
        curCol = col(l,s)
        # NOTE(review): indexes indentStack[-2] without checking the stack has
        # two entries - assumes callers seed the stack (e.g. [1]) as documented
        if not(indentStack and curCol < indentStack[-1] and curCol <= indentStack[-2]):
            raise ParseException(s,l,"not an unindent")
        indentStack.pop()

    # NL swallows line endings (and any blank lines) between statements
    NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress())
    INDENT = (Empty() + Empty().setParseAction(checkSubIndent)).setName('INDENT')
    PEER = Empty().setParseAction(checkPeerIndent).setName('')
    UNDENT = Empty().setParseAction(checkUnindent).setName('UNINDENT')
    if indent:
        smExpr = Group( Optional(NL) +
            #~ FollowedBy(blockStatementExpr) +
            INDENT + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) + UNDENT)
    else:
        smExpr = Group( Optional(NL) +
            (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) )
    # allow backslash-continued lines inside block statements
    blockStatementExpr.ignore(_bslash + LineEnd())
    return smExpr.setName('indented block')
+
# Latin-1 accented letters and punctuation, expanded from legacy \0x hex ranges
alphas8bit = srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]")
punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]")

# open/close expressions matching any HTML tag name
anyOpenTag,anyCloseTag = makeHTMLTags(Word(alphas,alphanums+"_:").setName('any tag'))
# maps entity name (without '&'/';') to its replacement character
_htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(),'><& "\''))
commonHTMLEntity = Regex('&(?P<entity>' + '|'.join(_htmlEntityMap.keys()) +");").setName("common HTML entity")
def replaceHTMLEntity(t):
    """Helper parser action to replace common HTML entities with their special characters"""
    # 'entity' is the named group captured by commonHTMLEntity's regex
    entity_name = t.entity
    return _htmlEntityMap.get(entity_name)
+
# it's easy to get these comment structures wrong - they're very common, so may as well make them available
cStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/').setName("C style comment")
"Comment of the form C{/* ... */}"

htmlComment = Regex(r"<!--[\s\S]*?-->").setName("HTML comment")
"Comment of the form C{<!-- ... -->}"

# everything up to (but not including) the next newline
restOfLine = Regex(r".*").leaveWhitespace().setName("rest of line")
# '//' comment, allowing backslash-newline continuations
dblSlashComment = Regex(r"//(?:\\\n|[^\n])*").setName("// comment")
"Comment of the form C{// ... (to end of line)}"

cppStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/'| dblSlashComment).setName("C++ style comment")
"Comment of either form C{L{cStyleComment}} or C{L{dblSlashComment}}"

javaStyleComment = cppStyleComment
"Same as C{L{cppStyleComment}}"

pythonStyleComment = Regex(r"#.*").setName("Python style comment")
"Comment of the form C{# ... (to end of line)}"

# one comma-separated item: printable words, possibly containing internal
# spaces/tabs, but never crossing a comma or line end
_commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') +
                                  Optional( Word(" \t") +
                                            ~Literal(",") + ~LineEnd() ) ) ).streamline().setName("commaItem")
commaSeparatedList = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("commaSeparatedList")
"""(Deprecated) Predefined expression of 1 or more printable words or quoted strings, separated by commas.
   This expression is deprecated in favor of L{pyparsing_common.comma_separated_list}."""
+
+# some other useful expressions - using lower-case class name since we are really using this as a namespace
+class pyparsing_common:
+ """
+ Here are some common low-level expressions that may be useful in jump-starting parser development:
+ - numeric forms (L{integers<integer>}, L{reals<real>}, L{scientific notation<sci_real>})
+ - common L{programming identifiers<identifier>}
+ - network addresses (L{MAC<mac_address>}, L{IPv4<ipv4_address>}, L{IPv6<ipv6_address>})
+ - ISO8601 L{dates<iso8601_date>} and L{datetime<iso8601_datetime>}
+ - L{UUID<uuid>}
+ - L{comma-separated list<comma_separated_list>}
+ Parse actions:
+ - C{L{convertToInteger}}
+ - C{L{convertToFloat}}
+ - C{L{convertToDate}}
+ - C{L{convertToDatetime}}
+ - C{L{stripHTMLTags}}
+ - C{L{upcaseTokens}}
+ - C{L{downcaseTokens}}
+
+ Example::
+ pyparsing_common.number.runTests('''
+ # any int or real number, returned as the appropriate type
+ 100
+ -100
+ +100
+ 3.14159
+ 6.02e23
+ 1e-12
+ ''')
+
+ pyparsing_common.fnumber.runTests('''
+ # any int or real number, returned as float
+ 100
+ -100
+ +100
+ 3.14159
+ 6.02e23
+ 1e-12
+ ''')
+
+ pyparsing_common.hex_integer.runTests('''
+ # hex numbers
+ 100
+ FF
+ ''')
+
+ pyparsing_common.fraction.runTests('''
+ # fractions
+ 1/2
+ -3/4
+ ''')
+
+ pyparsing_common.mixed_integer.runTests('''
+ # mixed fractions
+ 1
+ 1/2
+ -3/4
+ 1-3/4
+ ''')
+
+ import uuid
+ pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID))
+ pyparsing_common.uuid.runTests('''
+ # uuid
+ 12345678-1234-5678-1234-567812345678
+ ''')
+ prints::
+ # any int or real number, returned as the appropriate type
+ 100
+ [100]
+
+ -100
+ [-100]
+
+ +100
+ [100]
+
+ 3.14159
+ [3.14159]
+
+ 6.02e23
+ [6.02e+23]
+
+ 1e-12
+ [1e-12]
+
+ # any int or real number, returned as float
+ 100
+ [100.0]
+
+ -100
+ [-100.0]
+
+ +100
+ [100.0]
+
+ 3.14159
+ [3.14159]
+
+ 6.02e23
+ [6.02e+23]
+
+ 1e-12
+ [1e-12]
+
+ # hex numbers
+ 100
+ [256]
+
+ FF
+ [255]
+
+ # fractions
+ 1/2
+ [0.5]
+
+ -3/4
+ [-0.75]
+
+ # mixed fractions
+ 1
+ [1]
+
+ 1/2
+ [0.5]
+
+ -3/4
+ [-0.75]
+
+ 1-3/4
+ [1.75]
+
+ # uuid
+ 12345678-1234-5678-1234-567812345678
+ [UUID('12345678-1234-5678-1234-567812345678')]
+ """
+
+ convertToInteger = tokenMap(int)
+ """
+ Parse action for converting parsed integers to Python int
+ """
+
+ convertToFloat = tokenMap(float)
+ """
+ Parse action for converting parsed numbers to Python float
+ """
+
+ integer = Word(nums).setName("integer").setParseAction(convertToInteger)
+ """expression that parses an unsigned integer, returns an int"""
+
+ hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int,16))
+ """expression that parses a hexadecimal integer, returns an int"""
+
+ signed_integer = Regex(r'[+-]?\d+').setName("signed integer").setParseAction(convertToInteger)
+ """expression that parses an integer with optional leading sign, returns an int"""
+
+ fraction = (signed_integer().setParseAction(convertToFloat) + '/' + signed_integer().setParseAction(convertToFloat)).setName("fraction")
+ """fractional expression of an integer divided by an integer, returns a float"""
+ fraction.addParseAction(lambda t: t[0]/t[-1])
+
+ mixed_integer = (fraction | signed_integer + Optional(Optional('-').suppress() + fraction)).setName("fraction or mixed integer-fraction")
+    """mixed integer of the form 'integer - fraction', where the fraction part is optional; returns float"""
+ mixed_integer.addParseAction(sum)
+
+ real = Regex(r'[+-]?\d+\.\d*').setName("real number").setParseAction(convertToFloat)
+ """expression that parses a floating point number and returns a float"""
+
+ sci_real = Regex(r'[+-]?\d+([eE][+-]?\d+|\.\d*([eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat)
+ """expression that parses a floating point number with optional scientific notation and returns a float"""
+
+ # streamlining this expression makes the docs nicer-looking
+ number = (sci_real | real | signed_integer).streamline()
+ """any numeric expression, returns the corresponding Python type"""
+
+ fnumber = Regex(r'[+-]?\d+\.?\d*([eE][+-]?\d+)?').setName("fnumber").setParseAction(convertToFloat)
+ """any int or real number, returned as float"""
+
+ identifier = Word(alphas+'_', alphanums+'_').setName("identifier")
+ """typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')"""
+
+ ipv4_address = Regex(r'(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}').setName("IPv4 address")
+ "IPv4 address (C{0.0.0.0 - 255.255.255.255})"
+
+ _ipv6_part = Regex(r'[0-9a-fA-F]{1,4}').setName("hex_integer")
+ _full_ipv6_address = (_ipv6_part + (':' + _ipv6_part)*7).setName("full IPv6 address")
+ _short_ipv6_address = (Optional(_ipv6_part + (':' + _ipv6_part)*(0,6)) + "::" + Optional(_ipv6_part + (':' + _ipv6_part)*(0,6))).setName("short IPv6 address")
+ _short_ipv6_address.addCondition(lambda t: sum(1 for tt in t if pyparsing_common._ipv6_part.matches(tt)) < 8)
+ _mixed_ipv6_address = ("::ffff:" + ipv4_address).setName("mixed IPv6 address")
+ ipv6_address = Combine((_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).setName("IPv6 address")).setName("IPv6 address")
+ "IPv6 address (long, short, or mixed form)"
+
+ mac_address = Regex(r'[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}').setName("MAC address")
+    "MAC address xx:xx:xx:xx:xx:xx (may also have '-' or '.' delimiters)"
+
+ @staticmethod
+ def convertToDate(fmt="%Y-%m-%d"):
+ """
+ Helper to create a parse action for converting parsed date string to Python datetime.date
+
+ Params -
+ - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%d"})
+
+ Example::
+ date_expr = pyparsing_common.iso8601_date.copy()
+ date_expr.setParseAction(pyparsing_common.convertToDate())
+ print(date_expr.parseString("1999-12-31"))
+ prints::
+ [datetime.date(1999, 12, 31)]
+ """
+ def cvt_fn(s,l,t):
+ try:
+ return datetime.strptime(t[0], fmt).date()
+ except ValueError as ve:
+ raise ParseException(s, l, str(ve))
+ return cvt_fn
+
+ @staticmethod
+ def convertToDatetime(fmt="%Y-%m-%dT%H:%M:%S.%f"):
+ """
+ Helper to create a parse action for converting parsed datetime string to Python datetime.datetime
+
+ Params -
+ - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%dT%H:%M:%S.%f"})
+
+ Example::
+ dt_expr = pyparsing_common.iso8601_datetime.copy()
+ dt_expr.setParseAction(pyparsing_common.convertToDatetime())
+ print(dt_expr.parseString("1999-12-31T23:59:59.999"))
+ prints::
+ [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)]
+ """
+ def cvt_fn(s,l,t):
+ try:
+ return datetime.strptime(t[0], fmt)
+ except ValueError as ve:
+ raise ParseException(s, l, str(ve))
+ return cvt_fn
+
+ iso8601_date = Regex(r'(?P<year>\d{4})(?:-(?P<month>\d\d)(?:-(?P<day>\d\d))?)?').setName("ISO8601 date")
+ "ISO8601 date (C{yyyy-mm-dd})"
+
+ iso8601_datetime = Regex(r'(?P<year>\d{4})-(?P<month>\d\d)-(?P<day>\d\d)[T ](?P<hour>\d\d):(?P<minute>\d\d)(:(?P<second>\d\d(\.\d*)?)?)?(?P<tz>Z|[+-]\d\d:?\d\d)?').setName("ISO8601 datetime")
+ "ISO8601 datetime (C{yyyy-mm-ddThh:mm:ss.s(Z|+-00:00)}) - trailing seconds, milliseconds, and timezone optional; accepts separating C{'T'} or C{' '}"
+
+ uuid = Regex(r'[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}').setName("UUID")
+ "UUID (C{xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx})"
+
+ _html_stripper = anyOpenTag.suppress() | anyCloseTag.suppress()
+ @staticmethod
+ def stripHTMLTags(s, l, tokens):
+ """
+ Parse action to remove HTML tags from web page HTML source
+
+ Example::
+ # strip HTML links from normal text
+ text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>'
+ td,td_end = makeHTMLTags("TD")
+ table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end
+
+ print(table_text.parseString(text).body) # -> 'More info at the pyparsing wiki page'
+ """
+ return pyparsing_common._html_stripper.transformString(tokens[0])
+
+ _commasepitem = Combine(OneOrMore(~Literal(",") + ~LineEnd() + Word(printables, excludeChars=',')
+ + Optional( White(" \t") ) ) ).streamline().setName("commaItem")
+ comma_separated_list = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("comma separated list")
+ """Predefined expression of 1 or more printable words or quoted strings, separated by commas."""
+
+ upcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).upper()))
+ """Parse action to convert tokens to upper case."""
+
+ downcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).lower()))
+ """Parse action to convert tokens to lower case."""
+
+
+if __name__ == "__main__":
+
+ selectToken = CaselessLiteral("select")
+ fromToken = CaselessLiteral("from")
+
+ ident = Word(alphas, alphanums + "_$")
+
+ columnName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens)
+ columnNameList = Group(delimitedList(columnName)).setName("columns")
+ columnSpec = ('*' | columnNameList)
+
+ tableName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens)
+ tableNameList = Group(delimitedList(tableName)).setName("tables")
+
+ simpleSQL = selectToken("command") + columnSpec("columns") + fromToken + tableNameList("tables")
+
+ # demo runTests method, including embedded comments in test string
+ simpleSQL.runTests("""
+ # '*' as column list and dotted table name
+ select * from SYS.XYZZY
+
+ # caseless match on "SELECT", and casts back to "select"
+ SELECT * from XYZZY, ABC
+
+ # list of column names, and mixed case SELECT keyword
+ Select AA,BB,CC from Sys.dual
+
+ # multiple tables
+ Select A, B, C from Sys.dual, Table2
+
+ # invalid SELECT keyword - should fail
+ Xelect A, B, C from Sys.dual
+
+ # incomplete command - should fail
+ Select
+
+ # invalid column name - should fail
+ Select ^^^ frox Sys.dual
+
+ """)
+
+ pyparsing_common.number.runTests("""
+ 100
+ -100
+ +100
+ 3.14159
+ 6.02e23
+ 1e-12
+ """)
+
+ # any int or real number, returned as float
+ pyparsing_common.fnumber.runTests("""
+ 100
+ -100
+ +100
+ 3.14159
+ 6.02e23
+ 1e-12
+ """)
+
+ pyparsing_common.hex_integer.runTests("""
+ 100
+ FF
+ """)
+
+ import uuid
+ pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID))
+ pyparsing_common.uuid.runTests("""
+ 12345678-1234-5678-1234-567812345678
+ """)
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/six.py b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/six.py
new file mode 100644
index 0000000000000000000000000000000000000000..190c0239cd7d7af82a6e0cbc8d68053fa2e3dfaf
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pkg_resources/_vendor/six.py
@@ -0,0 +1,868 @@
+"""Utilities for writing code that runs on Python 2 and 3"""
+
+# Copyright (c) 2010-2015 Benjamin Peterson
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+from __future__ import absolute_import
+
+import functools
+import itertools
+import operator
+import sys
+import types
+
+__author__ = "Benjamin Peterson <benjamin@python.org>"
+__version__ = "1.10.0"
+
+
+# Useful for very coarse version differentiation.
+PY2 = sys.version_info[0] == 2
+PY3 = sys.version_info[0] == 3
+PY34 = sys.version_info[0:2] >= (3, 4)
+
+if PY3:
+ string_types = str,
+ integer_types = int,
+ class_types = type,
+ text_type = str
+ binary_type = bytes
+
+ MAXSIZE = sys.maxsize
+else:
+ string_types = basestring,
+ integer_types = (int, long)
+ class_types = (type, types.ClassType)
+ text_type = unicode
+ binary_type = str
+
+ if sys.platform.startswith("java"):
+ # Jython always uses 32 bits.
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
+ class X(object):
+
+ def __len__(self):
+ return 1 << 31
+ try:
+ len(X())
+ except OverflowError:
+ # 32-bit
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # 64-bit
+ MAXSIZE = int((1 << 63) - 1)
+ del X
+
+
+def _add_doc(func, doc):
+ """Add documentation to a function."""
+ func.__doc__ = doc
+
+
+def _import_module(name):
+ """Import module, returning the module after the last dot."""
+ __import__(name)
+ return sys.modules[name]
+
+
+class _LazyDescr(object):
+
+ def __init__(self, name):
+ self.name = name
+
+ def __get__(self, obj, tp):
+ result = self._resolve()
+ setattr(obj, self.name, result) # Invokes __set__.
+ try:
+ # This is a bit ugly, but it avoids running this again by
+ # removing this descriptor.
+ delattr(obj.__class__, self.name)
+ except AttributeError:
+ pass
+ return result
+
+
+class MovedModule(_LazyDescr):
+
+ def __init__(self, name, old, new=None):
+ super(MovedModule, self).__init__(name)
+ if PY3:
+ if new is None:
+ new = name
+ self.mod = new
+ else:
+ self.mod = old
+
+ def _resolve(self):
+ return _import_module(self.mod)
+
+ def __getattr__(self, attr):
+ _module = self._resolve()
+ value = getattr(_module, attr)
+ setattr(self, attr, value)
+ return value
+
+
+class _LazyModule(types.ModuleType):
+
+ def __init__(self, name):
+ super(_LazyModule, self).__init__(name)
+ self.__doc__ = self.__class__.__doc__
+
+ def __dir__(self):
+ attrs = ["__doc__", "__name__"]
+ attrs += [attr.name for attr in self._moved_attributes]
+ return attrs
+
+ # Subclasses should override this
+ _moved_attributes = []
+
+
+class MovedAttribute(_LazyDescr):
+
+ def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
+ super(MovedAttribute, self).__init__(name)
+ if PY3:
+ if new_mod is None:
+ new_mod = name
+ self.mod = new_mod
+ if new_attr is None:
+ if old_attr is None:
+ new_attr = name
+ else:
+ new_attr = old_attr
+ self.attr = new_attr
+ else:
+ self.mod = old_mod
+ if old_attr is None:
+ old_attr = name
+ self.attr = old_attr
+
+ def _resolve(self):
+ module = _import_module(self.mod)
+ return getattr(module, self.attr)
+
+
+class _SixMetaPathImporter(object):
+
+ """
+ A meta path importer to import six.moves and its submodules.
+
+ This class implements a PEP302 finder and loader. It should be compatible
+ with Python 2.5 and all existing versions of Python3
+ """
+
+ def __init__(self, six_module_name):
+ self.name = six_module_name
+ self.known_modules = {}
+
+ def _add_module(self, mod, *fullnames):
+ for fullname in fullnames:
+ self.known_modules[self.name + "." + fullname] = mod
+
+ def _get_module(self, fullname):
+ return self.known_modules[self.name + "." + fullname]
+
+ def find_module(self, fullname, path=None):
+ if fullname in self.known_modules:
+ return self
+ return None
+
+ def __get_module(self, fullname):
+ try:
+ return self.known_modules[fullname]
+ except KeyError:
+ raise ImportError("This loader does not know module " + fullname)
+
+ def load_module(self, fullname):
+ try:
+ # in case of a reload
+ return sys.modules[fullname]
+ except KeyError:
+ pass
+ mod = self.__get_module(fullname)
+ if isinstance(mod, MovedModule):
+ mod = mod._resolve()
+ else:
+ mod.__loader__ = self
+ sys.modules[fullname] = mod
+ return mod
+
+ def is_package(self, fullname):
+ """
+        Return true if the named module is a package.
+
+ We need this method to get correct spec objects with
+ Python 3.4 (see PEP451)
+ """
+ return hasattr(self.__get_module(fullname), "__path__")
+
+ def get_code(self, fullname):
+ """Return None
+
+ Required, if is_package is implemented"""
+ self.__get_module(fullname) # eventually raises ImportError
+ return None
+ get_source = get_code # same as get_code
+
+_importer = _SixMetaPathImporter(__name__)
+
+
+class _MovedItems(_LazyModule):
+
+ """Lazy loading of moved objects"""
+ __path__ = [] # mark as package
+
+
+_moved_attributes = [
+ MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
+ MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
+ MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
+ MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
+ MovedAttribute("intern", "__builtin__", "sys"),
+ MovedAttribute("map", "itertools", "builtins", "imap", "map"),
+ MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
+ MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
+ MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
+ MovedAttribute("reduce", "__builtin__", "functools"),
+ MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
+ MovedAttribute("StringIO", "StringIO", "io"),
+ MovedAttribute("UserDict", "UserDict", "collections"),
+ MovedAttribute("UserList", "UserList", "collections"),
+ MovedAttribute("UserString", "UserString", "collections"),
+ MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
+ MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
+ MovedModule("builtins", "__builtin__"),
+ MovedModule("configparser", "ConfigParser"),
+ MovedModule("copyreg", "copy_reg"),
+ MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
+ MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
+ MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
+ MovedModule("http_cookies", "Cookie", "http.cookies"),
+ MovedModule("html_entities", "htmlentitydefs", "html.entities"),
+ MovedModule("html_parser", "HTMLParser", "html.parser"),
+ MovedModule("http_client", "httplib", "http.client"),
+ MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
+ MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
+ MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
+ MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
+ MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
+ MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
+ MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
+ MovedModule("cPickle", "cPickle", "pickle"),
+ MovedModule("queue", "Queue"),
+ MovedModule("reprlib", "repr"),
+ MovedModule("socketserver", "SocketServer"),
+ MovedModule("_thread", "thread", "_thread"),
+ MovedModule("tkinter", "Tkinter"),
+ MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
+ MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
+ MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
+ MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
+ MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
+ MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
+ MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
+ MovedModule("tkinter_colorchooser", "tkColorChooser",
+ "tkinter.colorchooser"),
+ MovedModule("tkinter_commondialog", "tkCommonDialog",
+ "tkinter.commondialog"),
+ MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_font", "tkFont", "tkinter.font"),
+ MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
+ MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
+ "tkinter.simpledialog"),
+ MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
+ MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
+ MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
+ MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
+ MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
+ MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
+]
+# Add windows specific modules.
+if sys.platform == "win32":
+ _moved_attributes += [
+ MovedModule("winreg", "_winreg"),
+ ]
+
+for attr in _moved_attributes:
+ setattr(_MovedItems, attr.name, attr)
+ if isinstance(attr, MovedModule):
+ _importer._add_module(attr, "moves." + attr.name)
+del attr
+
+_MovedItems._moved_attributes = _moved_attributes
+
+moves = _MovedItems(__name__ + ".moves")
+_importer._add_module(moves, "moves")
+
+
+class Module_six_moves_urllib_parse(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_parse"""
+
+
+_urllib_parse_moved_attributes = [
+ MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
+ MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
+ MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
+ MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
+ MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
+ MovedAttribute("urljoin", "urlparse", "urllib.parse"),
+ MovedAttribute("urlparse", "urlparse", "urllib.parse"),
+ MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
+ MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
+ MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
+ MovedAttribute("quote", "urllib", "urllib.parse"),
+ MovedAttribute("quote_plus", "urllib", "urllib.parse"),
+ MovedAttribute("unquote", "urllib", "urllib.parse"),
+ MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
+ MovedAttribute("urlencode", "urllib", "urllib.parse"),
+ MovedAttribute("splitquery", "urllib", "urllib.parse"),
+ MovedAttribute("splittag", "urllib", "urllib.parse"),
+ MovedAttribute("splituser", "urllib", "urllib.parse"),
+ MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_params", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_query", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
+]
+for attr in _urllib_parse_moved_attributes:
+ setattr(Module_six_moves_urllib_parse, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
+ "moves.urllib_parse", "moves.urllib.parse")
+
+
+class Module_six_moves_urllib_error(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_error"""
+
+
+_urllib_error_moved_attributes = [
+ MovedAttribute("URLError", "urllib2", "urllib.error"),
+ MovedAttribute("HTTPError", "urllib2", "urllib.error"),
+ MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
+]
+for attr in _urllib_error_moved_attributes:
+ setattr(Module_six_moves_urllib_error, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
+ "moves.urllib_error", "moves.urllib.error")
+
+
+class Module_six_moves_urllib_request(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_request"""
+
+
+_urllib_request_moved_attributes = [
+ MovedAttribute("urlopen", "urllib2", "urllib.request"),
+ MovedAttribute("install_opener", "urllib2", "urllib.request"),
+ MovedAttribute("build_opener", "urllib2", "urllib.request"),
+ MovedAttribute("pathname2url", "urllib", "urllib.request"),
+ MovedAttribute("url2pathname", "urllib", "urllib.request"),
+ MovedAttribute("getproxies", "urllib", "urllib.request"),
+ MovedAttribute("Request", "urllib2", "urllib.request"),
+ MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
+ MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
+ MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
+ MovedAttribute("FileHandler", "urllib2", "urllib.request"),
+ MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
+ MovedAttribute("urlretrieve", "urllib", "urllib.request"),
+ MovedAttribute("urlcleanup", "urllib", "urllib.request"),
+ MovedAttribute("URLopener", "urllib", "urllib.request"),
+ MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
+ MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
+]
+for attr in _urllib_request_moved_attributes:
+ setattr(Module_six_moves_urllib_request, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
+ "moves.urllib_request", "moves.urllib.request")
+
+
+class Module_six_moves_urllib_response(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_response"""
+
+
+_urllib_response_moved_attributes = [
+ MovedAttribute("addbase", "urllib", "urllib.response"),
+ MovedAttribute("addclosehook", "urllib", "urllib.response"),
+ MovedAttribute("addinfo", "urllib", "urllib.response"),
+ MovedAttribute("addinfourl", "urllib", "urllib.response"),
+]
+for attr in _urllib_response_moved_attributes:
+ setattr(Module_six_moves_urllib_response, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
+ "moves.urllib_response", "moves.urllib.response")
+
+
+class Module_six_moves_urllib_robotparser(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_robotparser"""
+
+
+_urllib_robotparser_moved_attributes = [
+ MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
+]
+for attr in _urllib_robotparser_moved_attributes:
+ setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
+ "moves.urllib_robotparser", "moves.urllib.robotparser")
+
+
+class Module_six_moves_urllib(types.ModuleType):
+
+ """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
+ __path__ = [] # mark as package
+ parse = _importer._get_module("moves.urllib_parse")
+ error = _importer._get_module("moves.urllib_error")
+ request = _importer._get_module("moves.urllib_request")
+ response = _importer._get_module("moves.urllib_response")
+ robotparser = _importer._get_module("moves.urllib_robotparser")
+
+ def __dir__(self):
+ return ['parse', 'error', 'request', 'response', 'robotparser']
+
+_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
+ "moves.urllib")
+
+
+def add_move(move):
+ """Add an item to six.moves."""
+ setattr(_MovedItems, move.name, move)
+
+
+def remove_move(name):
+ """Remove item from six.moves."""
+ try:
+ delattr(_MovedItems, name)
+ except AttributeError:
+ try:
+ del moves.__dict__[name]
+ except KeyError:
+ raise AttributeError("no such move, %r" % (name,))
+
+
+if PY3:
+ _meth_func = "__func__"
+ _meth_self = "__self__"
+
+ _func_closure = "__closure__"
+ _func_code = "__code__"
+ _func_defaults = "__defaults__"
+ _func_globals = "__globals__"
+else:
+ _meth_func = "im_func"
+ _meth_self = "im_self"
+
+ _func_closure = "func_closure"
+ _func_code = "func_code"
+ _func_defaults = "func_defaults"
+ _func_globals = "func_globals"
+
+
+try:
+ advance_iterator = next
+except NameError:
+ def advance_iterator(it):
+ return it.next()
+next = advance_iterator
+
+
+try:
+ callable = callable
+except NameError:
+ def callable(obj):
+ return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
+
+
+if PY3:
+ def get_unbound_function(unbound):
+ return unbound
+
+ create_bound_method = types.MethodType
+
+ def create_unbound_method(func, cls):
+ return func
+
+ Iterator = object
+else:
+ def get_unbound_function(unbound):
+ return unbound.im_func
+
+ def create_bound_method(func, obj):
+ return types.MethodType(func, obj, obj.__class__)
+
+ def create_unbound_method(func, cls):
+ return types.MethodType(func, None, cls)
+
+ class Iterator(object):
+
+ def next(self):
+ return type(self).__next__(self)
+
+ callable = callable
+_add_doc(get_unbound_function,
+ """Get the function out of a possibly unbound function""")
+
+
+get_method_function = operator.attrgetter(_meth_func)
+get_method_self = operator.attrgetter(_meth_self)
+get_function_closure = operator.attrgetter(_func_closure)
+get_function_code = operator.attrgetter(_func_code)
+get_function_defaults = operator.attrgetter(_func_defaults)
+get_function_globals = operator.attrgetter(_func_globals)
+
+
+if PY3:
+ def iterkeys(d, **kw):
+ return iter(d.keys(**kw))
+
+ def itervalues(d, **kw):
+ return iter(d.values(**kw))
+
+ def iteritems(d, **kw):
+ return iter(d.items(**kw))
+
+ def iterlists(d, **kw):
+ return iter(d.lists(**kw))
+
+ viewkeys = operator.methodcaller("keys")
+
+ viewvalues = operator.methodcaller("values")
+
+ viewitems = operator.methodcaller("items")
+else:
+ def iterkeys(d, **kw):
+ return d.iterkeys(**kw)
+
+ def itervalues(d, **kw):
+ return d.itervalues(**kw)
+
+ def iteritems(d, **kw):
+ return d.iteritems(**kw)
+
+ def iterlists(d, **kw):
+ return d.iterlists(**kw)
+
+ viewkeys = operator.methodcaller("viewkeys")
+
+ viewvalues = operator.methodcaller("viewvalues")
+
+ viewitems = operator.methodcaller("viewitems")
+
+_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
+_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
+_add_doc(iteritems,
+ "Return an iterator over the (key, value) pairs of a dictionary.")
+_add_doc(iterlists,
+ "Return an iterator over the (key, [values]) pairs of a dictionary.")
+
+
+if PY3:
+ def b(s):
+ return s.encode("latin-1")
+
+ def u(s):
+ return s
+ unichr = chr
+ import struct
+ int2byte = struct.Struct(">B").pack
+ del struct
+ byte2int = operator.itemgetter(0)
+ indexbytes = operator.getitem
+ iterbytes = iter
+ import io
+ StringIO = io.StringIO
+ BytesIO = io.BytesIO
+ _assertCountEqual = "assertCountEqual"
+ if sys.version_info[1] <= 1:
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
+ else:
+ _assertRaisesRegex = "assertRaisesRegex"
+ _assertRegex = "assertRegex"
+else:
+ def b(s):
+ return s
+ # Workaround for standalone backslash
+
+ def u(s):
+ return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
+ unichr = unichr
+ int2byte = chr
+
+ def byte2int(bs):
+ return ord(bs[0])
+
+ def indexbytes(buf, i):
+ return ord(buf[i])
+ iterbytes = functools.partial(itertools.imap, ord)
+ import StringIO
+ StringIO = BytesIO = StringIO.StringIO
+ _assertCountEqual = "assertItemsEqual"
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
+_add_doc(b, """Byte literal""")
+_add_doc(u, """Text literal""")
+
+
+def assertCountEqual(self, *args, **kwargs):
+ return getattr(self, _assertCountEqual)(*args, **kwargs)
+
+
+def assertRaisesRegex(self, *args, **kwargs):
+ return getattr(self, _assertRaisesRegex)(*args, **kwargs)
+
+
+def assertRegex(self, *args, **kwargs):
+ return getattr(self, _assertRegex)(*args, **kwargs)
+
+
+if PY3:
+ exec_ = getattr(moves.builtins, "exec")
+
+ def reraise(tp, value, tb=None):
+ if value is None:
+ value = tp()
+ if value.__traceback__ is not tb:
+ raise value.with_traceback(tb)
+ raise value
+
+else:
+ def exec_(_code_, _globs_=None, _locs_=None):
+ """Execute code in a namespace."""
+ if _globs_ is None:
+ frame = sys._getframe(1)
+ _globs_ = frame.f_globals
+ if _locs_ is None:
+ _locs_ = frame.f_locals
+ del frame
+ elif _locs_ is None:
+ _locs_ = _globs_
+ exec("""exec _code_ in _globs_, _locs_""")
+
+ exec_("""def reraise(tp, value, tb=None):
+ raise tp, value, tb
+""")
+
+
+if sys.version_info[:2] == (3, 2):
+ exec_("""def raise_from(value, from_value):
+ if from_value is None:
+ raise value
+ raise value from from_value
+""")
+elif sys.version_info[:2] > (3, 2):
+ exec_("""def raise_from(value, from_value):
+ raise value from from_value
+""")
+else:
+ def raise_from(value, from_value):
+ raise value
+
+
+print_ = getattr(moves.builtins, "print", None)
+if print_ is None:
+ def print_(*args, **kwargs):
+ """The new-style print function for Python 2.4 and 2.5."""
+ fp = kwargs.pop("file", sys.stdout)
+ if fp is None:
+ return
+
+ def write(data):
+ if not isinstance(data, basestring):
+ data = str(data)
+ # If the file has an encoding, encode unicode with it.
+ if (isinstance(fp, file) and
+ isinstance(data, unicode) and
+ fp.encoding is not None):
+ errors = getattr(fp, "errors", None)
+ if errors is None:
+ errors = "strict"
+ data = data.encode(fp.encoding, errors)
+ fp.write(data)
+ want_unicode = False
+ sep = kwargs.pop("sep", None)
+ if sep is not None:
+ if isinstance(sep, unicode):
+ want_unicode = True
+ elif not isinstance(sep, str):
+ raise TypeError("sep must be None or a string")
+ end = kwargs.pop("end", None)
+ if end is not None:
+ if isinstance(end, unicode):
+ want_unicode = True
+ elif not isinstance(end, str):
+ raise TypeError("end must be None or a string")
+ if kwargs:
+ raise TypeError("invalid keyword arguments to print()")
+ if not want_unicode:
+ for arg in args:
+ if isinstance(arg, unicode):
+ want_unicode = True
+ break
+ if want_unicode:
+ newline = unicode("\n")
+ space = unicode(" ")
+ else:
+ newline = "\n"
+ space = " "
+ if sep is None:
+ sep = space
+ if end is None:
+ end = newline
+ for i, arg in enumerate(args):
+ if i:
+ write(sep)
+ write(arg)
+ write(end)
+if sys.version_info[:2] < (3, 3):
+ _print = print_
+
+ def print_(*args, **kwargs):
+ fp = kwargs.get("file", sys.stdout)
+ flush = kwargs.pop("flush", False)
+ _print(*args, **kwargs)
+ if flush and fp is not None:
+ fp.flush()
+
+_add_doc(reraise, """Reraise an exception.""")
+
+if sys.version_info[0:2] < (3, 4):
+ def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
+ updated=functools.WRAPPER_UPDATES):
+ def wrapper(f):
+ f = functools.wraps(wrapped, assigned, updated)(f)
+ f.__wrapped__ = wrapped
+ return f
+ return wrapper
+else:
+ wraps = functools.wraps
+
+
+def with_metaclass(meta, *bases):
+ """Create a base class with a metaclass."""
+ # This requires a bit of explanation: the basic idea is to make a dummy
+ # metaclass for one level of class instantiation that replaces itself with
+ # the actual metaclass.
+ class metaclass(meta):
+
+ def __new__(cls, name, this_bases, d):
+ return meta(name, bases, d)
+ return type.__new__(metaclass, 'temporary_class', (), {})
+
+
+def add_metaclass(metaclass):
+ """Class decorator for creating a class with a metaclass."""
+ def wrapper(cls):
+ orig_vars = cls.__dict__.copy()
+ slots = orig_vars.get('__slots__')
+ if slots is not None:
+ if isinstance(slots, str):
+ slots = [slots]
+ for slots_var in slots:
+ orig_vars.pop(slots_var)
+ orig_vars.pop('__dict__', None)
+ orig_vars.pop('__weakref__', None)
+ return metaclass(cls.__name__, cls.__bases__, orig_vars)
+ return wrapper
+
+
+def python_2_unicode_compatible(klass):
+ """
+ A decorator that defines __unicode__ and __str__ methods under Python 2.
+ Under Python 3 it does nothing.
+
+ To support Python 2 and 3 with a single code base, define a __str__ method
+ returning text and apply this decorator to the class.
+ """
+ if PY2:
+ if '__str__' not in klass.__dict__:
+ raise ValueError("@python_2_unicode_compatible cannot be applied "
+ "to %s because it doesn't define __str__()." %
+ klass.__name__)
+ klass.__unicode__ = klass.__str__
+ klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
+ return klass
+
+
+# Complete the moves implementation.
+# This code is at the end of this module to speed up module loading.
+# Turn this module into a package.
+__path__ = [] # required for PEP 302 and PEP 451
+__package__ = __name__ # see PEP 366 @ReservedAssignment
+if globals().get("__spec__") is not None:
+ __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
+# Remove other six meta path importers, since they cause problems. This can
+# happen if six is removed from sys.modules and then reloaded. (Setuptools does
+# this for some reason.)
+if sys.meta_path:
+ for i, importer in enumerate(sys.meta_path):
+ # Here's some real nastiness: Another "instance" of the six module might
+ # be floating around. Therefore, we can't use isinstance() to check for
+ # the six meta path importer, since the other six instance will have
+ # inserted an importer with different class.
+ if (type(importer).__name__ == "_SixMetaPathImporter" and
+ importer.name == __name__):
+ del sys.meta_path[i]
+ break
+ del i, importer
+# Finally, add the importer to the meta path import hook.
+sys.meta_path.append(_importer)
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources/extern/__init__.py b/monEnvTP/lib/python3.8/site-packages/pkg_resources/extern/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..c1eb9e998f8e117c82c176bc83ab1d350c729cd7
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pkg_resources/extern/__init__.py
@@ -0,0 +1,73 @@
+import sys
+
+
+class VendorImporter:
+ """
+ A PEP 302 meta path importer for finding optionally-vendored
+ or otherwise naturally-installed packages from root_name.
+ """
+
+ def __init__(self, root_name, vendored_names=(), vendor_pkg=None):
+ self.root_name = root_name
+ self.vendored_names = set(vendored_names)
+ self.vendor_pkg = vendor_pkg or root_name.replace('extern', '_vendor')
+
+ @property
+ def search_path(self):
+ """
+ Search first the vendor package then as a natural package.
+ """
+ yield self.vendor_pkg + '.'
+ yield ''
+
+ def find_module(self, fullname, path=None):
+ """
+ Return self when fullname starts with root_name and the
+ target module is one vendored through this importer.
+ """
+ root, base, target = fullname.partition(self.root_name + '.')
+ if root:
+ return
+ if not any(map(target.startswith, self.vendored_names)):
+ return
+ return self
+
+ def load_module(self, fullname):
+ """
+ Iterate over the search path to locate and load fullname.
+ """
+ root, base, target = fullname.partition(self.root_name + '.')
+ for prefix in self.search_path:
+ try:
+ extant = prefix + target
+ __import__(extant)
+ mod = sys.modules[extant]
+ sys.modules[fullname] = mod
+ # mysterious hack:
+ # Remove the reference to the extant package/module
+ # on later Python versions to cause relative imports
+ # in the vendor package to resolve the same modules
+ # as those going through this importer.
+ if prefix and sys.version_info > (3, 3):
+ del sys.modules[extant]
+ return mod
+ except ImportError:
+ pass
+ else:
+ raise ImportError(
+ "The '{target}' package is required; "
+ "normally this is bundled with this package so if you get "
+ "this warning, consult the packager of your "
+ "distribution.".format(**locals())
+ )
+
+ def install(self):
+ """
+ Install this importer into sys.meta_path if not already present.
+ """
+ if self not in sys.meta_path:
+ sys.meta_path.append(self)
+
+
+names = 'packaging', 'pyparsing', 'six', 'appdirs'
+VendorImporter(__name__, names).install()
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources/extern/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pkg_resources/extern/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..565d16d3ee9dad2c851feb3cb471852c1ca6358f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pkg_resources/extern/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pkg_resources/py31compat.py b/monEnvTP/lib/python3.8/site-packages/pkg_resources/py31compat.py
new file mode 100644
index 0000000000000000000000000000000000000000..a381c424f9eaacb4126d4b8a474052551e34ccfb
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pkg_resources/py31compat.py
@@ -0,0 +1,23 @@
+import os
+import errno
+import sys
+
+from .extern import six
+
+
+def _makedirs_31(path, exist_ok=False):
+ try:
+ os.makedirs(path)
+ except OSError as exc:
+ if not exist_ok or exc.errno != errno.EEXIST:
+ raise
+
+
+# rely on compatibility behavior until mode considerations
+# and exists_ok considerations are disentangled.
+# See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663
+needs_makedirs = (
+ six.PY2 or
+ (3, 4) <= sys.version_info < (3, 4, 1)
+)
+makedirs = _makedirs_31 if needs_makedirs else os.makedirs
diff --git a/monEnvTP/lib/python3.8/site-packages/portend-3.1.0.dist-info/INSTALLER b/monEnvTP/lib/python3.8/site-packages/portend-3.1.0.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/portend-3.1.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/monEnvTP/lib/python3.8/site-packages/portend-3.1.0.dist-info/LICENSE b/monEnvTP/lib/python3.8/site-packages/portend-3.1.0.dist-info/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..353924be0e59b9ad7e6c22848c2189398481821d
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/portend-3.1.0.dist-info/LICENSE
@@ -0,0 +1,19 @@
+Copyright Jason R. Coombs
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
diff --git a/monEnvTP/lib/python3.8/site-packages/portend-3.1.0.dist-info/METADATA b/monEnvTP/lib/python3.8/site-packages/portend-3.1.0.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..2dc98806ddd0ad29adc1f7b23b2405ec7c545253
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/portend-3.1.0.dist-info/METADATA
@@ -0,0 +1,99 @@
+Metadata-Version: 2.1
+Name: portend
+Version: 3.1.0
+Summary: TCP port monitoring and discovery
+Home-page: https://github.com/jaraco/portend
+Author: Jason R. Coombs
+Author-email: jaraco@jaraco.com
+License: UNKNOWN
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Requires-Python: >=3.7
+License-File: LICENSE
+Requires-Dist: tempora (>=1.8)
+Provides-Extra: docs
+Requires-Dist: sphinx ; extra == 'docs'
+Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs'
+Requires-Dist: rst.linker (>=1.9) ; extra == 'docs'
+Provides-Extra: testing
+Requires-Dist: pytest (>=6) ; extra == 'testing'
+Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing'
+Requires-Dist: pytest-flake8 ; extra == 'testing'
+Requires-Dist: pytest-cov ; extra == 'testing'
+Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing'
+Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing'
+Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing'
+
+.. image:: https://img.shields.io/pypi/v/portend.svg
+ :target: `PyPI link`_
+
+.. image:: https://img.shields.io/pypi/pyversions/portend.svg
+ :target: `PyPI link`_
+
+.. _PyPI link: https://pypi.org/project/portend
+
+.. image:: https://github.com/jaraco/portend/workflows/tests/badge.svg
+ :target: https://github.com/jaraco/portend/actions?query=workflow%3A%22tests%22
+ :alt: tests
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/psf/black
+ :alt: Code style: Black
+
+.. image:: https://readthedocs.org/projects/portend/badge/?version=latest
+ :target: https://portend.readthedocs.io/en/latest/?badge=latest
+
+.. image:: https://img.shields.io/badge/skeleton-2021-informational
+ :target: https://blog.jaraco.com/skeleton
+
+por·tend
+pôrˈtend/
+verb
+
+ be a sign or warning that (something, especially something momentous or calamitous) is likely to happen.
+
+Usage
+=====
+
+Use portend to monitor TCP ports for bound or unbound states.
+
+For example, to wait for a port to be occupied, timing out after 3 seconds::
+
+ portend.occupied('www.google.com', 80, timeout=3)
+
+Or to wait for a port to be free, timing out after 5 seconds::
+
+ portend.free('::1', 80, timeout=5)
+
+The portend may also be executed directly. If the function succeeds, it
+returns nothing and exits with a status of 0. If it fails, it prints a
+message and exits with a status of 1. For example::
+
+ python -m portend localhost:31923 free
+ (exits immediately)
+
+ python -m portend -t 1 localhost:31923 occupied
+ (one second passes)
+ Port 31923 not bound on localhost.
+
+Portend also exposes a ``find_available_local_port`` for identifying
+a suitable port for binding locally::
+
+ port = portend.find_available_local_port()
+ print(port, "is available for binding")
+
+Portend additionally exposes the lower-level port checking functionality
+in the ``Checker`` class, which currently exposes only one public
+method, ``assert_free``::
+
+ portend.Checker().assert_free('localhost', 31923)
+
+If assert_free is passed a host/port combination that is occupied by
+a bound listener (i.e. a TCP connection is established to that host/port),
+assert_free will raise a ``PortNotFree`` exception.
+
+
diff --git a/monEnvTP/lib/python3.8/site-packages/portend-3.1.0.dist-info/RECORD b/monEnvTP/lib/python3.8/site-packages/portend-3.1.0.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..42fb6e7542de2a2e088b8c1caad4edda62177629
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/portend-3.1.0.dist-info/RECORD
@@ -0,0 +1,8 @@
+__pycache__/portend.cpython-38.pyc,,
+portend-3.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+portend-3.1.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050
+portend-3.1.0.dist-info/METADATA,sha256=udOQVay9p-OXkinDlrw43o3zwfEPcENYrEEDrSoLUEI,3457
+portend-3.1.0.dist-info/RECORD,,
+portend-3.1.0.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92
+portend-3.1.0.dist-info/top_level.txt,sha256=UlThezbYMVwaxw52sMHOIxHOxgNcsUXKhCvPTTi2WVA,8
+portend.py,sha256=5p5EH_Ue8cUs-_ZOTRjXwcgqVzWQmA3Kl0QNlZeeNBc,6488
diff --git a/monEnvTP/lib/python3.8/site-packages/portend-3.1.0.dist-info/WHEEL b/monEnvTP/lib/python3.8/site-packages/portend-3.1.0.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..5bad85fdc1cd08553756d0fb2c7be8b5ad6af7fb
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/portend-3.1.0.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/monEnvTP/lib/python3.8/site-packages/portend-3.1.0.dist-info/top_level.txt b/monEnvTP/lib/python3.8/site-packages/portend-3.1.0.dist-info/top_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..24c61cc78c6180918064ca443b8347f754378b07
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/portend-3.1.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+portend
diff --git a/monEnvTP/lib/python3.8/site-packages/portend.py b/monEnvTP/lib/python3.8/site-packages/portend.py
new file mode 100644
index 0000000000000000000000000000000000000000..a9f6787f8c906a314299104ca93578545c23585f
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/portend.py
@@ -0,0 +1,240 @@
+"""
+A simple library for managing the availability of ports.
+"""
+
+import time
+import socket
+import argparse
+import sys
+import itertools
+import contextlib
+import platform
+from collections import abc
+import urllib.parse
+
+from tempora import timing
+
+
+def client_host(server_host):
+ """
+ Return the host on which a client can connect to the given listener.
+
+ >>> client_host('192.168.0.1')
+ '192.168.0.1'
+ >>> client_host('0.0.0.0')
+ '127.0.0.1'
+ >>> client_host('::')
+ '::1'
+ """
+ if server_host == '0.0.0.0':
+ # 0.0.0.0 is INADDR_ANY, which should answer on localhost.
+ return '127.0.0.1'
+ if server_host in ('::', '::0', '::0.0.0.0'):
+ # :: is IN6ADDR_ANY, which should answer on localhost.
+ # ::0 and ::0.0.0.0 are non-canonical but common
+ # ways to write IN6ADDR_ANY.
+ return '::1'
+ return server_host
+
+
+class Checker(object):
+ def __init__(self, timeout=1.0):
+ self.timeout = timeout
+
+ def assert_free(self, host, port=None):
+ """
+ Assert that the given addr is free
+ in that all attempts to connect fail within the timeout
+ or raise a PortNotFree exception.
+
+ >>> free_port = find_available_local_port()
+
+ >>> Checker().assert_free('localhost', free_port)
+ >>> Checker().assert_free('127.0.0.1', free_port)
+ >>> Checker().assert_free('::1', free_port)
+
+ Also accepts an addr tuple
+
+ >>> addr = '::1', free_port, 0, 0
+ >>> Checker().assert_free(addr)
+
+ Host might refer to a server bind address like '::', which
+ should use localhost to perform the check.
+
+ >>> Checker().assert_free('::', free_port)
+ """
+ if port is None and isinstance(host, abc.Sequence):
+ host, port = host[:2]
+ if platform.system() == 'Windows':
+ host = client_host(host) # pragma: nocover
+ info = socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM)
+ list(itertools.starmap(self._connect, info))
+
+ def _connect(self, af, socktype, proto, canonname, sa):
+ s = socket.socket(af, socktype, proto)
+ # fail fast with a small timeout
+ s.settimeout(self.timeout)
+
+ with contextlib.closing(s):
+ try:
+ s.connect(sa)
+ except socket.error:
+ return
+
+ # the connect succeeded, so the port isn't free
+ host, port = sa[:2]
+ tmpl = "Port {port} is in use on {host}."
+ raise PortNotFree(tmpl.format(**locals()))
+
+
+class Timeout(IOError):
+ pass
+
+
+class PortNotFree(IOError):
+ pass
+
+
+def free(host, port, timeout=float('Inf')):
+ """
+ Wait for the specified port to become free (dropping or rejecting
+ requests). Return when the port is free or raise a Timeout if timeout has
+ elapsed.
+
+ Timeout may be specified in seconds or as a timedelta.
+ If timeout is None or ∞, the routine will run indefinitely.
+
+ >>> free('localhost', find_available_local_port())
+
+ >>> free(None, None)
+ Traceback (most recent call last):
+ ...
+ ValueError: Host values of '' or None are not allowed.
+ """
+ if not host:
+ raise ValueError("Host values of '' or None are not allowed.")
+
+ timer = timing.Timer(timeout)
+
+ while True:
+ try:
+ # Expect a free port, so use a small timeout
+ Checker(timeout=0.1).assert_free(host, port)
+ return
+ except PortNotFree:
+ if timer.expired():
+ raise Timeout("Port {port} not free on {host}.".format(**locals()))
+ # Politely wait.
+ time.sleep(0.1)
+
+
+def occupied(host, port, timeout=float('Inf')):
+ """
+ Wait for the specified port to become occupied (accepting requests).
+ Return when the port is occupied or raise a Timeout if timeout has
+ elapsed.
+
+ Timeout may be specified in seconds or as a timedelta.
+ If timeout is None or ∞, the routine will run indefinitely.
+
+ >>> occupied('localhost', find_available_local_port(), .1)
+ Traceback (most recent call last):
+ ...
+ Timeout: Port ... not bound on localhost.
+
+ >>> occupied(None, None)
+ Traceback (most recent call last):
+ ...
+ ValueError: Host values of '' or None are not allowed.
+ """
+ if not host:
+ raise ValueError("Host values of '' or None are not allowed.")
+
+ timer = timing.Timer(timeout)
+
+ while True:
+ try:
+ Checker(timeout=0.5).assert_free(host, port)
+ if timer.expired():
+ raise Timeout("Port {port} not bound on {host}.".format(**locals()))
+ # Politely wait
+ time.sleep(0.1)
+ except PortNotFree:
+ # port is occupied
+ return
+
+
+def find_available_local_port():
+ """
+ Find a free port on localhost.
+
+ >>> 0 < find_available_local_port() < 65536
+ True
+ """
+ infos = socket.getaddrinfo(None, 0, socket.AF_UNSPEC, socket.SOCK_STREAM)
+ family, proto, _, _, addr = next(iter(infos))
+ sock = socket.socket(family, proto)
+ sock.bind(addr)
+ addr, port = sock.getsockname()[:2]
+ sock.close()
+ return port
+
+
+class HostPort(str):
+ """
+ A simple representation of a host/port pair as a string
+
+ >>> hp = HostPort('localhost:32768')
+
+ >>> hp.host
+ 'localhost'
+
+ >>> hp.port
+ 32768
+
+ >>> len(hp)
+ 15
+
+ >>> hp = HostPort('[::1]:32768')
+
+ >>> hp.host
+ '::1'
+
+ >>> hp.port
+ 32768
+ """
+
+ @property
+ def host(self):
+ return urllib.parse.urlparse(f'//{self}').hostname
+
+ @property
+ def port(self):
+ return urllib.parse.urlparse(f'//{self}').port
+
+ @classmethod
+ def from_addr(cls, addr):
+ listen_host, port = addr[:2]
+ plain_host = client_host(listen_host)
+ host = f'[{plain_host}]' if ':' in plain_host else plain_host
+ return cls(':'.join([host, str(port)]))
+
+
+def _main(args=None):
+ parser = argparse.ArgumentParser()
+
+ def global_lookup(key):
+ return globals()[key]
+
+ parser.add_argument('target', metavar='host:port', type=HostPort)
+ parser.add_argument('func', metavar='state', type=global_lookup)
+ parser.add_argument('-t', '--timeout', default=None, type=float)
+ args = parser.parse_args(args)
+ try:
+ args.func(args.target.host, args.target.port, timeout=args.timeout)
+ except Timeout as timeout:
+ print(timeout, file=sys.stderr)
+ raise SystemExit(1)
+
+
+__name__ == '__main__' and _main()
diff --git a/monEnvTP/lib/python3.8/site-packages/protobuf-3.19.3-py3.8-nspkg.pth b/monEnvTP/lib/python3.8/site-packages/protobuf-3.19.3-py3.8-nspkg.pth
new file mode 100644
index 0000000000000000000000000000000000000000..baef7a0f418633ed421ab4c995e4a4a233d9c368
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/protobuf-3.19.3-py3.8-nspkg.pth
@@ -0,0 +1 @@
+import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('google',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('google', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('google', [os.path.dirname(p)])));m = m or sys.modules.setdefault('google', types.ModuleType('google'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p)
diff --git a/monEnvTP/lib/python3.8/site-packages/protobuf-3.19.3.dist-info/INSTALLER b/monEnvTP/lib/python3.8/site-packages/protobuf-3.19.3.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/protobuf-3.19.3.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/monEnvTP/lib/python3.8/site-packages/protobuf-3.19.3.dist-info/METADATA b/monEnvTP/lib/python3.8/site-packages/protobuf-3.19.3.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..fa65c9d15a717d4c525b78667f117b987ebb2ecf
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/protobuf-3.19.3.dist-info/METADATA
@@ -0,0 +1,22 @@
+Metadata-Version: 2.1
+Name: protobuf
+Version: 3.19.3
+Summary: Protocol Buffers
+Home-page: https://developers.google.com/protocol-buffers/
+Maintainer: protobuf@googlegroups.com
+Maintainer-email: protobuf@googlegroups.com
+License: 3-Clause BSD License
+Download-URL: https://github.com/protocolbuffers/protobuf/releases
+Platform: UNKNOWN
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Requires-Python: >=3.5
+
+Protocol Buffers are Google's data interchange format
+
diff --git a/monEnvTP/lib/python3.8/site-packages/protobuf-3.19.3.dist-info/RECORD b/monEnvTP/lib/python3.8/site-packages/protobuf-3.19.3.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..3c0c45fd11b21fdf8365f0b7c74183527f1d951b
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/protobuf-3.19.3.dist-info/RECORD
@@ -0,0 +1,97 @@
+google/protobuf/__init__.py,sha256=YZwsf68SYR84Fy7EXHV0qavNgsoYPf9w-aU5JhwT7sM,1705
+google/protobuf/__pycache__/__init__.cpython-38.pyc,,
+google/protobuf/__pycache__/any_pb2.cpython-38.pyc,,
+google/protobuf/__pycache__/api_pb2.cpython-38.pyc,,
+google/protobuf/__pycache__/descriptor.cpython-38.pyc,,
+google/protobuf/__pycache__/descriptor_database.cpython-38.pyc,,
+google/protobuf/__pycache__/descriptor_pb2.cpython-38.pyc,,
+google/protobuf/__pycache__/descriptor_pool.cpython-38.pyc,,
+google/protobuf/__pycache__/duration_pb2.cpython-38.pyc,,
+google/protobuf/__pycache__/empty_pb2.cpython-38.pyc,,
+google/protobuf/__pycache__/field_mask_pb2.cpython-38.pyc,,
+google/protobuf/__pycache__/json_format.cpython-38.pyc,,
+google/protobuf/__pycache__/message.cpython-38.pyc,,
+google/protobuf/__pycache__/message_factory.cpython-38.pyc,,
+google/protobuf/__pycache__/proto_builder.cpython-38.pyc,,
+google/protobuf/__pycache__/reflection.cpython-38.pyc,,
+google/protobuf/__pycache__/service.cpython-38.pyc,,
+google/protobuf/__pycache__/service_reflection.cpython-38.pyc,,
+google/protobuf/__pycache__/source_context_pb2.cpython-38.pyc,,
+google/protobuf/__pycache__/struct_pb2.cpython-38.pyc,,
+google/protobuf/__pycache__/symbol_database.cpython-38.pyc,,
+google/protobuf/__pycache__/text_encoding.cpython-38.pyc,,
+google/protobuf/__pycache__/text_format.cpython-38.pyc,,
+google/protobuf/__pycache__/timestamp_pb2.cpython-38.pyc,,
+google/protobuf/__pycache__/type_pb2.cpython-38.pyc,,
+google/protobuf/__pycache__/wrappers_pb2.cpython-38.pyc,,
+google/protobuf/any_pb2.py,sha256=ErHDcAFbcXITg6-pvF08KMhhq9ai7Oxsm0FTZaNCULU,1536
+google/protobuf/api_pb2.py,sha256=LItbGUKnwhDZenPrkpWoluiRHv29lRkJtb1-DHSjOK0,3329
+google/protobuf/compiler/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+google/protobuf/compiler/__pycache__/__init__.cpython-38.pyc,,
+google/protobuf/compiler/__pycache__/plugin_pb2.cpython-38.pyc,,
+google/protobuf/compiler/plugin_pb2.py,sha256=viYDlPIrSkGLW8c8-i2wEXz2PRxbwSvvutLXYy93VBA,4335
+google/protobuf/descriptor.py,sha256=la9X97A7dmPElEtBCSPAdqv8tu7xRwf0ZjqKG3nryH8,45140
+google/protobuf/descriptor_database.py,sha256=2hBUBbzWjTdyq0nLZ9HYKbqhMpouzZVk9srurERnLVo,6819
+google/protobuf/descriptor_pb2.py,sha256=RqMgHwCW4Bz6Np3gaJZIBRuVJ5kzpY8lQM1dRNTZ4QA,119775
+google/protobuf/descriptor_pool.py,sha256=y-vR8-Mm279DxL8Cq7AD7PWISkGswvon-P755trULbA,46949
+google/protobuf/duration_pb2.py,sha256=x1AtWmOXKvDmKnigpNYbiW5vy1Am-KEktHOB1r4e-xs,1646
+google/protobuf/empty_pb2.py,sha256=_wxMhT_h43VBy44j-EydgoybubGo87RJAM462lHZuNs,1514
+google/protobuf/field_mask_pb2.py,sha256=DounqM7sYI8eLKGb0-MlpRrSkGeh7AfRAsH-XJJl-mk,1624
+google/protobuf/internal/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+google/protobuf/internal/__pycache__/__init__.cpython-38.pyc,,
+google/protobuf/internal/__pycache__/api_implementation.cpython-38.pyc,,
+google/protobuf/internal/__pycache__/containers.cpython-38.pyc,,
+google/protobuf/internal/__pycache__/decoder.cpython-38.pyc,,
+google/protobuf/internal/__pycache__/encoder.cpython-38.pyc,,
+google/protobuf/internal/__pycache__/enum_type_wrapper.cpython-38.pyc,,
+google/protobuf/internal/__pycache__/extension_dict.cpython-38.pyc,,
+google/protobuf/internal/__pycache__/message_listener.cpython-38.pyc,,
+google/protobuf/internal/__pycache__/python_message.cpython-38.pyc,,
+google/protobuf/internal/__pycache__/type_checkers.cpython-38.pyc,,
+google/protobuf/internal/__pycache__/well_known_types.cpython-38.pyc,,
+google/protobuf/internal/__pycache__/wire_format.cpython-38.pyc,,
+google/protobuf/internal/_api_implementation.cpython-38-x86_64-linux-gnu.so,sha256=ruZwOfc7LXSxKAKUCbgtkiBWJLjuW9Ff5pESL3FpYqM,14408
+google/protobuf/internal/api_implementation.py,sha256=rma5XlGOY6x35S55AS5bSOv0vq_211gjnL4Q9X74lpY,4562
+google/protobuf/internal/containers.py,sha256=BC91WIWmNfZYyGjE4QA4CNfjROXJl5W1_gfc4A97b50,21560
+google/protobuf/internal/decoder.py,sha256=XDqpaEzqavV4Ka7jx2jonxCEyuKClxzbWPS2M4OTe0I,37567
+google/protobuf/internal/encoder.py,sha256=6hXWsTHCB-cumgbAMi5Z3JIxab8E5LD9p_iPS2HohiA,28656
+google/protobuf/internal/enum_type_wrapper.py,sha256=q33nwZPi750ES0rsF7SKSzArNjuTGJobHmf2ItCP520,4469
+google/protobuf/internal/extension_dict.py,sha256=3DbWhlrpGybuur1bjfGKhx2d8IVo7tVQUEcF8tPLTyo,8443
+google/protobuf/internal/message_listener.py,sha256=Qwc5gkifAvWzhm3b0v-nXJkozNTgL-L92XAslngFaow,3367
+google/protobuf/internal/python_message.py,sha256=MEDGdNsrBo8OKk92s87J9qjJCQN_lkZCJHJXaA1th8U,58146
+google/protobuf/internal/type_checkers.py,sha256=P5KbORyPzELyyR0tHkmMA9TclSUZRSAG9z2cdjX4zdo,16248
+google/protobuf/internal/well_known_types.py,sha256=iqWlUFVvMF0_eG-gWsV3xl8g6rV6xYa49H6HgzNX7ro,29375
+google/protobuf/internal/wire_format.py,sha256=7Wz8gV7QOvoTzLMWrwlWSg7hIJ_T8Pm1w8_WLhpieVw,8444
+google/protobuf/json_format.py,sha256=xlCxvMlo8-PCRlzkwvr2Ge2aMg3b5j6sMAo3dmOx5g4,32690
+google/protobuf/message.py,sha256=yCu94McSNrB8Wml_raZbsHvl9on_wsjfGy3BlJAH3_M,14453
+google/protobuf/message_factory.py,sha256=LD18eAKZ_tZnDzIUc_gDmrkxuwiYkUh-f-BkfVW7Wko,7482
+google/protobuf/proto_builder.py,sha256=WcEmUDU26k_JSiUzXJ7bgthgR7jlTiOecV1np0zGyA8,5506
+google/protobuf/pyext/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+google/protobuf/pyext/__pycache__/__init__.cpython-38.pyc,,
+google/protobuf/pyext/__pycache__/cpp_message.cpython-38.pyc,,
+google/protobuf/pyext/_message.cpython-38-x86_64-linux-gnu.so,sha256=yqEYaDQfvVgfkSx6Zx93oixHTtzBMGzURijhv0W89ZE,2548936
+google/protobuf/pyext/cpp_message.py,sha256=D0-bxtjf1Ri8b0GubL5xgkkEB_z_mIf847yrRvVqDBU,2851
+google/protobuf/reflection.py,sha256=f61wP6k-HMShRwLsfRomScGzG0ZpWULpyhYwvjuZMKQ,3779
+google/protobuf/service.py,sha256=MGWgoxTrSlmqWsgXvp1XaP5Sg-_pq8Sw2XJuY1m6MVM,9146
+google/protobuf/service_reflection.py,sha256=GrQxVlbPi0MFq9RnOFE3fcqZHiXosMEUlydpcOvc2hg,11072
+google/protobuf/source_context_pb2.py,sha256=jOLXn7FfZRkQl_-dfs7qQpZSl9a1aJvGd4ngpSfnuR8,1667
+google/protobuf/struct_pb2.py,sha256=aOOTLZKgMGPQl3d990XGVUb7QTfZUOFQU-RrxEDg43s,3885
+google/protobuf/symbol_database.py,sha256=aCPGE4N2slb6HFB4cHFJDA8zehgMy16XY8BMH_ebfhc,6944
+google/protobuf/text_encoding.py,sha256=IrfncP112lKMLnWhhjXoczxEv2RZ9kzlinzAzHstrlY,4728
+google/protobuf/text_format.py,sha256=6aYyfB-htl2za_waO6LV9JVTPbx5Qj2vf0uE-cZdC6M,60006
+google/protobuf/timestamp_pb2.py,sha256=MlO2VdRCs82q2sWOGHBNuue5PwQTy3spE2DtPfGL4bQ,1662
+google/protobuf/type_pb2.py,sha256=B4L2V8-0EGNvy14c3tS1A_etOlSFsxppouSfgcYm04I,6152
+google/protobuf/util/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+google/protobuf/util/__pycache__/__init__.cpython-38.pyc,,
+google/protobuf/util/__pycache__/json_format_pb2.cpython-38.pyc,,
+google/protobuf/util/__pycache__/json_format_proto3_pb2.cpython-38.pyc,,
+google/protobuf/util/json_format_pb2.py,sha256=eK2i32AbwJo63DXKxHMDIsbMn9DIzcBwa_gamrtmM18,14124
+google/protobuf/util/json_format_proto3_pb2.py,sha256=dWhEzszvj9HrgMN_YuFmWMDvDaurA5u1xRfNff7kEPw,27085
+google/protobuf/wrappers_pb2.py,sha256=cir7PRYVRby0_Ek0G8uPx5xJ0FCYki5z5KTCRTsxt3c,5389
+protobuf-3.19.3-py3.8-nspkg.pth,sha256=xH5gTxc4UipYP3qrbP-4CCHNGBV97eBR4QqhheCvBl4,539
+protobuf-3.19.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+protobuf-3.19.3.dist-info/METADATA,sha256=WsmHVzDv1GtJgUifEC8OiyaXJ2tMoZLWrhRi9cJrsk0,806
+protobuf-3.19.3.dist-info/RECORD,,
+protobuf-3.19.3.dist-info/WHEEL,sha256=-ijGDuALlPxm3HbhKntps0QzHsi-DPlXqgerYTTJkFE,148
+protobuf-3.19.3.dist-info/namespace_packages.txt,sha256=_1QvSJIhFAGfxb79D6DhB7SUw2X6T4rwnz_LLrbcD3c,7
+protobuf-3.19.3.dist-info/top_level.txt,sha256=_1QvSJIhFAGfxb79D6DhB7SUw2X6T4rwnz_LLrbcD3c,7
diff --git a/monEnvTP/lib/python3.8/site-packages/protobuf-3.19.3.dist-info/WHEEL b/monEnvTP/lib/python3.8/site-packages/protobuf-3.19.3.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..3a48d3480384503bea53d4a7c55a666ace0eb5fc
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/protobuf-3.19.3.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.1)
+Root-Is-Purelib: false
+Tag: cp38-cp38-manylinux_2_17_x86_64
+Tag: cp38-cp38-manylinux2014_x86_64
+
diff --git a/monEnvTP/lib/python3.8/site-packages/protobuf-3.19.3.dist-info/namespace_packages.txt b/monEnvTP/lib/python3.8/site-packages/protobuf-3.19.3.dist-info/namespace_packages.txt
new file mode 100644
index 0000000000000000000000000000000000000000..cb429113e0f9a73019fd799e8052093fea7f0c8b
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/protobuf-3.19.3.dist-info/namespace_packages.txt
@@ -0,0 +1 @@
+google
diff --git a/monEnvTP/lib/python3.8/site-packages/protobuf-3.19.3.dist-info/top_level.txt b/monEnvTP/lib/python3.8/site-packages/protobuf-3.19.3.dist-info/top_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..cb429113e0f9a73019fd799e8052093fea7f0c8b
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/protobuf-3.19.3.dist-info/top_level.txt
@@ -0,0 +1 @@
+google
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz-2021.3.dist-info/INSTALLER b/monEnvTP/lib/python3.8/site-packages/pytz-2021.3.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pytz-2021.3.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz-2021.3.dist-info/LICENSE.txt b/monEnvTP/lib/python3.8/site-packages/pytz-2021.3.dist-info/LICENSE.txt
new file mode 100644
index 0000000000000000000000000000000000000000..5f1c11289f6a54cb07ebdbf31d02e8e81b18b07f
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pytz-2021.3.dist-info/LICENSE.txt
@@ -0,0 +1,19 @@
+Copyright (c) 2003-2019 Stuart Bishop <stuart@stuartbishop.net>
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz-2021.3.dist-info/METADATA b/monEnvTP/lib/python3.8/site-packages/pytz-2021.3.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..4677130aface905003041f6fcc7ea7fcf9d51582
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pytz-2021.3.dist-info/METADATA
@@ -0,0 +1,634 @@
+Metadata-Version: 2.1
+Name: pytz
+Version: 2021.3
+Summary: World timezone definitions, modern and historical
+Home-page: http://pythonhosted.org/pytz
+Author: Stuart Bishop
+Author-email: stuart@stuartbishop.net
+Maintainer: Stuart Bishop
+Maintainer-email: stuart@stuartbishop.net
+License: MIT
+Download-URL: https://pypi.org/project/pytz/
+Keywords: timezone,tzinfo,datetime,olson,time
+Platform: Independent
+Classifier: Development Status :: 6 - Mature
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Natural Language :: English
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.4
+Classifier: Programming Language :: Python :: 2.5
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.1
+Classifier: Programming Language :: Python :: 3.2
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+
+pytz - World Timezone Definitions for Python
+============================================
+
+:Author: Stuart Bishop <stuart@stuartbishop.net>
+
+Introduction
+~~~~~~~~~~~~
+
+pytz brings the Olson tz database into Python. This library allows
+accurate and cross platform timezone calculations using Python 2.4
+or higher. It also solves the issue of ambiguous times at the end
+of daylight saving time, which you can read more about in the Python
+Library Reference (``datetime.tzinfo``).
+
+Almost all of the Olson timezones are supported.
+
+.. note::
+
+ This library differs from the documented Python API for
+ tzinfo implementations; if you want to create local wallclock
+ times you need to use the ``localize()`` method documented in this
+ document. In addition, if you perform date arithmetic on local
+ times that cross DST boundaries, the result may be in an incorrect
+ timezone (ie. subtract 1 minute from 2002-10-27 1:00 EST and you get
+ 2002-10-27 0:59 EST instead of the correct 2002-10-27 1:59 EDT). A
+ ``normalize()`` method is provided to correct this. Unfortunately these
+ issues cannot be resolved without modifying the Python datetime
+ implementation (see PEP-431).
+
+
+Installation
+~~~~~~~~~~~~
+
+This package can either be installed using ``pip`` or from a tarball using the
+standard Python distutils.
+
+If you are installing using ``pip``, you don't need to download anything as the
+latest version will be downloaded for you from PyPI::
+
+ pip install pytz
+
+If you are installing from a tarball, run the following command as an
+administrative user::
+
+ python setup.py install
+
+
+pytz for Enterprise
+~~~~~~~~~~~~~~~~~~~
+
+Available as part of the Tidelift Subscription.
+
+The maintainers of pytz and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. `Learn more. <https://tidelift.com/subscription/pkg/pypi-pytz?utm_source=pypi-pytz&utm_medium=referral&utm_campaign=enterprise&utm_term=repo>`_.
+
+
+Example & Usage
+~~~~~~~~~~~~~~~
+
+Localized times and date arithmetic
+-----------------------------------
+
+>>> from datetime import datetime, timedelta
+>>> from pytz import timezone
+>>> import pytz
+>>> utc = pytz.utc
+>>> utc.zone
+'UTC'
+>>> eastern = timezone('US/Eastern')
+>>> eastern.zone
+'US/Eastern'
+>>> amsterdam = timezone('Europe/Amsterdam')
+>>> fmt = '%Y-%m-%d %H:%M:%S %Z%z'
+
+This library only supports two ways of building a localized time. The
+first is to use the ``localize()`` method provided by the pytz library.
+This is used to localize a naive datetime (datetime with no timezone
+information):
+
+>>> loc_dt = eastern.localize(datetime(2002, 10, 27, 6, 0, 0))
+>>> print(loc_dt.strftime(fmt))
+2002-10-27 06:00:00 EST-0500
+
+The second way of building a localized time is by converting an existing
+localized time using the standard ``astimezone()`` method:
+
+>>> ams_dt = loc_dt.astimezone(amsterdam)
+>>> ams_dt.strftime(fmt)
+'2002-10-27 12:00:00 CET+0100'
+
+Unfortunately using the tzinfo argument of the standard datetime
+constructors ''does not work'' with pytz for many timezones.
+
+>>> datetime(2002, 10, 27, 12, 0, 0, tzinfo=amsterdam).strftime(fmt) # /!\ Does not work this way!
+'2002-10-27 12:00:00 LMT+0020'
+
+It is safe for timezones without daylight saving transitions though, such
+as UTC:
+
+>>> datetime(2002, 10, 27, 12, 0, 0, tzinfo=pytz.utc).strftime(fmt) # /!\ Not recommended except for UTC
+'2002-10-27 12:00:00 UTC+0000'
+
+The preferred way of dealing with times is to always work in UTC,
+converting to localtime only when generating output to be read
+by humans.
+
+>>> utc_dt = datetime(2002, 10, 27, 6, 0, 0, tzinfo=utc)
+>>> loc_dt = utc_dt.astimezone(eastern)
+>>> loc_dt.strftime(fmt)
+'2002-10-27 01:00:00 EST-0500'
+
+This library also allows you to do date arithmetic using local
+times, although it is more complicated than working in UTC as you
+need to use the ``normalize()`` method to handle daylight saving time
+and other timezone transitions. In this example, ``loc_dt`` is set
+to the instant when daylight saving time ends in the US/Eastern
+timezone.
+
+>>> before = loc_dt - timedelta(minutes=10)
+>>> before.strftime(fmt)
+'2002-10-27 00:50:00 EST-0500'
+>>> eastern.normalize(before).strftime(fmt)
+'2002-10-27 01:50:00 EDT-0400'
+>>> after = eastern.normalize(before + timedelta(minutes=20))
+>>> after.strftime(fmt)
+'2002-10-27 01:10:00 EST-0500'
+
+Creating local times is also tricky, and the reason why working with
+local times is not recommended. Unfortunately, you cannot just pass
+a ``tzinfo`` argument when constructing a datetime (see the next
+section for more details)
+
+>>> dt = datetime(2002, 10, 27, 1, 30, 0)
+>>> dt1 = eastern.localize(dt, is_dst=True)
+>>> dt1.strftime(fmt)
+'2002-10-27 01:30:00 EDT-0400'
+>>> dt2 = eastern.localize(dt, is_dst=False)
+>>> dt2.strftime(fmt)
+'2002-10-27 01:30:00 EST-0500'
+
+Converting between timezones is more easily done, using the
+standard astimezone method.
+
+>>> utc_dt = utc.localize(datetime.utcfromtimestamp(1143408899))
+>>> utc_dt.strftime(fmt)
+'2006-03-26 21:34:59 UTC+0000'
+>>> au_tz = timezone('Australia/Sydney')
+>>> au_dt = utc_dt.astimezone(au_tz)
+>>> au_dt.strftime(fmt)
+'2006-03-27 08:34:59 AEDT+1100'
+>>> utc_dt2 = au_dt.astimezone(utc)
+>>> utc_dt2.strftime(fmt)
+'2006-03-26 21:34:59 UTC+0000'
+>>> utc_dt == utc_dt2
+True
+
+You can take shortcuts when dealing with the UTC side of timezone
+conversions. ``normalize()`` and ``localize()`` are not really
+necessary when there are no daylight saving time transitions to
+deal with.
+
+>>> utc_dt = datetime.utcfromtimestamp(1143408899).replace(tzinfo=utc)
+>>> utc_dt.strftime(fmt)
+'2006-03-26 21:34:59 UTC+0000'
+>>> au_tz = timezone('Australia/Sydney')
+>>> au_dt = au_tz.normalize(utc_dt.astimezone(au_tz))
+>>> au_dt.strftime(fmt)
+'2006-03-27 08:34:59 AEDT+1100'
+>>> utc_dt2 = au_dt.astimezone(utc)
+>>> utc_dt2.strftime(fmt)
+'2006-03-26 21:34:59 UTC+0000'
+
+
+``tzinfo`` API
+--------------
+
+The ``tzinfo`` instances returned by the ``timezone()`` function have
+been extended to cope with ambiguous times by adding an ``is_dst``
+parameter to the ``utcoffset()``, ``dst()`` && ``tzname()`` methods.
+
+>>> tz = timezone('America/St_Johns')
+
+>>> normal = datetime(2009, 9, 1)
+>>> ambiguous = datetime(2009, 10, 31, 23, 30)
+
+The ``is_dst`` parameter is ignored for most timestamps. It is only used
+during DST transition ambiguous periods to resolve that ambiguity.
+
+>>> print(tz.utcoffset(normal, is_dst=True))
+-1 day, 21:30:00
+>>> print(tz.dst(normal, is_dst=True))
+1:00:00
+>>> tz.tzname(normal, is_dst=True)
+'NDT'
+
+>>> print(tz.utcoffset(ambiguous, is_dst=True))
+-1 day, 21:30:00
+>>> print(tz.dst(ambiguous, is_dst=True))
+1:00:00
+>>> tz.tzname(ambiguous, is_dst=True)
+'NDT'
+
+>>> print(tz.utcoffset(normal, is_dst=False))
+-1 day, 21:30:00
+>>> tz.dst(normal, is_dst=False).seconds
+3600
+>>> tz.tzname(normal, is_dst=False)
+'NDT'
+
+>>> print(tz.utcoffset(ambiguous, is_dst=False))
+-1 day, 20:30:00
+>>> tz.dst(ambiguous, is_dst=False)
+datetime.timedelta(0)
+>>> tz.tzname(ambiguous, is_dst=False)
+'NST'
+
+If ``is_dst`` is not specified, ambiguous timestamps will raise
+an ``pytz.exceptions.AmbiguousTimeError`` exception.
+
+>>> print(tz.utcoffset(normal))
+-1 day, 21:30:00
+>>> print(tz.dst(normal))
+1:00:00
+>>> tz.tzname(normal)
+'NDT'
+
+>>> import pytz.exceptions
+>>> try:
+... tz.utcoffset(ambiguous)
+... except pytz.exceptions.AmbiguousTimeError:
+... print('pytz.exceptions.AmbiguousTimeError: %s' % ambiguous)
+pytz.exceptions.AmbiguousTimeError: 2009-10-31 23:30:00
+>>> try:
+... tz.dst(ambiguous)
+... except pytz.exceptions.AmbiguousTimeError:
+... print('pytz.exceptions.AmbiguousTimeError: %s' % ambiguous)
+pytz.exceptions.AmbiguousTimeError: 2009-10-31 23:30:00
+>>> try:
+... tz.tzname(ambiguous)
+... except pytz.exceptions.AmbiguousTimeError:
+... print('pytz.exceptions.AmbiguousTimeError: %s' % ambiguous)
+pytz.exceptions.AmbiguousTimeError: 2009-10-31 23:30:00
+
+
+Problems with Localtime
+~~~~~~~~~~~~~~~~~~~~~~~
+
+The major problem we have to deal with is that certain datetimes
+may occur twice in a year. For example, in the US/Eastern timezone
+on the last Sunday morning in October, the following sequence
+happens:
+
+ - 01:00 EDT occurs
+ - 1 hour later, instead of 2:00am the clock is turned back 1 hour
+ and 01:00 happens again (this time 01:00 EST)
+
+In fact, every instant between 01:00 and 02:00 occurs twice. This means
+that if you try and create a time in the 'US/Eastern' timezone
+the standard datetime syntax, there is no way to specify if you meant
+before of after the end-of-daylight-saving-time transition. Using the
+pytz custom syntax, the best you can do is make an educated guess:
+
+>>> loc_dt = eastern.localize(datetime(2002, 10, 27, 1, 30, 00))
+>>> loc_dt.strftime(fmt)
+'2002-10-27 01:30:00 EST-0500'
+
+As you can see, the system has chosen one for you and there is a 50%
+chance of it being out by one hour. For some applications, this does
+not matter. However, if you are trying to schedule meetings with people
+in different timezones or analyze log files it is not acceptable.
+
+The best and simplest solution is to stick with using UTC. The pytz
+package encourages using UTC for internal timezone representation by
+including a special UTC implementation based on the standard Python
+reference implementation in the Python documentation.
+
+The UTC timezone unpickles to be the same instance, and pickles to a
+smaller size than other pytz tzinfo instances. The UTC implementation
+can be obtained as pytz.utc, pytz.UTC, or pytz.timezone('UTC').
+
+>>> import pickle, pytz
+>>> dt = datetime(2005, 3, 1, 14, 13, 21, tzinfo=utc)
+>>> naive = dt.replace(tzinfo=None)
+>>> p = pickle.dumps(dt, 1)
+>>> naive_p = pickle.dumps(naive, 1)
+>>> len(p) - len(naive_p)
+17
+>>> new = pickle.loads(p)
+>>> new == dt
+True
+>>> new is dt
+False
+>>> new.tzinfo is dt.tzinfo
+True
+>>> pytz.utc is pytz.UTC is pytz.timezone('UTC')
+True
+
+Note that some other timezones are commonly thought of as the same (GMT,
+Greenwich, Universal, etc.). The definition of UTC is distinct from these
+other timezones, and they are not equivalent. For this reason, they will
+not compare the same in Python.
+
+>>> utc == pytz.timezone('GMT')
+False
+
+See the section `What is UTC`_, below.
+
+If you insist on working with local times, this library provides a
+facility for constructing them unambiguously:
+
+>>> loc_dt = datetime(2002, 10, 27, 1, 30, 00)
+>>> est_dt = eastern.localize(loc_dt, is_dst=True)
+>>> edt_dt = eastern.localize(loc_dt, is_dst=False)
+>>> print(est_dt.strftime(fmt) + ' / ' + edt_dt.strftime(fmt))
+2002-10-27 01:30:00 EDT-0400 / 2002-10-27 01:30:00 EST-0500
+
+If you pass None as the is_dst flag to localize(), pytz will refuse to
+guess and raise exceptions if you try to build ambiguous or non-existent
+times.
+
+For example, 1:30am on 27th Oct 2002 happened twice in the US/Eastern
+timezone when the clocks where put back at the end of Daylight Saving
+Time:
+
+>>> dt = datetime(2002, 10, 27, 1, 30, 00)
+>>> try:
+... eastern.localize(dt, is_dst=None)
+... except pytz.exceptions.AmbiguousTimeError:
+... print('pytz.exceptions.AmbiguousTimeError: %s' % dt)
+pytz.exceptions.AmbiguousTimeError: 2002-10-27 01:30:00
+
+Similarly, 2:30am on 7th April 2002 never happened at all in the
+US/Eastern timezone, as the clocks where put forward at 2:00am skipping
+the entire hour:
+
+>>> dt = datetime(2002, 4, 7, 2, 30, 00)
+>>> try:
+... eastern.localize(dt, is_dst=None)
+... except pytz.exceptions.NonExistentTimeError:
+... print('pytz.exceptions.NonExistentTimeError: %s' % dt)
+pytz.exceptions.NonExistentTimeError: 2002-04-07 02:30:00
+
+Both of these exceptions share a common base class to make error handling
+easier:
+
+>>> isinstance(pytz.AmbiguousTimeError(), pytz.InvalidTimeError)
+True
+>>> isinstance(pytz.NonExistentTimeError(), pytz.InvalidTimeError)
+True
+
+
+A special case is where countries change their timezone definitions
+with no daylight savings time switch. For example, in 1915 Warsaw
+switched from Warsaw time to Central European time with no daylight savings
+transition. So at the stroke of midnight on August 5th 1915 the clocks
+were wound back 24 minutes creating an ambiguous time period that cannot
+be specified without referring to the timezone abbreviation or the
+actual UTC offset. In this case midnight happened twice, neither time
+during a daylight saving time period. pytz handles this transition by
+treating the ambiguous period before the switch as daylight savings
+time, and the ambiguous period after as standard time.
+
+
+>>> warsaw = pytz.timezone('Europe/Warsaw')
+>>> amb_dt1 = warsaw.localize(datetime(1915, 8, 4, 23, 59, 59), is_dst=True)
+>>> amb_dt1.strftime(fmt)
+'1915-08-04 23:59:59 WMT+0124'
+>>> amb_dt2 = warsaw.localize(datetime(1915, 8, 4, 23, 59, 59), is_dst=False)
+>>> amb_dt2.strftime(fmt)
+'1915-08-04 23:59:59 CET+0100'
+>>> switch_dt = warsaw.localize(datetime(1915, 8, 5, 00, 00, 00), is_dst=False)
+>>> switch_dt.strftime(fmt)
+'1915-08-05 00:00:00 CET+0100'
+>>> str(switch_dt - amb_dt1)
+'0:24:01'
+>>> str(switch_dt - amb_dt2)
+'0:00:01'
+
+The best way of creating a time during an ambiguous time period is
+by converting from another timezone such as UTC:
+
+>>> utc_dt = datetime(1915, 8, 4, 22, 36, tzinfo=pytz.utc)
+>>> utc_dt.astimezone(warsaw).strftime(fmt)
+'1915-08-04 23:36:00 CET+0100'
+
+The standard Python way of handling all these ambiguities is not to
+handle them, such as demonstrated in this example using the US/Eastern
+timezone definition from the Python documentation (Note that this
+implementation only works for dates between 1987 and 2006 - it is
+included for tests only!):
+
+>>> from pytz.reference import Eastern # pytz.reference only for tests
+>>> dt = datetime(2002, 10, 27, 0, 30, tzinfo=Eastern)
+>>> str(dt)
+'2002-10-27 00:30:00-04:00'
+>>> str(dt + timedelta(hours=1))
+'2002-10-27 01:30:00-05:00'
+>>> str(dt + timedelta(hours=2))
+'2002-10-27 02:30:00-05:00'
+>>> str(dt + timedelta(hours=3))
+'2002-10-27 03:30:00-05:00'
+
+Notice the first two results? At first glance you might think they are
+correct, but taking the UTC offset into account you find that they are
+actually two hours appart instead of the 1 hour we asked for.
+
+>>> from pytz.reference import UTC # pytz.reference only for tests
+>>> str(dt.astimezone(UTC))
+'2002-10-27 04:30:00+00:00'
+>>> str((dt + timedelta(hours=1)).astimezone(UTC))
+'2002-10-27 06:30:00+00:00'
+
+
+Country Information
+~~~~~~~~~~~~~~~~~~~
+
+A mechanism is provided to access the timezones commonly in use
+for a particular country, looked up using the ISO 3166 country code.
+It returns a list of strings that can be used to retrieve the relevant
+tzinfo instance using ``pytz.timezone()``:
+
+>>> print(' '.join(pytz.country_timezones['nz']))
+Pacific/Auckland Pacific/Chatham
+
+The Olson database comes with a ISO 3166 country code to English country
+name mapping that pytz exposes as a dictionary:
+
+>>> print(pytz.country_names['nz'])
+New Zealand
+
+
+What is UTC
+~~~~~~~~~~~
+
+'UTC' is `Coordinated Universal Time`_. It is a successor to, but distinct
+from, Greenwich Mean Time (GMT) and the various definitions of Universal
+Time. UTC is now the worldwide standard for regulating clocks and time
+measurement.
+
+All other timezones are defined relative to UTC, and include offsets like
+UTC+0800 - hours to add or subtract from UTC to derive the local time. No
+daylight saving time occurs in UTC, making it a useful timezone to perform
+date arithmetic without worrying about the confusion and ambiguities caused
+by daylight saving time transitions, your country changing its timezone, or
+mobile computers that roam through multiple timezones.
+
+.. _Coordinated Universal Time: https://en.wikipedia.org/wiki/Coordinated_Universal_Time
+
+
+Helpers
+~~~~~~~
+
+There are two lists of timezones provided.
+
+``all_timezones`` is the exhaustive list of the timezone names that can
+be used.
+
+>>> from pytz import all_timezones
+>>> len(all_timezones) >= 500
+True
+>>> 'Etc/Greenwich' in all_timezones
+True
+
+``common_timezones`` is a list of useful, current timezones. It doesn't
+contain deprecated zones or historical zones, except for a few I've
+deemed in common usage, such as US/Eastern (open a bug report if you
+think other timezones are deserving of being included here). It is also
+a sequence of strings.
+
+>>> from pytz import common_timezones
+>>> len(common_timezones) < len(all_timezones)
+True
+>>> 'Etc/Greenwich' in common_timezones
+False
+>>> 'Australia/Melbourne' in common_timezones
+True
+>>> 'US/Eastern' in common_timezones
+True
+>>> 'Canada/Eastern' in common_timezones
+True
+>>> 'Australia/Yancowinna' in all_timezones
+True
+>>> 'Australia/Yancowinna' in common_timezones
+False
+
+Both ``common_timezones`` and ``all_timezones`` are alphabetically
+sorted:
+
+>>> common_timezones_dupe = common_timezones[:]
+>>> common_timezones_dupe.sort()
+>>> common_timezones == common_timezones_dupe
+True
+>>> all_timezones_dupe = all_timezones[:]
+>>> all_timezones_dupe.sort()
+>>> all_timezones == all_timezones_dupe
+True
+
+``all_timezones`` and ``common_timezones`` are also available as sets.
+
+>>> from pytz import all_timezones_set, common_timezones_set
+>>> 'US/Eastern' in all_timezones_set
+True
+>>> 'US/Eastern' in common_timezones_set
+True
+>>> 'Australia/Victoria' in common_timezones_set
+False
+
+You can also retrieve lists of timezones used by particular countries
+using the ``country_timezones()`` function. It requires an ISO-3166
+two letter country code.
+
+>>> from pytz import country_timezones
+>>> print(' '.join(country_timezones('ch')))
+Europe/Zurich
+>>> print(' '.join(country_timezones('CH')))
+Europe/Zurich
+
+
+Internationalization - i18n/l10n
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Pytz is an interface to the IANA database, which uses ASCII names. The `Unicode Consortium's Unicode Locales (CLDR) <http://cldr.unicode.org>`_
+project provides translations. Thomas Khyn's
+`l18n <https://pypi.org/project/l18n/>`_ package can be used to access
+these translations from Python.
+
+
+License
+~~~~~~~
+
+MIT license.
+
+This code is also available as part of Zope 3 under the Zope Public
+License, Version 2.1 (ZPL).
+
+I'm happy to relicense this code if necessary for inclusion in other
+open source projects.
+
+
+Latest Versions
+~~~~~~~~~~~~~~~
+
+This package will be updated after releases of the Olson timezone
+database. The latest version can be downloaded from the `Python Package
+Index <https://pypi.org/project/pytz/>`_. The code that is used
+to generate this distribution is hosted on launchpad.net and available
+using git::
+
+ git clone https://git.launchpad.net/pytz
+
+A mirror on github is also available at https://github.com/stub42/pytz
+
+Announcements of new releases are made on
+`Launchpad <https://launchpad.net/pytz>`_, and the
+`Atom feed <http://feeds.launchpad.net/pytz/announcements.atom>`_
+hosted there.
+
+
+Bugs, Feature Requests & Patches
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Bugs can be reported using `Launchpad Bugs <https://bugs.launchpad.net/pytz>`_.
+
+
+Security Issues
+~~~~~~~~~~~~~~~
+
+Reports about security issues can be made via `Tidelift <https://tidelift.com/security>`_.
+
+
+Issues & Limitations
+~~~~~~~~~~~~~~~~~~~~
+
+- Offsets from UTC are rounded to the nearest whole minute, so timezones
+ such as Europe/Amsterdam pre 1937 will be up to 30 seconds out. This
+ is a limitation of the Python datetime library.
+
+- If you think a timezone definition is incorrect, I probably can't fix
+ it. pytz is a direct translation of the Olson timezone database, and
+ changes to the timezone definitions need to be made to this source.
+ If you find errors they should be reported to the time zone mailing
+ list, linked from http://www.iana.org/time-zones.
+
+
+Further Reading
+~~~~~~~~~~~~~~~
+
+More info than you want to know about timezones:
+http://www.twinsun.com/tz/tz-link.htm
+
+
+Contact
+~~~~~~~
+
+Stuart Bishop <stuart@stuartbishop.net>
+
+
+
+
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz-2021.3.dist-info/RECORD b/monEnvTP/lib/python3.8/site-packages/pytz-2021.3.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..0f9212a3091457f2a5568f769c8a2c45618aee59
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pytz-2021.3.dist-info/RECORD
@@ -0,0 +1,619 @@
+pytz-2021.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+pytz-2021.3.dist-info/LICENSE.txt,sha256=vosaN-vibFkqkPbA6zMQOn84POL010mMCvmlJpkKB7g,1088
+pytz-2021.3.dist-info/METADATA,sha256=1bS7T_2JNf3x1IawE_xHaNlTutwHP-Y5sWzh7l_bBd8,21397
+pytz-2021.3.dist-info/RECORD,,
+pytz-2021.3.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
+pytz-2021.3.dist-info/top_level.txt,sha256=6xRYlt934v1yHb1JIrXgHyGxn3cqACvd-yE8ski_kcc,5
+pytz-2021.3.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+pytz/__init__.py,sha256=7LmjdxefgadSBQh-ElLF2IM9pbXumZ0RhrAjF-wcXLg,35163
+pytz/__pycache__/__init__.cpython-38.pyc,,
+pytz/__pycache__/exceptions.cpython-38.pyc,,
+pytz/__pycache__/lazy.cpython-38.pyc,,
+pytz/__pycache__/reference.cpython-38.pyc,,
+pytz/__pycache__/tzfile.cpython-38.pyc,,
+pytz/__pycache__/tzinfo.cpython-38.pyc,,
+pytz/exceptions.py,sha256=434ZcuLlpLQY9mWoGq7zJMV1TyiYvVgpKBU1qZkbDjM,1571
+pytz/lazy.py,sha256=toeR5uDWKBj6ezsUZ4elNP6CEMtK7CO2jS9A30nsFbo,5404
+pytz/reference.py,sha256=zUtCki7JFEmrzrjNsfMD7YL0lWDxynKc1Ubo4iXSs74,3778
+pytz/tzfile.py,sha256=K2y7pZs4vydpZVftrfAA_-hgw17y1Szc7z_QCse6udU,4723
+pytz/tzinfo.py,sha256=-5UjW-yqHbtO5NtSaWope7EbSdf2oTES26Kdlxjqdk0,19272
+pytz/zoneinfo/Africa/Abidjan,sha256=0u-sTl8j2IyV1ywdtCgHFw9S9D3ZiiBa9akqkbny2Zc,148
+pytz/zoneinfo/Africa/Accra,sha256=0u-sTl8j2IyV1ywdtCgHFw9S9D3ZiiBa9akqkbny2Zc,148
+pytz/zoneinfo/Africa/Addis_Ababa,sha256=yJsuJTqJJqbOz37_NOS_zbf-JNr_IthHGMMN7sDqSWg,265
+pytz/zoneinfo/Africa/Algiers,sha256=vaFpjNVCwObnbfu82rOQzdJvN6nVgmpXpQ1aqzfzsqY,735
+pytz/zoneinfo/Africa/Asmara,sha256=yJsuJTqJJqbOz37_NOS_zbf-JNr_IthHGMMN7sDqSWg,265
+pytz/zoneinfo/Africa/Asmera,sha256=yJsuJTqJJqbOz37_NOS_zbf-JNr_IthHGMMN7sDqSWg,265
+pytz/zoneinfo/Africa/Bamako,sha256=0u-sTl8j2IyV1ywdtCgHFw9S9D3ZiiBa9akqkbny2Zc,148
+pytz/zoneinfo/Africa/Bangui,sha256=z_6wKCzL1_ug5JP_hneh5abdUZeIUELkN_ladz-ESEY,235
+pytz/zoneinfo/Africa/Banjul,sha256=0u-sTl8j2IyV1ywdtCgHFw9S9D3ZiiBa9akqkbny2Zc,148
+pytz/zoneinfo/Africa/Bissau,sha256=IjuxDP6EZiDHFvl_bHS6NN7sdRxLKXllooBC829poak,194
+pytz/zoneinfo/Africa/Blantyre,sha256=k_GelVHViGiuWCB1LSyTpIYSTDZEY9yclInQRY-LxoI,149
+pytz/zoneinfo/Africa/Brazzaville,sha256=z_6wKCzL1_ug5JP_hneh5abdUZeIUELkN_ladz-ESEY,235
+pytz/zoneinfo/Africa/Bujumbura,sha256=k_GelVHViGiuWCB1LSyTpIYSTDZEY9yclInQRY-LxoI,149
+pytz/zoneinfo/Africa/Cairo,sha256=L6zLQLnQtLkEELOGfm6USaHY33qAEPgGV822-iU1vxc,1955
+pytz/zoneinfo/Africa/Casablanca,sha256=qzlDyFvkLZWy8Bydogdx_cxZCkWzRwEEsuVWstJI_-s,2429
+pytz/zoneinfo/Africa/Ceuta,sha256=jp7xqONgZ3NPnElHzJEVusHKM9rxDK1nxJm4-i7Ln8o,2036
+pytz/zoneinfo/Africa/Conakry,sha256=0u-sTl8j2IyV1ywdtCgHFw9S9D3ZiiBa9akqkbny2Zc,148
+pytz/zoneinfo/Africa/Dakar,sha256=0u-sTl8j2IyV1ywdtCgHFw9S9D3ZiiBa9akqkbny2Zc,148
+pytz/zoneinfo/Africa/Dar_es_Salaam,sha256=yJsuJTqJJqbOz37_NOS_zbf-JNr_IthHGMMN7sDqSWg,265
+pytz/zoneinfo/Africa/Djibouti,sha256=yJsuJTqJJqbOz37_NOS_zbf-JNr_IthHGMMN7sDqSWg,265
+pytz/zoneinfo/Africa/Douala,sha256=z_6wKCzL1_ug5JP_hneh5abdUZeIUELkN_ladz-ESEY,235
+pytz/zoneinfo/Africa/El_Aaiun,sha256=Ja0t5t3QHHrvY0EGgxadypAabj4GjMLuQTTbOAur5M0,2295
+pytz/zoneinfo/Africa/Freetown,sha256=0u-sTl8j2IyV1ywdtCgHFw9S9D3ZiiBa9akqkbny2Zc,148
+pytz/zoneinfo/Africa/Gaborone,sha256=k_GelVHViGiuWCB1LSyTpIYSTDZEY9yclInQRY-LxoI,149
+pytz/zoneinfo/Africa/Harare,sha256=k_GelVHViGiuWCB1LSyTpIYSTDZEY9yclInQRY-LxoI,149
+pytz/zoneinfo/Africa/Johannesburg,sha256=bBvMdSZo53WFowiuhUO9C8zY6BOGViboCb-U8_49l34,246
+pytz/zoneinfo/Africa/Juba,sha256=UVnIqEPJwHLTMC-r5qZQHNv9opoYVsKdq-ta_5XUw_Q,679
+pytz/zoneinfo/Africa/Kampala,sha256=yJsuJTqJJqbOz37_NOS_zbf-JNr_IthHGMMN7sDqSWg,265
+pytz/zoneinfo/Africa/Khartoum,sha256=MYWDoJ3AcCItZdApoeOgtWWDDxquwTon5v5TOGP70-o,679
+pytz/zoneinfo/Africa/Kigali,sha256=k_GelVHViGiuWCB1LSyTpIYSTDZEY9yclInQRY-LxoI,149
+pytz/zoneinfo/Africa/Kinshasa,sha256=z_6wKCzL1_ug5JP_hneh5abdUZeIUELkN_ladz-ESEY,235
+pytz/zoneinfo/Africa/Lagos,sha256=z_6wKCzL1_ug5JP_hneh5abdUZeIUELkN_ladz-ESEY,235
+pytz/zoneinfo/Africa/Libreville,sha256=z_6wKCzL1_ug5JP_hneh5abdUZeIUELkN_ladz-ESEY,235
+pytz/zoneinfo/Africa/Lome,sha256=0u-sTl8j2IyV1ywdtCgHFw9S9D3ZiiBa9akqkbny2Zc,148
+pytz/zoneinfo/Africa/Luanda,sha256=z_6wKCzL1_ug5JP_hneh5abdUZeIUELkN_ladz-ESEY,235
+pytz/zoneinfo/Africa/Lubumbashi,sha256=k_GelVHViGiuWCB1LSyTpIYSTDZEY9yclInQRY-LxoI,149
+pytz/zoneinfo/Africa/Lusaka,sha256=k_GelVHViGiuWCB1LSyTpIYSTDZEY9yclInQRY-LxoI,149
+pytz/zoneinfo/Africa/Malabo,sha256=z_6wKCzL1_ug5JP_hneh5abdUZeIUELkN_ladz-ESEY,235
+pytz/zoneinfo/Africa/Maputo,sha256=k_GelVHViGiuWCB1LSyTpIYSTDZEY9yclInQRY-LxoI,149
+pytz/zoneinfo/Africa/Maseru,sha256=bBvMdSZo53WFowiuhUO9C8zY6BOGViboCb-U8_49l34,246
+pytz/zoneinfo/Africa/Mbabane,sha256=bBvMdSZo53WFowiuhUO9C8zY6BOGViboCb-U8_49l34,246
+pytz/zoneinfo/Africa/Mogadishu,sha256=yJsuJTqJJqbOz37_NOS_zbf-JNr_IthHGMMN7sDqSWg,265
+pytz/zoneinfo/Africa/Monrovia,sha256=-VsJW5cU4KdvfgYaQVv4lcuzmaKIVFMd42nO6RXOBdU,208
+pytz/zoneinfo/Africa/Nairobi,sha256=yJsuJTqJJqbOz37_NOS_zbf-JNr_IthHGMMN7sDqSWg,265
+pytz/zoneinfo/Africa/Ndjamena,sha256=8T3A0Zm9Gj0Bvm6rd88t3GAXKiKdGUfHlIqYlkYI0KM,199
+pytz/zoneinfo/Africa/Niamey,sha256=z_6wKCzL1_ug5JP_hneh5abdUZeIUELkN_ladz-ESEY,235
+pytz/zoneinfo/Africa/Nouakchott,sha256=0u-sTl8j2IyV1ywdtCgHFw9S9D3ZiiBa9akqkbny2Zc,148
+pytz/zoneinfo/Africa/Ouagadougou,sha256=0u-sTl8j2IyV1ywdtCgHFw9S9D3ZiiBa9akqkbny2Zc,148
+pytz/zoneinfo/Africa/Porto-Novo,sha256=z_6wKCzL1_ug5JP_hneh5abdUZeIUELkN_ladz-ESEY,235
+pytz/zoneinfo/Africa/Sao_Tome,sha256=MdjxpQ268uzJ7Zx1ZroFUtRUwqsJ6F_yY3AYV9FXw1I,254
+pytz/zoneinfo/Africa/Timbuktu,sha256=0u-sTl8j2IyV1ywdtCgHFw9S9D3ZiiBa9akqkbny2Zc,148
+pytz/zoneinfo/Africa/Tripoli,sha256=W1dptGD70T7ppGoo0fczFQeDiIp0nultLNPV66MwB2c,625
+pytz/zoneinfo/Africa/Tunis,sha256=OFVMEM4eYT2Ez0beuhEUCTSIpcFldWxsV2uEoTZIUNI,689
+pytz/zoneinfo/Africa/Windhoek,sha256=xuhvudrMH4alnVmouSTQI8YL8F_HbgsF2EQ7AZKzuHs,955
+pytz/zoneinfo/America/Adak,sha256=IB1DhwJQAKbhPJ9jHLf8zW5Dad7HIkBS-dhv64E1OlM,2356
+pytz/zoneinfo/America/Anchorage,sha256=oZA1NSPS2BWdymYpnCHFO8BlYVS-ll5KLg2Ez9CbETs,2371
+pytz/zoneinfo/America/Anguilla,sha256=hJHlV_-AGoMGUWuMpZRv9fLmghrzFHfrR9fRkcxaZJc,246
+pytz/zoneinfo/America/Antigua,sha256=hJHlV_-AGoMGUWuMpZRv9fLmghrzFHfrR9fRkcxaZJc,246
+pytz/zoneinfo/America/Araguaina,sha256=kppiiytmSQeesflyNGYM3r8NVUl1C-ggu08s9_Tt-co,884
+pytz/zoneinfo/America/Argentina/Buenos_Aires,sha256=ntn_GFHadbrFJ4ZuhU6h2uzbFwmDyS9mXV5S28pkGF8,1076
+pytz/zoneinfo/America/Argentina/Catamarca,sha256=diH1f96kbbY-7gJYQnSCNHs3n9dwHJqUhSdGNx1L7I0,1076
+pytz/zoneinfo/America/Argentina/ComodRivadavia,sha256=diH1f96kbbY-7gJYQnSCNHs3n9dwHJqUhSdGNx1L7I0,1076
+pytz/zoneinfo/America/Argentina/Cordoba,sha256=1XqIP8Qo2bPR7909hrAI-qAttybmwEW4ms7FjZA5Yfw,1076
+pytz/zoneinfo/America/Argentina/Jujuy,sha256=5HR0TlZFifwJ5nLTmg7yWXgCTx9mRhahfs4_Wq70wOY,1048
+pytz/zoneinfo/America/Argentina/La_Rioja,sha256=Zf_E3akFE1YUt9MZ4xxbRnOrp2bH1D-Bjsc0SLFfRyU,1090
+pytz/zoneinfo/America/Argentina/Mendoza,sha256=5DJiYYeQpcLBR_IoIJtk43IswJeGYawx5GykszuJ-Nw,1076
+pytz/zoneinfo/America/Argentina/Rio_Gallegos,sha256=T97WADwva6JbxICviNQUt_7iw9c-nloI4QJCscENSck,1076
+pytz/zoneinfo/America/Argentina/Salta,sha256=ATw0uR6szWKPs6jzdn6revS7UxCXD26ORK6jlmsjL18,1048
+pytz/zoneinfo/America/Argentina/San_Juan,sha256=qlW693a0Tnofy-RdcVBuWY3DvTTGxWwcYdKU3Y98pX8,1090
+pytz/zoneinfo/America/Argentina/San_Luis,sha256=WYdcro5-Fe-N6LkQsKwx_1tVozmnBp58DO1-BJs2suo,1102
+pytz/zoneinfo/America/Argentina/Tucuman,sha256=wsjg1a5AM1dP2gjr112k3vt54trcOOM_StF74xzvBJc,1104
+pytz/zoneinfo/America/Argentina/Ushuaia,sha256=9548Vvq_kpw_NX5s65vYuIbqvwGV-PBxqwmcrflLI0U,1076
+pytz/zoneinfo/America/Aruba,sha256=hJHlV_-AGoMGUWuMpZRv9fLmghrzFHfrR9fRkcxaZJc,246
+pytz/zoneinfo/America/Asuncion,sha256=FTLtFk6MjJoh5VIDgJ2Sf4B_iNeCDxrV0MWwQL-sOVM,2044
+pytz/zoneinfo/America/Atikokan,sha256=kayA_pdpMcSQ0FjIzotdcf-m1JYfbKE-qcFT8LC8zqA,182
+pytz/zoneinfo/America/Atka,sha256=IB1DhwJQAKbhPJ9jHLf8zW5Dad7HIkBS-dhv64E1OlM,2356
+pytz/zoneinfo/America/Bahia,sha256=cmLkSAAzINlzYGXBqADEU3uPgA9S5nt-p1AV3Zy86VY,1024
+pytz/zoneinfo/America/Bahia_Banderas,sha256=BNjbcHSlPsJ4UpJx-gs1hpIyx2ScBieh1nyDuGb0PcE,1546
+pytz/zoneinfo/America/Barbados,sha256=ima-Qrrhazu4Qfvu2Z0-e6E-GTiYknuJBu6c2yVG9LE,436
+pytz/zoneinfo/America/Belem,sha256=_258hQZLCEXBX8xRLyQSw-AE-jiDmjVwJX32mN5UUEk,576
+pytz/zoneinfo/America/Belize,sha256=pkfLY2KfPchbeJa1pWcXmWAwp4ZlRvxWLVezXnrbkws,1614
+pytz/zoneinfo/America/Blanc-Sablon,sha256=hJHlV_-AGoMGUWuMpZRv9fLmghrzFHfrR9fRkcxaZJc,246
+pytz/zoneinfo/America/Boa_Vista,sha256=V4VVOkrFUV1qUfVp9E974IOJFmA5QxQrctatTBEb-hs,632
+pytz/zoneinfo/America/Bogota,sha256=ZaQKTZi35AMdlROs0vjEDA_phR8ztJOnjA8aLJZ5tHw,246
+pytz/zoneinfo/America/Boise,sha256=Yv4AXa2nSH_oVo3FZqZCR7V7z7c6WnQgKIUyNUpzGXA,2394
+pytz/zoneinfo/America/Buenos_Aires,sha256=ntn_GFHadbrFJ4ZuhU6h2uzbFwmDyS9mXV5S28pkGF8,1076
+pytz/zoneinfo/America/Cambridge_Bay,sha256=Nanl8yH4SshljhEjDe-PZCYEXbUuuZGmkbAAt2dB-bk,2084
+pytz/zoneinfo/America/Campo_Grande,sha256=5BBENR3_8gJp4F_Uj2RRknvRc4JJWNRPnZU9E7tb8QI,1444
+pytz/zoneinfo/America/Cancun,sha256=YR2U5T6mDGd5xm8EVA_TM1NwSRMYPNYWvV7wuthnX0I,782
+pytz/zoneinfo/America/Caracas,sha256=2NpwXPEtQkI82WCZuQWHXf66VCADcawMpfhKTsuA0x4,264
+pytz/zoneinfo/America/Catamarca,sha256=diH1f96kbbY-7gJYQnSCNHs3n9dwHJqUhSdGNx1L7I0,1076
+pytz/zoneinfo/America/Cayenne,sha256=atVbW5ChJiKQ_q-3kFs-DLTTZa9ptkiHkmJlq4AXoY4,198
+pytz/zoneinfo/America/Cayman,sha256=kayA_pdpMcSQ0FjIzotdcf-m1JYfbKE-qcFT8LC8zqA,182
+pytz/zoneinfo/America/Chicago,sha256=4aZFw-svkMyXmSpNufqzK-xveos-oVJDpEyI8Yu9HQE,3576
+pytz/zoneinfo/America/Chihuahua,sha256=cewXJyEw4KCoz33yl8o2tUJZmugBWH4R0Aovdmuqf-o,1484
+pytz/zoneinfo/America/Coral_Harbour,sha256=kayA_pdpMcSQ0FjIzotdcf-m1JYfbKE-qcFT8LC8zqA,182
+pytz/zoneinfo/America/Cordoba,sha256=1XqIP8Qo2bPR7909hrAI-qAttybmwEW4ms7FjZA5Yfw,1076
+pytz/zoneinfo/America/Costa_Rica,sha256=74rYa6lrgIkyls9PkHo8SCYl9oOqiuG5S7MWdnJelP4,316
+pytz/zoneinfo/America/Creston,sha256=nEOwYOnGxENw9zW8m50PGxbtVfTrX3QYAo4x4LgOLfI,328
+pytz/zoneinfo/America/Cuiaba,sha256=M0FsR8T9s4jFSuzD8Qi6pqtb6Rf2NTzyVHKGZrn56n4,1416
+pytz/zoneinfo/America/Curacao,sha256=hJHlV_-AGoMGUWuMpZRv9fLmghrzFHfrR9fRkcxaZJc,246
+pytz/zoneinfo/America/Danmarkshavn,sha256=YRZAfUCoVtaL1L-MYMYMH1wyOaVQnfUo_gFnvMXSuzw,698
+pytz/zoneinfo/America/Dawson,sha256=rAHhyuMuyjf_eyA2SBG76MRBf_fj_xi5FAuiWVQgJhw,1614
+pytz/zoneinfo/America/Dawson_Creek,sha256=aJXCyP4j3ggE4wGCN-LrS9hpD_5zWHzQTeSAKTWEPUM,1050
+pytz/zoneinfo/America/Denver,sha256=6_yPo1_mvnt9DgpPzr0QdHsjdsfUG6ALnagQLML1DSM,2444
+pytz/zoneinfo/America/Detroit,sha256=hecz8yqY2Cj5B61G3gLZdAVZvRgK9l0P90c_gN-uD5g,2230
+pytz/zoneinfo/America/Dominica,sha256=hJHlV_-AGoMGUWuMpZRv9fLmghrzFHfrR9fRkcxaZJc,246
+pytz/zoneinfo/America/Edmonton,sha256=-TkIfc3QlvaCf0p8COZ43Y1HRBAl-nARUi-JdXeK1vE,2332
+pytz/zoneinfo/America/Eirunepe,sha256=pS90HZzRwH4Tf8ugmKHfiphX7zCPqZkh_0CNb-fEMAM,656
+pytz/zoneinfo/America/El_Salvador,sha256=gvGN8Lkj-sGm2_rs8OUjAMf1oMtKp2Xes6UfWT0WqgU,224
+pytz/zoneinfo/America/Ensenada,sha256=OHHtvy3J70z6wvKBHgPqMEnGs6SXp8fkf0WX9ZiOODk,2342
+pytz/zoneinfo/America/Fort_Nelson,sha256=erfODr3DrSpz65kAdO7Ts2dGbZxvddEP6gx4BX3y2J0,2240
+pytz/zoneinfo/America/Fort_Wayne,sha256=GrNub1_3Um5Qh67wOx58_TEAz4fwAeAlk2AlMTVA_sI,1666
+pytz/zoneinfo/America/Fortaleza,sha256=mITuMrRLRTWyoiF04Oy_UZ8gxZofTpXDblM8t7ch7Sg,716
+pytz/zoneinfo/America/Glace_Bay,sha256=G8DGLGCapH_aYCF_OhaL5Qonf7FOAgAPwelO5htCWBc,2192
+pytz/zoneinfo/America/Godthab,sha256=FtlXWP_hBNuwBHkI2b1yne_tSUJpwLtWLyTHZoFZkmM,1878
+pytz/zoneinfo/America/Goose_Bay,sha256=JgaLueghSvX2g725FOfIgpgvsqxZGykWOhAZWGpQZRY,3210
+pytz/zoneinfo/America/Grand_Turk,sha256=4YOFEPK60Bel2_fCsY6vSZxUcMJKjiKtyOf_Q0khEwU,1834
+pytz/zoneinfo/America/Grenada,sha256=hJHlV_-AGoMGUWuMpZRv9fLmghrzFHfrR9fRkcxaZJc,246
+pytz/zoneinfo/America/Guadeloupe,sha256=hJHlV_-AGoMGUWuMpZRv9fLmghrzFHfrR9fRkcxaZJc,246
+pytz/zoneinfo/America/Guatemala,sha256=dugUgCd6QY52yHkHuUP4jRWzo5x439IQigaYCvEF46Q,280
+pytz/zoneinfo/America/Guayaquil,sha256=PbcF4bvGAm-aFwdtGPotJy3kb4NwoyWwxgwL98BeUWA,246
+pytz/zoneinfo/America/Guyana,sha256=icHu0YLCJhwk9D47f4VCBHgnex6yGrY4JFtjkfMIeDs,262
+pytz/zoneinfo/America/Halifax,sha256=TZpmc5PwWoLfTfQoQ_b3U17BE2iVKSeNkR0Ho8mbTn8,3424
+pytz/zoneinfo/America/Havana,sha256=HUQeAuKBsEkI5SLZjqynXICOUVOajkKzKH5r-Ov5Odc,2416
+pytz/zoneinfo/America/Hermosillo,sha256=9Ij30JYmMscC1XHi4o9v-uSXoUuE8V9zhGz2iV5hVFI,416
+pytz/zoneinfo/America/Indiana/Indianapolis,sha256=GrNub1_3Um5Qh67wOx58_TEAz4fwAeAlk2AlMTVA_sI,1666
+pytz/zoneinfo/America/Indiana/Knox,sha256=BiALShjiOLg1o8mMRWJ1jyTlJkgvwzte7B9WSOvTUNg,2428
+pytz/zoneinfo/America/Indiana/Marengo,sha256=CPYY3XgJFNEzONxei7x04wOGI_b86RAn4jBPewi1HZw,1722
+pytz/zoneinfo/America/Indiana/Petersburg,sha256=axot1SloP27ZWjezmo7kldu9qA2frEtPVqWngcXtft0,1904
+pytz/zoneinfo/America/Indiana/Tell_City,sha256=GrWNjb1i4sbIYlJ8fU0viJ2Q5JmrlvLgcLQILnk3El8,1684
+pytz/zoneinfo/America/Indiana/Vevay,sha256=GGosHbQUoIDOKPZxdal42X40veEITMmrnlKOnLUhb-c,1414
+pytz/zoneinfo/America/Indiana/Vincennes,sha256=gh7LAbHbMD92eo9C_c5IiwQ1fJvxhdJN402Q_4YJdLg,1694
+pytz/zoneinfo/America/Indiana/Winamac,sha256=yS-_aKSC4crd0WdNutkHRHxUjmBCU56QVQcqy7kYpbQ,1778
+pytz/zoneinfo/America/Indianapolis,sha256=GrNub1_3Um5Qh67wOx58_TEAz4fwAeAlk2AlMTVA_sI,1666
+pytz/zoneinfo/America/Inuvik,sha256=MU_oDiidQaijt1KV0B5h9LqHoCrJ8ieldD9tsiJiX5o,1894
+pytz/zoneinfo/America/Iqaluit,sha256=6PitEMSFWcSb-Io8fvm4oQ_7v39G_qANc6reTjXoZJ0,2032
+pytz/zoneinfo/America/Jamaica,sha256=wlagieUPRf5-beie-h7QsONbNzjGsm8vMs8uf28pw28,482
+pytz/zoneinfo/America/Jujuy,sha256=5HR0TlZFifwJ5nLTmg7yWXgCTx9mRhahfs4_Wq70wOY,1048
+pytz/zoneinfo/America/Juneau,sha256=k7hxb0aGRnfnE-DBi3LkcjAzRPyAf0_Hw0vVFfjGeb0,2353
+pytz/zoneinfo/America/Kentucky/Louisville,sha256=-yqgeeHZdq6oP3_WzVvYOmqV9HQv8y7ZWmc9bzHvJAY,2772
+pytz/zoneinfo/America/Kentucky/Monticello,sha256=NJMKjG7jjlRzZhndMPw51bYW0D3jviW2Qbl70YcU0Gg,2352
+pytz/zoneinfo/America/Knox_IN,sha256=BiALShjiOLg1o8mMRWJ1jyTlJkgvwzte7B9WSOvTUNg,2428
+pytz/zoneinfo/America/Kralendijk,sha256=hJHlV_-AGoMGUWuMpZRv9fLmghrzFHfrR9fRkcxaZJc,246
+pytz/zoneinfo/America/La_Paz,sha256=PAGF2VU_QOw2xT1Cqdp2P8Aj9hXMVWlCByV7cvfIQ_k,232
+pytz/zoneinfo/America/Lima,sha256=JHDCg95uw6BEu4a4Gfyikm1s8rm8AsYPG8dJxQQNZFs,406
+pytz/zoneinfo/America/Los_Angeles,sha256=VOy1PikdjiVdJ7lukVGzwl8uDxV_KYqznkTm5BLEiDM,2836
+pytz/zoneinfo/America/Louisville,sha256=-yqgeeHZdq6oP3_WzVvYOmqV9HQv8y7ZWmc9bzHvJAY,2772
+pytz/zoneinfo/America/Lower_Princes,sha256=hJHlV_-AGoMGUWuMpZRv9fLmghrzFHfrR9fRkcxaZJc,246
+pytz/zoneinfo/America/Maceio,sha256=pzjNghmeHhvF4aI3cDq2G_5t71BSNGIbRAF5NmJyDmw,744
+pytz/zoneinfo/America/Managua,sha256=xBzF01AHn2E2fD8Qdy-DHFe36UqoeNpKPfChduBKWdk,430
+pytz/zoneinfo/America/Manaus,sha256=lp6RlkcXJQ7mSsKqnEgC8svJVrFDJk_16xxvfpNSpK4,604
+pytz/zoneinfo/America/Marigot,sha256=hJHlV_-AGoMGUWuMpZRv9fLmghrzFHfrR9fRkcxaZJc,246
+pytz/zoneinfo/America/Martinique,sha256=fMs80kOU2YFvC0f9y2eje97JeAtTYBamXrnlTunNLzQ,232
+pytz/zoneinfo/America/Matamoros,sha256=RlEMOT_zvCLQ8s7TNvRE2PnC4H9JrxO7MGxmfu5xPPI,1390
+pytz/zoneinfo/America/Mazatlan,sha256=aIyre-8trAXSHtqxbuu6gDDkWCUjI_SdAKPIjz74M2E,1526
+pytz/zoneinfo/America/Mendoza,sha256=5DJiYYeQpcLBR_IoIJtk43IswJeGYawx5GykszuJ-Nw,1076
+pytz/zoneinfo/America/Menominee,sha256=Arv9WLbfhNcpRsUjHDU757BEdwlp08Gt30AixG3gZ04,2274
+pytz/zoneinfo/America/Merida,sha256=BJQ5mzAT-akb_EA7WqGdNheCorDqLBnDS_4X3YJz0rc,1422
+pytz/zoneinfo/America/Metlakatla,sha256=twmieGTVY2V-U8nFxqvx7asYv8GVjeWdLtrOI7UApVI,1423
+pytz/zoneinfo/America/Mexico_City,sha256=DSpTe5TT0KBsxGx79Rs7ah-zJpiGOJKwPjztovRN0b4,1584
+pytz/zoneinfo/America/Miquelon,sha256=LNbkN87EnZUa41Xizko5VIN55EyQvf5Kk5b5AfNQG8Q,1666
+pytz/zoneinfo/America/Moncton,sha256=Wmv-bk9aKKcWWzOpc1UFu67HOfwaIk2Wmh3LgqGctys,3154
+pytz/zoneinfo/America/Monterrey,sha256=HA4yn9jQHk9i0PqiB7fSoFdzXtB1DT1cheGRPXrQNdQ,1390
+pytz/zoneinfo/America/Montevideo,sha256=4jcgTegK5X8F0yNYzk-3oySZ4U9XQ09UbTJ_mlu8N70,1510
+pytz/zoneinfo/America/Montreal,sha256=ggOSzbHkmfgu9wTQzP0MUKsrKMbgveuAeThh1eFl1a0,3494
+pytz/zoneinfo/America/Montserrat,sha256=hJHlV_-AGoMGUWuMpZRv9fLmghrzFHfrR9fRkcxaZJc,246
+pytz/zoneinfo/America/Nassau,sha256=ggOSzbHkmfgu9wTQzP0MUKsrKMbgveuAeThh1eFl1a0,3494
+pytz/zoneinfo/America/New_York,sha256=7AoiEGjr3wV4P7C4Qs35COZqwr2mjNDq7ocpsSPFOM8,3536
+pytz/zoneinfo/America/Nipigon,sha256=EGPXcOin8mfzFTkYJm4ICpY7fyE24I2pXg4ejafSMyU,2122
+pytz/zoneinfo/America/Nome,sha256=2izM3-P-PqJ9za6MdhzFfMvPFNq7Gim69tAvEwPeY2s,2367
+pytz/zoneinfo/America/Noronha,sha256=3R4lLV8jg5SljhC5OVVCk51Y77Efjo6zCe-oppg_FFo,716
+pytz/zoneinfo/America/North_Dakota/Beulah,sha256=PHlzEk3wsNXYsfMZZSio7ZfdnyxPFpOhK3dS-1AJKGg,2380
+pytz/zoneinfo/America/North_Dakota/Center,sha256=PaM52_JOVMEpVdw5qiOlhkp3qA0xp0d6Z9neOatmLKo,2380
+pytz/zoneinfo/America/North_Dakota/New_Salem,sha256=o0xmH1FUh3lVFLtP5Lb9c0PfSyaPTsRvQSQYwnn_yls,2380
+pytz/zoneinfo/America/Nuuk,sha256=FtlXWP_hBNuwBHkI2b1yne_tSUJpwLtWLyTHZoFZkmM,1878
+pytz/zoneinfo/America/Ojinaga,sha256=cO3V-x_1Q-mpbJgKNd6-WTfxDEHBV1aqS4wzVl5A0Q4,1484
+pytz/zoneinfo/America/Panama,sha256=kayA_pdpMcSQ0FjIzotdcf-m1JYfbKE-qcFT8LC8zqA,182
+pytz/zoneinfo/America/Pangnirtung,sha256=P9Kw_I-NxcUYJIr1j40jTn9q7F8TPAE_FqXsfLYF86A,2094
+pytz/zoneinfo/America/Paramaribo,sha256=Hm5tDwUmnoTrTUPEO4WArfSF74ZjywVEocy4kL51FzA,262
+pytz/zoneinfo/America/Phoenix,sha256=nEOwYOnGxENw9zW8m50PGxbtVfTrX3QYAo4x4LgOLfI,328
+pytz/zoneinfo/America/Port-au-Prince,sha256=09ZAJd4IOiMpfdpUuF1U44R_hRt6BvpAkFXOnYO9yOM,1434
+pytz/zoneinfo/America/Port_of_Spain,sha256=hJHlV_-AGoMGUWuMpZRv9fLmghrzFHfrR9fRkcxaZJc,246
+pytz/zoneinfo/America/Porto_Acre,sha256=17onkm8P_VgMkErjK9rr0qwNni7qp9tgcUZ93g3ltOs,628
+pytz/zoneinfo/America/Porto_Velho,sha256=ZRfzgGEu26hnl3JPtiZLOSFGj_WBSbOKdiLC1xIyc5c,576
+pytz/zoneinfo/America/Puerto_Rico,sha256=hJHlV_-AGoMGUWuMpZRv9fLmghrzFHfrR9fRkcxaZJc,246
+pytz/zoneinfo/America/Punta_Arenas,sha256=kpqStczF3X0yK0lwOcxmwbQM8ZV9MrNktm7orJF-EJc,1902
+pytz/zoneinfo/America/Rainy_River,sha256=r6kx6lD2IzCdygkj-DKyL2tPSn7k0Zil7PSHCBFKOa0,2122
+pytz/zoneinfo/America/Rankin_Inlet,sha256=KpQX97-EuF4MNyxQrtOKP616CK_vjniM-lo14WGVz0c,1892
+pytz/zoneinfo/America/Recife,sha256=ijFN2ZzZe5oBYdl8Ag3SwmGjj2JeVYYX2Vo767g2s6I,716
+pytz/zoneinfo/America/Regina,sha256=yjqT08pHbICYe83H8JmtaDBvCFqRv7Tfze3Y8xuXukw,980
+pytz/zoneinfo/America/Resolute,sha256=VP_u5XsepfSwx7Ou9zjGw2p5Qi10AIA54sP1J2DkppM,1892
+pytz/zoneinfo/America/Rio_Branco,sha256=17onkm8P_VgMkErjK9rr0qwNni7qp9tgcUZ93g3ltOs,628
+pytz/zoneinfo/America/Rosario,sha256=1XqIP8Qo2bPR7909hrAI-qAttybmwEW4ms7FjZA5Yfw,1076
+pytz/zoneinfo/America/Santa_Isabel,sha256=OHHtvy3J70z6wvKBHgPqMEnGs6SXp8fkf0WX9ZiOODk,2342
+pytz/zoneinfo/America/Santarem,sha256=Gl_lI3pPZ57UIYXWcmaTpFqWDA5re6bHh1nWs_Z0-Nc,602
+pytz/zoneinfo/America/Santiago,sha256=GB14PW0xABV283dXc8qL-nnDW-ViFUR3bne7sg0Aido,2529
+pytz/zoneinfo/America/Santo_Domingo,sha256=DKtaEj8fQ92ybITTWU4Bm160S9pzJmUVbjaWRnenxU4,458
+pytz/zoneinfo/America/Sao_Paulo,sha256=cO3VGekMGdSf1y4f_UgkpDMRes26-l1oGUoDglIiUQg,1444
+pytz/zoneinfo/America/Scoresbysund,sha256=dfHb86egoiNykb3bR3OHXpGFPm_Apck8BLiVTCqVAVc,1916
+pytz/zoneinfo/America/Shiprock,sha256=6_yPo1_mvnt9DgpPzr0QdHsjdsfUG6ALnagQLML1DSM,2444
+pytz/zoneinfo/America/Sitka,sha256=aiS7Fk37hZpzZ9VkeJQeF-BqTLRC1QOTCgMAJwT8UxA,2329
+pytz/zoneinfo/America/St_Barthelemy,sha256=hJHlV_-AGoMGUWuMpZRv9fLmghrzFHfrR9fRkcxaZJc,246
+pytz/zoneinfo/America/St_Johns,sha256=r1-17uKv27eZ3JsVkw_DLZQbo6wvjuuVu7C2pDsmOgI,3655
+pytz/zoneinfo/America/St_Kitts,sha256=hJHlV_-AGoMGUWuMpZRv9fLmghrzFHfrR9fRkcxaZJc,246
+pytz/zoneinfo/America/St_Lucia,sha256=hJHlV_-AGoMGUWuMpZRv9fLmghrzFHfrR9fRkcxaZJc,246
+pytz/zoneinfo/America/St_Thomas,sha256=hJHlV_-AGoMGUWuMpZRv9fLmghrzFHfrR9fRkcxaZJc,246
+pytz/zoneinfo/America/St_Vincent,sha256=hJHlV_-AGoMGUWuMpZRv9fLmghrzFHfrR9fRkcxaZJc,246
+pytz/zoneinfo/America/Swift_Current,sha256=RRKOF7vZC8VvYxD8PP4J1_hUPayKBP7Lu80avRkfPDY,560
+pytz/zoneinfo/America/Tegucigalpa,sha256=EzOz7ntTlreMq69JZ2CcAb8Ps98V9bUMN480tpPIyw4,252
+pytz/zoneinfo/America/Thule,sha256=8xuPRaZU8RgO5ECqFYHYmnHioc81sBOailkVu8Y02i8,1502
+pytz/zoneinfo/America/Thunder_Bay,sha256=cJ9lcf2mDZttEx_ttYYoZAJfuGhSsDgNV2PI-ggWdPE,2202
+pytz/zoneinfo/America/Tijuana,sha256=OHHtvy3J70z6wvKBHgPqMEnGs6SXp8fkf0WX9ZiOODk,2342
+pytz/zoneinfo/America/Toronto,sha256=ggOSzbHkmfgu9wTQzP0MUKsrKMbgveuAeThh1eFl1a0,3494
+pytz/zoneinfo/America/Tortola,sha256=hJHlV_-AGoMGUWuMpZRv9fLmghrzFHfrR9fRkcxaZJc,246
+pytz/zoneinfo/America/Vancouver,sha256=sknKH0jSPWam-DHfM35qXs8Nam7d5TFlkUI9Sgxryyg,2892
+pytz/zoneinfo/America/Virgin,sha256=hJHlV_-AGoMGUWuMpZRv9fLmghrzFHfrR9fRkcxaZJc,246
+pytz/zoneinfo/America/Whitehorse,sha256=Kfv607qGHJxXGBP1nPJyNg2_duWrmxhZGFQr82ukgq8,1614
+pytz/zoneinfo/America/Winnipeg,sha256=7P-_YQrneFcon7QKSTOnkiGjEppFDn3Z48MJ1qq8VBw,2868
+pytz/zoneinfo/America/Yakutat,sha256=tFwnKbvwhyyn4LNTAn5ye_JWDdxjCerNDt7oOwUwO2M,2305
+pytz/zoneinfo/America/Yellowknife,sha256=pfFvC8NEy373KbO6r6ec-Gw_O0D2h64mXU1X1AsUDgE,1966
+pytz/zoneinfo/Antarctica/Casey,sha256=a_ShNA5q27F-GNPiFPttIhhdHc1MP485jX6pwRjZ_t0,384
+pytz/zoneinfo/Antarctica/Davis,sha256=6PokyOaaISRTN13sisuGgdt5vG5A2YqNooJpfLTb5SQ,297
+pytz/zoneinfo/Antarctica/DumontDUrville,sha256=ei_XjmiRDLh-RU94uvz9CCIIRFH1r0X7WL-sB-6DF60,186
+pytz/zoneinfo/Antarctica/Macquarie,sha256=ie7RlaU8RHTorVVj-MX8StKMqx_oXf4UH2PUqpzcwe0,2260
+pytz/zoneinfo/Antarctica/Mawson,sha256=9TW1g_z0tk5EfeB7K69VJo8agO7-K9ZxWbiqNKnUZNE,199
+pytz/zoneinfo/Antarctica/McMurdo,sha256=gADjoyPo_QISQU6UJrAgcHp3HDaMoOFRdH-d23uBSyc,2437
+pytz/zoneinfo/Antarctica/Palmer,sha256=DW_DXByXg5MnMZ-w1bNdu8b0lKOYD_EgrPRd5EcyEm4,1418
+pytz/zoneinfo/Antarctica/Rothera,sha256=QQI1m1IN4_2e6Bb0z-rOYaOwxp4XjMJDOKM9SFDUPKg,164
+pytz/zoneinfo/Antarctica/South_Pole,sha256=gADjoyPo_QISQU6UJrAgcHp3HDaMoOFRdH-d23uBSyc,2437
+pytz/zoneinfo/Antarctica/Syowa,sha256=rq9KPj8l0FBnnKn93WkMeA1IngNtTzk5_oV4sEZhc4w,165
+pytz/zoneinfo/Antarctica/Troll,sha256=3zrh-P_jMCss9GGwHJJHkypZZydq4mkgo_TDqctn3c4,1162
+pytz/zoneinfo/Antarctica/Vostok,sha256=6tx86WD3MVGJBCbOJUCoA6YlGwCn2BT4B85Zss0vz4Y,165
+pytz/zoneinfo/Arctic/Longyearbyen,sha256=UdCERhj1JYpx3ojmilaRoyVoR4qMA1-PEv6hGwnpsJA,2228
+pytz/zoneinfo/Asia/Aden,sha256=rq9KPj8l0FBnnKn93WkMeA1IngNtTzk5_oV4sEZhc4w,165
+pytz/zoneinfo/Asia/Almaty,sha256=rBIl_pqZNmKZabjEa4mcsLahl9PbAdZJpQMQLVmcfBU,997
+pytz/zoneinfo/Asia/Amman,sha256=S4wAXF0MX5MpeQyIHI08w_fG_735xdah3aRS9VRPFjE,1853
+pytz/zoneinfo/Asia/Anadyr,sha256=hDDTly45ejoVVP9Al07TmKpTACNGJaIPlcXLRbsG_4g,1188
+pytz/zoneinfo/Asia/Aqtau,sha256=A5exZN256JagFJTcasgdCrQ8giOqZ2EFMRVYBWTaqZA,983
+pytz/zoneinfo/Asia/Aqtobe,sha256=LQ7P5LEEe7jbWbjqvzmM79c0o6AdZeCExQS-fOWp8yw,1011
+pytz/zoneinfo/Asia/Ashgabat,sha256=L4DYV2mZWycsYeHIypXzO6ZNY3tD8wjgxfPR2ZPW26c,619
+pytz/zoneinfo/Asia/Ashkhabad,sha256=L4DYV2mZWycsYeHIypXzO6ZNY3tD8wjgxfPR2ZPW26c,619
+pytz/zoneinfo/Asia/Atyrau,sha256=3uEo89ORyDJqQ_TtaQdIf9UPaB8WqIRQVi0geeY9gVE,991
+pytz/zoneinfo/Asia/Baghdad,sha256=lQMSUnOuijbcoTaCqMNnYhnvKtS2IVP_kXFAzePVNDU,983
+pytz/zoneinfo/Asia/Bahrain,sha256=V0rFJdLHIrToJ5Wl28VzVowwCVZoY8ZZSeNp-7kOvjY,199
+pytz/zoneinfo/Asia/Baku,sha256=vhHnliaOdRyNudl0sFJFdLynEg0Hc0I-IiZNfbDeCbM,1227
+pytz/zoneinfo/Asia/Bangkok,sha256=eYq0vh89N1j069URoQvtBu0ndEal6FPrtbF8WCKKpDw,199
+pytz/zoneinfo/Asia/Barnaul,sha256=2c1Cq8XYlBgybRQMP8w0NCf7kaLDrPZtGn4M5iJZbJo,1221
+pytz/zoneinfo/Asia/Beirut,sha256=_Z_2ZAg_iL9vU51JDB8CB04uXBDrf1kLIis-JnXaS2o,2154
+pytz/zoneinfo/Asia/Bishkek,sha256=do_4ki1JvSKupUrvlz9jRkHspDhdvk1D2IkByFskjJM,983
+pytz/zoneinfo/Asia/Brunei,sha256=BMMjwEmZ9rMoNpWfg8IrlLhRbMKbdW48padRF-FGolc,203
+pytz/zoneinfo/Asia/Calcutta,sha256=6Qw0EDbLcgMgDik8s7UTJn4QSjmllPNeGVJU5rwKF88,285
+pytz/zoneinfo/Asia/Chita,sha256=4ICOcAVAEWnP-cdf_YJu1_kCYnYPG2_vYfSbuNI-VwI,1221
+pytz/zoneinfo/Asia/Choibalsan,sha256=sJQAAjiT9VyG73dYhpYkq4tcmfITcPpiAa8YXsDlKag,949
+pytz/zoneinfo/Asia/Chongqing,sha256=ZP_C5DqUQ1oEPAQNHTr36S0DGtx453N68YYbqk7u8-Y,561
+pytz/zoneinfo/Asia/Chungking,sha256=ZP_C5DqUQ1oEPAQNHTr36S0DGtx453N68YYbqk7u8-Y,561
+pytz/zoneinfo/Asia/Colombo,sha256=HGea9jswIIgz7k20LTzbKtQyUun67IP5HvsZrmAJZJY,372
+pytz/zoneinfo/Asia/Dacca,sha256=3K5llGhcpCdZMMcJuomICVv7lZlDRpU4PUb5DtFx8l4,337
+pytz/zoneinfo/Asia/Damascus,sha256=6mcB6bxH1KsLqzb_LmJUT3tUDnq9_ScLFKoMFkcZy3A,2294
+pytz/zoneinfo/Asia/Dhaka,sha256=3K5llGhcpCdZMMcJuomICVv7lZlDRpU4PUb5DtFx8l4,337
+pytz/zoneinfo/Asia/Dili,sha256=ptjbacc9JK0pv2JpD-gHMglrwYNj9LMMIua0U0ZTMUc,227
+pytz/zoneinfo/Asia/Dubai,sha256=-ga0m3ua9Y6kSWREz2_VdtcVAkq83WrW3vxjBI7WNGs,165
+pytz/zoneinfo/Asia/Dushanbe,sha256=FUk9Tt_GimfRulcWamEvuOvA7FQ52YfZqQ2w88qMx6M,591
+pytz/zoneinfo/Asia/Famagusta,sha256=CFrcygd8ude5x6OEtfM_Dw0KYHoxpPPzq46KoHVxjjc,2028
+pytz/zoneinfo/Asia/Gaza,sha256=LPrVQ4DY43CC-2CnMLx6VEt4IP26P6Jw0HRlhbEbXHo,2422
+pytz/zoneinfo/Asia/Harbin,sha256=ZP_C5DqUQ1oEPAQNHTr36S0DGtx453N68YYbqk7u8-Y,561
+pytz/zoneinfo/Asia/Hebron,sha256=dXAP5ZeuPWJZLh6Y41hEz3owiuqElZxn1SKtdWaeKtw,2450
+pytz/zoneinfo/Asia/Ho_Chi_Minh,sha256=L5TXNg6-odIIn-JAyLTR8fKFiUFBNFwy0HzwZchbnm4,351
+pytz/zoneinfo/Asia/Hong_Kong,sha256=UcnFEc9S8hMWl9giVXni4TAhLPWX0H12XvwSt4AJHew,1203
+pytz/zoneinfo/Asia/Hovd,sha256=JUnOos7PNTi2VRKxD6XnaVR3NpuhsX_Pi18rIzVe1xw,891
+pytz/zoneinfo/Asia/Irkutsk,sha256=iUJZCVBjpfB4rNKJOr6g0zUZtccYYk_Gk0wTklx8Yj0,1243
+pytz/zoneinfo/Asia/Istanbul,sha256=2S0A_f7VxvyErJMMCPqK33AChA29IVkMr1o-SpMtMxk,1947
+pytz/zoneinfo/Asia/Jakarta,sha256=_WRgz6Zb6wxIXtMwpKjG4w4PJtDRzkhdrw-3a4NCBFA,355
+pytz/zoneinfo/Asia/Jayapura,sha256=ihzUd-L8HUVqG-Na10MyPE-YYwjVFj-xerqjTN4EJZs,221
+pytz/zoneinfo/Asia/Jerusalem,sha256=JUuWQmW5Tha0pJjw61Q5aN7CX0z4D7ops9OOSnda6Dc,2388
+pytz/zoneinfo/Asia/Kabul,sha256=ial7SvweHTQXDl79MnXm6QHtiw2i7Zt1e5urLXU8Sq8,208
+pytz/zoneinfo/Asia/Kamchatka,sha256=pBA0RbynKTKsMCmf2hJMZ_hgVUPemms-VceMMJ7QC64,1166
+pytz/zoneinfo/Asia/Karachi,sha256=iB-mWMTXUyfBwAkZdz8_UmEw0xsgxIub-KNI7akzhkk,379
+pytz/zoneinfo/Asia/Kashgar,sha256=AEXDJ5PxQOhePZZw1QZl98moDNa-bW3I3WVNQZHBPYA,165
+pytz/zoneinfo/Asia/Kathmandu,sha256=TUeW7rDSifOTSsNxvo9igIYZfGITEZUf-0EjglyRDWs,212
+pytz/zoneinfo/Asia/Katmandu,sha256=TUeW7rDSifOTSsNxvo9igIYZfGITEZUf-0EjglyRDWs,212
+pytz/zoneinfo/Asia/Khandyga,sha256=XYzE2tsE5Say9pg0cHDQkEE9aTuy2piFSLAGx_d-dmM,1271
+pytz/zoneinfo/Asia/Kolkata,sha256=6Qw0EDbLcgMgDik8s7UTJn4QSjmllPNeGVJU5rwKF88,285
+pytz/zoneinfo/Asia/Krasnoyarsk,sha256=nzRw4PI2AiK_Ge854b8U7TSDw0LGQy3ca5YuOOU2XwI,1207
+pytz/zoneinfo/Asia/Kuala_Lumpur,sha256=RfiIYo6sMEkSA8m5iUmyOyJzKZrgRs8ehGuDZwoq88k,383
+pytz/zoneinfo/Asia/Kuching,sha256=KsAtQ0aocINozixwW7CkorY-1PTLlsj7UUnQGQMEYTQ,483
+pytz/zoneinfo/Asia/Kuwait,sha256=rq9KPj8l0FBnnKn93WkMeA1IngNtTzk5_oV4sEZhc4w,165
+pytz/zoneinfo/Asia/Macao,sha256=MvAkRyRsrA2r052ItlyF5bh2FheRjI0jPwg0uIiH2Yk,1227
+pytz/zoneinfo/Asia/Macau,sha256=MvAkRyRsrA2r052ItlyF5bh2FheRjI0jPwg0uIiH2Yk,1227
+pytz/zoneinfo/Asia/Magadan,sha256=cqwjKQt8TlznM1w2DezAZuz1EjeOfLxPeSY19i9zkfQ,1222
+pytz/zoneinfo/Asia/Makassar,sha256=OhJtCqSTEU-u5n0opBVO5Bu-wQzcYPy9S_6aAhJXgOw,254
+pytz/zoneinfo/Asia/Manila,sha256=ujfq0kl1EhxcYSOrG-FS750aNaYUt1TT4bFuK4EcL_c,328
+pytz/zoneinfo/Asia/Muscat,sha256=-ga0m3ua9Y6kSWREz2_VdtcVAkq83WrW3vxjBI7WNGs,165
+pytz/zoneinfo/Asia/Nicosia,sha256=0Unm0IFT7HyGeQ7F3vTa_-klfysCgrulqFO6BD1plZU,2002
+pytz/zoneinfo/Asia/Novokuznetsk,sha256=vQGcqKdmYmWDdl73QPZTcyadnope1RPJ4oBgZelQu90,1165
+pytz/zoneinfo/Asia/Novosibirsk,sha256=ApL3s20HX2eIAno03HCa2RXdlLotVb9JvnZl7W1sM00,1221
+pytz/zoneinfo/Asia/Omsk,sha256=wxbEesfe7dJOkNPffqTwT6wuTSSTM6E9f0uFMAyzMCM,1207
+pytz/zoneinfo/Asia/Oral,sha256=iMjqD4LvDgyxN15v7CqyEdBDyBFaOlChwX1wHz2JiVQ,1005
+pytz/zoneinfo/Asia/Phnom_Penh,sha256=eYq0vh89N1j069URoQvtBu0ndEal6FPrtbF8WCKKpDw,199
+pytz/zoneinfo/Asia/Pontianak,sha256=inOXwuKtjKv1z_eliPZSIqjSt6whtuxhPeG1YpjU_BQ,353
+pytz/zoneinfo/Asia/Pyongyang,sha256=_-g3GnDAtfDX4XAktXH9jFouLUDmOovnjoOfvRpUDsE,237
+pytz/zoneinfo/Asia/Qatar,sha256=V0rFJdLHIrToJ5Wl28VzVowwCVZoY8ZZSeNp-7kOvjY,199
+pytz/zoneinfo/Asia/Qostanay,sha256=UGYEvmZfAAS9D6EMGd0n6-r_Az_zgTDSWLPeHzFLfu0,1011
+pytz/zoneinfo/Asia/Qyzylorda,sha256=aiSRxwoUbQ-TBHf2wcyaOhQb86j3jQpXwcQaSPnAtwU,1025
+pytz/zoneinfo/Asia/Rangoon,sha256=ZHuX-XVHr8dGJjrPQ5cW7b8jQUv3ihyd-VzN545mlMA,268
+pytz/zoneinfo/Asia/Riyadh,sha256=rq9KPj8l0FBnnKn93WkMeA1IngNtTzk5_oV4sEZhc4w,165
+pytz/zoneinfo/Asia/Saigon,sha256=L5TXNg6-odIIn-JAyLTR8fKFiUFBNFwy0HzwZchbnm4,351
+pytz/zoneinfo/Asia/Sakhalin,sha256=95AdPwOgSe0g9wdx67kKLDbjvY3FtpeVBoAWbJVco0w,1202
+pytz/zoneinfo/Asia/Samarkand,sha256=BBe6Gg_KlSQuS5hAyvvhZWmClcLJaFjnCNGC391HHQM,577
+pytz/zoneinfo/Asia/Seoul,sha256=LI9LsV3XcJC0l-KoQf8zI-y7rk-du57erS-N2Ptdi7Q,617
+pytz/zoneinfo/Asia/Shanghai,sha256=ZP_C5DqUQ1oEPAQNHTr36S0DGtx453N68YYbqk7u8-Y,561
+pytz/zoneinfo/Asia/Singapore,sha256=hIgr_LHMTWh3GgeG-MmLHBp-9anUxQcfMlKFtX8WvmU,383
+pytz/zoneinfo/Asia/Srednekolymsk,sha256=0DllW8q5VgXEMV5c_nLJElZsNpauvNhNACQpcgdqEl0,1208
+pytz/zoneinfo/Asia/Taipei,sha256=DMmQwOpPql25ue3Nf8vAKKT4em06D1Z9rHbLIitxixk,761
+pytz/zoneinfo/Asia/Tashkent,sha256=LS-yTxh0v1vmJoQ9I6fY-IERk7ukPmovVx2Ut_-b-Ig,591
+pytz/zoneinfo/Asia/Tbilisi,sha256=w6UNxgyn4BVVTF5WkAtxo_u7nnIY26makKQ5nRgifds,1035
+pytz/zoneinfo/Asia/Tehran,sha256=ATT50Q0hK6uSba5_WnOE3Px0OWxIwxaqK5Oi10P2A-M,2582
+pytz/zoneinfo/Asia/Tel_Aviv,sha256=JUuWQmW5Tha0pJjw61Q5aN7CX0z4D7ops9OOSnda6Dc,2388
+pytz/zoneinfo/Asia/Thimbu,sha256=uia8or5dtDkxVUZrcLwkjbTz9C7ZhLq0T4jlE4YvuvQ,203
+pytz/zoneinfo/Asia/Thimphu,sha256=uia8or5dtDkxVUZrcLwkjbTz9C7ZhLq0T4jlE4YvuvQ,203
+pytz/zoneinfo/Asia/Tokyo,sha256=oCueZgRNxcNcX3ZGdif9y6Su4cyVhga4XHdwlcrYLOs,309
+pytz/zoneinfo/Asia/Tomsk,sha256=77YgdJLxETRKjQjnaHHf54xBAqNywTDwQQmZ5v6Aq28,1221
+pytz/zoneinfo/Asia/Ujung_Pandang,sha256=OhJtCqSTEU-u5n0opBVO5Bu-wQzcYPy9S_6aAhJXgOw,254
+pytz/zoneinfo/Asia/Ulaanbaatar,sha256=uyQSzIBl0f2TXHrmUm3VPs1C9ro013hYmAlx6yUjh3Y,891
+pytz/zoneinfo/Asia/Ulan_Bator,sha256=uyQSzIBl0f2TXHrmUm3VPs1C9ro013hYmAlx6yUjh3Y,891
+pytz/zoneinfo/Asia/Urumqi,sha256=AEXDJ5PxQOhePZZw1QZl98moDNa-bW3I3WVNQZHBPYA,165
+pytz/zoneinfo/Asia/Ust-Nera,sha256=JAZhRAPdbOL9AL-WHOL8aZjxdZxLmGDNBGMCw9TKtR8,1252
+pytz/zoneinfo/Asia/Vientiane,sha256=eYq0vh89N1j069URoQvtBu0ndEal6FPrtbF8WCKKpDw,199
+pytz/zoneinfo/Asia/Vladivostok,sha256=Wokhgtj2nwUj992h7SyfB_fRNHAKfPNzhsf_oZpim8c,1208
+pytz/zoneinfo/Asia/Yakutsk,sha256=RVCIl52EvMrp2RG2hg2cjDSr9QhsscaAT-NV81xw7zc,1207
+pytz/zoneinfo/Asia/Yangon,sha256=ZHuX-XVHr8dGJjrPQ5cW7b8jQUv3ihyd-VzN545mlMA,268
+pytz/zoneinfo/Asia/Yekaterinburg,sha256=NzVc2DiPeyw0FdMHwSPQJF9k3tvWdtrETZiN58pyxLk,1243
+pytz/zoneinfo/Asia/Yerevan,sha256=k0WHtWQW_cBCjcEv8nP01cVPeTVDlf18lQ0_u6cin1o,1151
+pytz/zoneinfo/Atlantic/Azores,sha256=Q5Jqbe5h6WDi16jwK_B74gWWg58o0TgrdgB_cwbhzz0,3512
+pytz/zoneinfo/Atlantic/Bermuda,sha256=LNGKfMsnYvwImjTyzXrLhMOHHDu7qI67RbYNKvvI15I,2396
+pytz/zoneinfo/Atlantic/Canary,sha256=ymK9ufqphvNjDK3hzikN4GfkcR3QeCBiPKyVc6FjlbA,1897
+pytz/zoneinfo/Atlantic/Cape_Verde,sha256=ESQvE3deMI-lx9mG0yJLEsFX5KRl-7c6gD5O2h0Zm9Q,270
+pytz/zoneinfo/Atlantic/Faeroe,sha256=NibdZPZtapnYR_myIZnMdTaSKGsOBGgujj0_T2NvAzs,1815
+pytz/zoneinfo/Atlantic/Faroe,sha256=NibdZPZtapnYR_myIZnMdTaSKGsOBGgujj0_T2NvAzs,1815
+pytz/zoneinfo/Atlantic/Jan_Mayen,sha256=UdCERhj1JYpx3ojmilaRoyVoR4qMA1-PEv6hGwnpsJA,2228
+pytz/zoneinfo/Atlantic/Madeira,sha256=21Zcy0xRqDN3oY8jmjjO-LI7aC3G9mcS9ytaYg0g7ik,3503
+pytz/zoneinfo/Atlantic/Reykjavik,sha256=mSkaRBGZLeUrm88EeHcaWnEd35Wn-Ag2G10HtI3G2fg,1162
+pytz/zoneinfo/Atlantic/South_Georgia,sha256=QZ72fRKp6Kgvy7DfyHGht1MVnzGgSPujLQd4XMjNrrc,164
+pytz/zoneinfo/Atlantic/St_Helena,sha256=0u-sTl8j2IyV1ywdtCgHFw9S9D3ZiiBa9akqkbny2Zc,148
+pytz/zoneinfo/Atlantic/Stanley,sha256=exKMLw-P952wS1FTxVjnUU1mkD2OvKUDwtDt8IGgf8w,1214
+pytz/zoneinfo/Australia/ACT,sha256=QsOFdYWxbbL4_9R7oZ-qYPRzNA3o1P6TIOp76GFgWQY,2190
+pytz/zoneinfo/Australia/Adelaide,sha256=ld2EbxU75oVgmPe703z-I6aqLg0Kmv62ZcCGzkT5R20,2208
+pytz/zoneinfo/Australia/Brisbane,sha256=eW6Qzze2t0-speJmmvt1JMzbkSadIKdE84XHc7JUtGc,419
+pytz/zoneinfo/Australia/Broken_Hill,sha256=3k_3ljTvS5GSfo7Xh6w71UgR3aAwYPBsnCJ-mlEYCqQ,2229
+pytz/zoneinfo/Australia/Canberra,sha256=QsOFdYWxbbL4_9R7oZ-qYPRzNA3o1P6TIOp76GFgWQY,2190
+pytz/zoneinfo/Australia/Currie,sha256=GLQSzgIfsWxOvmKOrhpfofWqINQf6h36NYy3mcq6gcg,2358
+pytz/zoneinfo/Australia/Darwin,sha256=fn0IZhIW98FAnzLig-_GBtW5LA54jajdeeUzg4tCGvo,325
+pytz/zoneinfo/Australia/Eucla,sha256=LxEuFWyMse_cALVtRWCkf6sIIEk13jQ4JXW8k2agSd8,470
+pytz/zoneinfo/Australia/Hobart,sha256=GLQSzgIfsWxOvmKOrhpfofWqINQf6h36NYy3mcq6gcg,2358
+pytz/zoneinfo/Australia/LHI,sha256=Luf0Lx_iJHuh3kZd4LxRjf36tLF5-wW2UFMVNKNT7gg,1860
+pytz/zoneinfo/Australia/Lindeman,sha256=xM6Udx22oLNoLR1Y7GQhHOYov8nw3xQNqgc_NVQ2JK4,475
+pytz/zoneinfo/Australia/Lord_Howe,sha256=Luf0Lx_iJHuh3kZd4LxRjf36tLF5-wW2UFMVNKNT7gg,1860
+pytz/zoneinfo/Australia/Melbourne,sha256=lvx_MQcunMc6u2smIrl8X427bLsXvjkgpCSdjYCTNBM,2190
+pytz/zoneinfo/Australia/NSW,sha256=QsOFdYWxbbL4_9R7oZ-qYPRzNA3o1P6TIOp76GFgWQY,2190
+pytz/zoneinfo/Australia/North,sha256=fn0IZhIW98FAnzLig-_GBtW5LA54jajdeeUzg4tCGvo,325
+pytz/zoneinfo/Australia/Perth,sha256=Al1DOUh4U_ofMUQSeVlzSyD3x7SUjP9dchSaBUGmeWg,446
+pytz/zoneinfo/Australia/Queensland,sha256=eW6Qzze2t0-speJmmvt1JMzbkSadIKdE84XHc7JUtGc,419
+pytz/zoneinfo/Australia/South,sha256=ld2EbxU75oVgmPe703z-I6aqLg0Kmv62ZcCGzkT5R20,2208
+pytz/zoneinfo/Australia/Sydney,sha256=QsOFdYWxbbL4_9R7oZ-qYPRzNA3o1P6TIOp76GFgWQY,2190
+pytz/zoneinfo/Australia/Tasmania,sha256=GLQSzgIfsWxOvmKOrhpfofWqINQf6h36NYy3mcq6gcg,2358
+pytz/zoneinfo/Australia/Victoria,sha256=lvx_MQcunMc6u2smIrl8X427bLsXvjkgpCSdjYCTNBM,2190
+pytz/zoneinfo/Australia/West,sha256=Al1DOUh4U_ofMUQSeVlzSyD3x7SUjP9dchSaBUGmeWg,446
+pytz/zoneinfo/Australia/Yancowinna,sha256=3k_3ljTvS5GSfo7Xh6w71UgR3aAwYPBsnCJ-mlEYCqQ,2229
+pytz/zoneinfo/Brazil/Acre,sha256=17onkm8P_VgMkErjK9rr0qwNni7qp9tgcUZ93g3ltOs,628
+pytz/zoneinfo/Brazil/DeNoronha,sha256=3R4lLV8jg5SljhC5OVVCk51Y77Efjo6zCe-oppg_FFo,716
+pytz/zoneinfo/Brazil/East,sha256=cO3VGekMGdSf1y4f_UgkpDMRes26-l1oGUoDglIiUQg,1444
+pytz/zoneinfo/Brazil/West,sha256=lp6RlkcXJQ7mSsKqnEgC8svJVrFDJk_16xxvfpNSpK4,604
+pytz/zoneinfo/CET,sha256=o4omkrM_IsITxooUo8krM921XfBdvRs9JhwGXGd-Ypg,2094
+pytz/zoneinfo/CST6CDT,sha256=WGbtZ1FwjRX6Jeo_TCXKsfeDs4V9uhXGJfcnLJhk3s0,2310
+pytz/zoneinfo/Canada/Atlantic,sha256=TZpmc5PwWoLfTfQoQ_b3U17BE2iVKSeNkR0Ho8mbTn8,3424
+pytz/zoneinfo/Canada/Central,sha256=7P-_YQrneFcon7QKSTOnkiGjEppFDn3Z48MJ1qq8VBw,2868
+pytz/zoneinfo/Canada/Eastern,sha256=ggOSzbHkmfgu9wTQzP0MUKsrKMbgveuAeThh1eFl1a0,3494
+pytz/zoneinfo/Canada/Mountain,sha256=-TkIfc3QlvaCf0p8COZ43Y1HRBAl-nARUi-JdXeK1vE,2332
+pytz/zoneinfo/Canada/Newfoundland,sha256=r1-17uKv27eZ3JsVkw_DLZQbo6wvjuuVu7C2pDsmOgI,3655
+pytz/zoneinfo/Canada/Pacific,sha256=sknKH0jSPWam-DHfM35qXs8Nam7d5TFlkUI9Sgxryyg,2892
+pytz/zoneinfo/Canada/Saskatchewan,sha256=yjqT08pHbICYe83H8JmtaDBvCFqRv7Tfze3Y8xuXukw,980
+pytz/zoneinfo/Canada/Yukon,sha256=Kfv607qGHJxXGBP1nPJyNg2_duWrmxhZGFQr82ukgq8,1614
+pytz/zoneinfo/Chile/Continental,sha256=GB14PW0xABV283dXc8qL-nnDW-ViFUR3bne7sg0Aido,2529
+pytz/zoneinfo/Chile/EasterIsland,sha256=paHp1QRXIa02kgd0-4V6vWXdqcwheow-hJQD9VqacfQ,2233
+pytz/zoneinfo/Cuba,sha256=HUQeAuKBsEkI5SLZjqynXICOUVOajkKzKH5r-Ov5Odc,2416
+pytz/zoneinfo/EET,sha256=gGVsW5-qnI7ty8vqVK1ADWhunrvAT8kUC79GUf-_7G8,1908
+pytz/zoneinfo/EST,sha256=uKE_VPKfxGyYEsyqV_DdE2MW55vs_qUioOdIn5Goobc,114
+pytz/zoneinfo/EST5EDT,sha256=fwzEMT1jgnY2dDjd0EqDl26_7LC-oF48Bd4ng5311H0,2310
+pytz/zoneinfo/Egypt,sha256=L6zLQLnQtLkEELOGfm6USaHY33qAEPgGV822-iU1vxc,1955
+pytz/zoneinfo/Eire,sha256=-JSA3vsi44F1DE8supVjSppH2Vpp12WjJI0_COtAmqU,3492
+pytz/zoneinfo/Etc/GMT,sha256=bZ83iIPAefhsA4elVHqSxEmGnYBuB94QCEqwTwJJAY0,114
+pytz/zoneinfo/Etc/GMT+0,sha256=bZ83iIPAefhsA4elVHqSxEmGnYBuB94QCEqwTwJJAY0,114
+pytz/zoneinfo/Etc/GMT+1,sha256=1Qzl2X9rQ_RXEf11yH09wQZCr_ph6UdFP7E0yu9s-IQ,116
+pytz/zoneinfo/Etc/GMT+10,sha256=JEQyQyQlkC0o6ZTdeVjZhCIOh6cK5TF7H00Pkls-sUI,117
+pytz/zoneinfo/Etc/GMT+11,sha256=tWvcvYMFCaE60nJVvDrrov7stJvs1KQYOyrhl3dzcUs,117
+pytz/zoneinfo/Etc/GMT+12,sha256=b70HEhErq8IJmq8x7cOZy4eR__3fq5uHHpjvPBEHqMA,117
+pytz/zoneinfo/Etc/GMT+2,sha256=T6Ep5zhslBKbYaECFUB6gUKh3iTZPyMoW1kjhonxrUo,116
+pytz/zoneinfo/Etc/GMT+3,sha256=QGoYrE04bUJ-OzL37dt2MZT5FxWNLpJDPVXgJbstYZA,116
+pytz/zoneinfo/Etc/GMT+4,sha256=RWrkNki-wV7X-coe0VvufBe6LrWVpkPJgia5QQYEnBo,116
+pytz/zoneinfo/Etc/GMT+5,sha256=oRmeC41dgYXT-zzyZIRKXN9IvdL2Da5nTuwmG2_prIA,116
+pytz/zoneinfo/Etc/GMT+6,sha256=d6dAnwiejyFI2n7AzFlFW0aFAT6zYNEjBIEG0uu0sbQ,116
+pytz/zoneinfo/Etc/GMT+7,sha256=TqjYbzd0YHpx1wisFg08J19wTpg6ztJLLongZY_lozs,116
+pytz/zoneinfo/Etc/GMT+8,sha256=th_8bIMmYgRPCesBrbmBhRr0jQO7whd70LiY9HfwJyk,116
+pytz/zoneinfo/Etc/GMT+9,sha256=Qq5E6iUS7JMJIymT7YoqlI8MtqtVy0mr9t6zWFtWc9Y,116
+pytz/zoneinfo/Etc/GMT-0,sha256=bZ83iIPAefhsA4elVHqSxEmGnYBuB94QCEqwTwJJAY0,114
+pytz/zoneinfo/Etc/GMT-1,sha256=73F1eU8uAQGP3mcoB2q99CjfManGFHk3fefljp9pYC4,117
+pytz/zoneinfo/Etc/GMT-10,sha256=fKWWNwLBOp1OkKjtc1w9LIXJR1mTTD-JdvYflRy1IrU,118
+pytz/zoneinfo/Etc/GMT-11,sha256=D2S79n6psa9t9_2vj5wIrFpHH2OJLcCKP6vtwzFZINY,118
+pytz/zoneinfo/Etc/GMT-12,sha256=me4V6lmWI8gSr8H7N41WAD0Eww1anh_EF34Qr9UoSnI,118
+pytz/zoneinfo/Etc/GMT-13,sha256=xbmbG1BQA6Dlpa_iUwEGyJxW4a3t6lmawdPKAE8vbR8,118
+pytz/zoneinfo/Etc/GMT-14,sha256=PpXoREBh02qFpvxVMj2pV9IAzSQvBE7XPvnN9qSZ-Kc,118
+pytz/zoneinfo/Etc/GMT-2,sha256=ve6hWLdeuiLhqagaWLqMD6HNybS1chRwjudfTZ2bYBE,117
+pytz/zoneinfo/Etc/GMT-3,sha256=N77jILanuLDVkLsdujXZSu-dsHiwN5MIpwh7fMUifso,117
+pytz/zoneinfo/Etc/GMT-4,sha256=LSko5fVHqPl5zfwjGqkbMa_OFnvtpT6o_4xYxNz9n5o,117
+pytz/zoneinfo/Etc/GMT-5,sha256=uLaSR5Mb18HRTsAA5SveY9PAJ97dO8QzIWqNXe3wZb4,117
+pytz/zoneinfo/Etc/GMT-6,sha256=JSN-RUAphJ50fpIv7cYC6unrtrz9S1Wma-piDHlGe7c,117
+pytz/zoneinfo/Etc/GMT-7,sha256=vVAOF8xU9T9ESnw68c0SFXpcvkoopaiwTR0zbefHHSU,117
+pytz/zoneinfo/Etc/GMT-8,sha256=S7xFQbFMpiDZy4v5L4D9fCrjRIzzoLC5p8Se23xi7us,117
+pytz/zoneinfo/Etc/GMT-9,sha256=I5vHNmUK-Yyg_S1skFN44VGVzBgktjFgVQiDIKO4aMI,117
+pytz/zoneinfo/Etc/GMT0,sha256=bZ83iIPAefhsA4elVHqSxEmGnYBuB94QCEqwTwJJAY0,114
+pytz/zoneinfo/Etc/Greenwich,sha256=bZ83iIPAefhsA4elVHqSxEmGnYBuB94QCEqwTwJJAY0,114
+pytz/zoneinfo/Etc/UCT,sha256=i4WEZ5GrLIpUY8g6W-PAQ-JXDXRIQ01BOYlp7Ufj5vI,114
+pytz/zoneinfo/Etc/UTC,sha256=i4WEZ5GrLIpUY8g6W-PAQ-JXDXRIQ01BOYlp7Ufj5vI,114
+pytz/zoneinfo/Etc/Universal,sha256=i4WEZ5GrLIpUY8g6W-PAQ-JXDXRIQ01BOYlp7Ufj5vI,114
+pytz/zoneinfo/Etc/Zulu,sha256=i4WEZ5GrLIpUY8g6W-PAQ-JXDXRIQ01BOYlp7Ufj5vI,114
+pytz/zoneinfo/Europe/Amsterdam,sha256=pw8HngVt3bU5QrRzu70qOmf69TIyklkglvVUte9ntKo,2910
+pytz/zoneinfo/Europe/Andorra,sha256=gTB5jCQmvIw3JJi1_vAcOYuhtzPBR6RXUx9gVV6p6ug,1742
+pytz/zoneinfo/Europe/Astrakhan,sha256=ywtzL92KVfoybOmAhE9eHqmMcvJZm5b0js5GDdWIJEQ,1165
+pytz/zoneinfo/Europe/Athens,sha256=XDY-FBUddRyQHN8GxQLZ4awjuOlWlzlUdjv7OdXFNzA,2262
+pytz/zoneinfo/Europe/Belfast,sha256=xp08wV44TZMmAdBqppttDChQAb8tRN03GcEht99RYtY,3648
+pytz/zoneinfo/Europe/Belgrade,sha256=OpWtsGFWBE_S-mYoQcAmjCta9HwbGQANnSmVY9OHCTo,1920
+pytz/zoneinfo/Europe/Berlin,sha256=XuR19xoPwaMvrrhJ-MOcbnqmbW1B7HQrl7OnQ2s7BwE,2298
+pytz/zoneinfo/Europe/Bratislava,sha256=G9fdhUXmzx651BnyZ6V7AOYIV9EV5aMJMm44eJaLLZw,2301
+pytz/zoneinfo/Europe/Brussels,sha256=gS9Vrrbozend9HhuFetCVrIegs9fXSjaG60X2UVwysA,2933
+pytz/zoneinfo/Europe/Bucharest,sha256=nfg6-bU2D6DMEWb9EMIBR5kxnNsbDSx0UKfHH_ZzqFc,2184
+pytz/zoneinfo/Europe/Budapest,sha256=lNwqxWciBvw9ei81VQwIKHbC_ZDJjpgHU6HFg4wCUkY,2368
+pytz/zoneinfo/Europe/Busingen,sha256=K5QY7Ujj2VUchKR4bhhb0hgdAJhmwED71ykXDQOGKe8,1909
+pytz/zoneinfo/Europe/Chisinau,sha256=p1J_rqFE13pL8cpBRrEFe-teCI8f0fKK4uTUy_4diF4,2390
+pytz/zoneinfo/Europe/Copenhagen,sha256=q7iAbkd7y9QvbAi6XGZEUOTwNDCRYWRu9VQCxUrZ01U,2137
+pytz/zoneinfo/Europe/Dublin,sha256=-JSA3vsi44F1DE8supVjSppH2Vpp12WjJI0_COtAmqU,3492
+pytz/zoneinfo/Europe/Gibraltar,sha256=egOcazf2u1njGZ0tDj-f1NzZT_K5rpUKSqtShxO7U6c,3052
+pytz/zoneinfo/Europe/Guernsey,sha256=xp08wV44TZMmAdBqppttDChQAb8tRN03GcEht99RYtY,3648
+pytz/zoneinfo/Europe/Helsinki,sha256=GEkB7LsVhmegt7YuuWheCDvDGC7b7Nw9bTdDGS9qkJc,1900
+pytz/zoneinfo/Europe/Isle_of_Man,sha256=xp08wV44TZMmAdBqppttDChQAb8tRN03GcEht99RYtY,3648
+pytz/zoneinfo/Europe/Istanbul,sha256=2S0A_f7VxvyErJMMCPqK33AChA29IVkMr1o-SpMtMxk,1947
+pytz/zoneinfo/Europe/Jersey,sha256=xp08wV44TZMmAdBqppttDChQAb8tRN03GcEht99RYtY,3648
+pytz/zoneinfo/Europe/Kaliningrad,sha256=s7GXSe1YvMcs7AiUhHNTA6I4nAOQn_Kmz_ZqJYO-LMM,1493
+pytz/zoneinfo/Europe/Kiev,sha256=iVkTPFkl2tADYapa1HASlaV3tT2VsJpTPTTJC_9HtAk,2088
+pytz/zoneinfo/Europe/Kirov,sha256=Sr4HEUwk3tPTXioeCLhvlgKbCAFU7Gy2UB3f--uWLDc,1153
+pytz/zoneinfo/Europe/Lisbon,sha256=mpUpxGexMhbOBImDLSQs5-GAk7pm7tg4qYW044Kkle0,3497
+pytz/zoneinfo/Europe/Ljubljana,sha256=OpWtsGFWBE_S-mYoQcAmjCta9HwbGQANnSmVY9OHCTo,1920
+pytz/zoneinfo/Europe/London,sha256=xp08wV44TZMmAdBqppttDChQAb8tRN03GcEht99RYtY,3648
+pytz/zoneinfo/Europe/Luxembourg,sha256=974Dvf_X1QISKG1zIiTJJIfGavobO21HUVS-HfysOcY,2946
+pytz/zoneinfo/Europe/Madrid,sha256=MTTMnrbDDtexRikd72-FbQEpCZjc63_UtBIiDomD95c,2614
+pytz/zoneinfo/Europe/Malta,sha256=xRwBfrV8hOihGtqcek5_B6l5hjc206g3yfbEWXIaUis,2620
+pytz/zoneinfo/Europe/Mariehamn,sha256=GEkB7LsVhmegt7YuuWheCDvDGC7b7Nw9bTdDGS9qkJc,1900
+pytz/zoneinfo/Europe/Minsk,sha256=mn86zdrNWpJYDfE51Iy9n1-Zi2piTyb9EPaS2A-uGJQ,1321
+pytz/zoneinfo/Europe/Monaco,sha256=50uVZXYXXqfnr-K4tsSNl26CZbRju65C-STp818wX84,2944
+pytz/zoneinfo/Europe/Moscow,sha256=KmkofRcj6T8Ph28PJChm8JVp13uRvef6TZ0GuPzUiDw,1535
+pytz/zoneinfo/Europe/Nicosia,sha256=0Unm0IFT7HyGeQ7F3vTa_-klfysCgrulqFO6BD1plZU,2002
+pytz/zoneinfo/Europe/Oslo,sha256=UdCERhj1JYpx3ojmilaRoyVoR4qMA1-PEv6hGwnpsJA,2228
+pytz/zoneinfo/Europe/Paris,sha256=q3ehSIot1GZ6TyMHIjbg0oRf4ghAXuwbSDSYVim6evg,2962
+pytz/zoneinfo/Europe/Podgorica,sha256=OpWtsGFWBE_S-mYoQcAmjCta9HwbGQANnSmVY9OHCTo,1920
+pytz/zoneinfo/Europe/Prague,sha256=G9fdhUXmzx651BnyZ6V7AOYIV9EV5aMJMm44eJaLLZw,2301
+pytz/zoneinfo/Europe/Riga,sha256=hJ2_0m1taW9IuA-hMyP5n-WX7YOrR0heKszJhgljRWk,2198
+pytz/zoneinfo/Europe/Rome,sha256=-X5F_d3Dz0kBRWiUTXUN-fgeCHbUEHLaaHIwEPZEdUQ,2641
+pytz/zoneinfo/Europe/Samara,sha256=z2innqSZ8_lkEy8cIyF9JM_FfnO2sWZaqeFqOh8pD7M,1215
+pytz/zoneinfo/Europe/San_Marino,sha256=-X5F_d3Dz0kBRWiUTXUN-fgeCHbUEHLaaHIwEPZEdUQ,2641
+pytz/zoneinfo/Europe/Sarajevo,sha256=OpWtsGFWBE_S-mYoQcAmjCta9HwbGQANnSmVY9OHCTo,1920
+pytz/zoneinfo/Europe/Saratov,sha256=BMej49HlQG24CWCh5VOENrB3jPuJPScPszRtb7MrJ3I,1183
+pytz/zoneinfo/Europe/Simferopol,sha256=_M6LXB5Rqh932nKIJotGjT8YNszAOb7RjHN5ng-uW1Y,1453
+pytz/zoneinfo/Europe/Skopje,sha256=OpWtsGFWBE_S-mYoQcAmjCta9HwbGQANnSmVY9OHCTo,1920
+pytz/zoneinfo/Europe/Sofia,sha256=hCQKXfMNrnA5xHNw_uzTjKzVw4-Bvsq5oGO4yUCv5tY,2077
+pytz/zoneinfo/Europe/Stockholm,sha256=Xgp4GSh8-pzdeJeP8TQ20jWDDUj17R69h6RYTbLYd2g,1909
+pytz/zoneinfo/Europe/Tallinn,sha256=4a6JC0aIpMzqIV7O35zoG0LLJwkQq5AoXZ2ivkic6-w,2148
+pytz/zoneinfo/Europe/Tirane,sha256=ztlZyCS9WCXeVW8nBun3Tyi5HUY0EtFbiBbEc1gucuw,2084
+pytz/zoneinfo/Europe/Tiraspol,sha256=p1J_rqFE13pL8cpBRrEFe-teCI8f0fKK4uTUy_4diF4,2390
+pytz/zoneinfo/Europe/Ulyanovsk,sha256=nFsgcVTmTiiFzHtyJDRnO-3H4GRAfAeceb6b2jFHLUQ,1267
+pytz/zoneinfo/Europe/Uzhgorod,sha256=TIG1rC4QR7nz-vO1VtmN9mDMVjKPDKi7mEB9KpfJOBA,2050
+pytz/zoneinfo/Europe/Vaduz,sha256=K5QY7Ujj2VUchKR4bhhb0hgdAJhmwED71ykXDQOGKe8,1909
+pytz/zoneinfo/Europe/Vatican,sha256=-X5F_d3Dz0kBRWiUTXUN-fgeCHbUEHLaaHIwEPZEdUQ,2641
+pytz/zoneinfo/Europe/Vienna,sha256=ZmI3kADE6bnrJEccqh73XXBY36L1G4DkpiTQImtNrUk,2200
+pytz/zoneinfo/Europe/Vilnius,sha256=UFzRX3orCTB8d9IzlxJPy5eUA2oBPuCu1UJl-2D7C3U,2162
+pytz/zoneinfo/Europe/Volgograd,sha256=XZNEUXwnmGdOTld_9Lug2CFfXbFCJFZC45nOMb59FRk,1165
+pytz/zoneinfo/Europe/Warsaw,sha256=TiLDPbeVF0ckgLVEkaSeDaKZ8wctdJDOl_HE_Wd5rKs,2654
+pytz/zoneinfo/Europe/Zagreb,sha256=OpWtsGFWBE_S-mYoQcAmjCta9HwbGQANnSmVY9OHCTo,1920
+pytz/zoneinfo/Europe/Zaporozhye,sha256=V0dhGl3gET8OftMezf8CVy-W00Z7FtuEev5TjI2Rnyw,2106
+pytz/zoneinfo/Europe/Zurich,sha256=K5QY7Ujj2VUchKR4bhhb0hgdAJhmwED71ykXDQOGKe8,1909
+pytz/zoneinfo/Factory,sha256=aFFlKx93HXoJoF4SSuTlD8cZtJA-ne5oKzAa6eX2V4k,116
+pytz/zoneinfo/GB,sha256=xp08wV44TZMmAdBqppttDChQAb8tRN03GcEht99RYtY,3648
+pytz/zoneinfo/GB-Eire,sha256=xp08wV44TZMmAdBqppttDChQAb8tRN03GcEht99RYtY,3648
+pytz/zoneinfo/GMT,sha256=bZ83iIPAefhsA4elVHqSxEmGnYBuB94QCEqwTwJJAY0,114
+pytz/zoneinfo/GMT+0,sha256=bZ83iIPAefhsA4elVHqSxEmGnYBuB94QCEqwTwJJAY0,114
+pytz/zoneinfo/GMT-0,sha256=bZ83iIPAefhsA4elVHqSxEmGnYBuB94QCEqwTwJJAY0,114
+pytz/zoneinfo/GMT0,sha256=bZ83iIPAefhsA4elVHqSxEmGnYBuB94QCEqwTwJJAY0,114
+pytz/zoneinfo/Greenwich,sha256=bZ83iIPAefhsA4elVHqSxEmGnYBuB94QCEqwTwJJAY0,114
+pytz/zoneinfo/HST,sha256=1YkCncvgL9Z5CmUo4Vk8VbQmgA7ZAQ0PtE37j1yOli8,115
+pytz/zoneinfo/Hongkong,sha256=UcnFEc9S8hMWl9giVXni4TAhLPWX0H12XvwSt4AJHew,1203
+pytz/zoneinfo/Iceland,sha256=mSkaRBGZLeUrm88EeHcaWnEd35Wn-Ag2G10HtI3G2fg,1162
+pytz/zoneinfo/Indian/Antananarivo,sha256=yJsuJTqJJqbOz37_NOS_zbf-JNr_IthHGMMN7sDqSWg,265
+pytz/zoneinfo/Indian/Chagos,sha256=23B26pwwK0gxW7TP76GltyY-RU_o6RGGSrF93pF7S1E,199
+pytz/zoneinfo/Indian/Christmas,sha256=J4I0WDX_LYAJxsx2vU0EdxFJQKRE-rRL1UvNQv09pCs,165
+pytz/zoneinfo/Indian/Cocos,sha256=PX-k8JpghajjvhljtBjWozaiu9NhUSpVeoACy2cAxN8,174
+pytz/zoneinfo/Indian/Comoro,sha256=yJsuJTqJJqbOz37_NOS_zbf-JNr_IthHGMMN7sDqSWg,265
+pytz/zoneinfo/Indian/Kerguelen,sha256=oIvd6bmQFMLUefoBn4c1fQTOAawGcrPcmge2jU7BsYo,165
+pytz/zoneinfo/Indian/Mahe,sha256=ZNXjaoL_o6572xXgsgSmbd5D_SkaCaayolpSN1je82w,165
+pytz/zoneinfo/Indian/Maldives,sha256=dUQBbrmoB3odWsMt3K1YUnB447A6nkW3aR1aHzdLF7M,199
+pytz/zoneinfo/Indian/Mauritius,sha256=k6vWUVcfU3gS1K12e_aMw6BeSdMvdLyCJRCAL7CD0go,241
+pytz/zoneinfo/Indian/Mayotte,sha256=yJsuJTqJJqbOz37_NOS_zbf-JNr_IthHGMMN7sDqSWg,265
+pytz/zoneinfo/Indian/Reunion,sha256=lHnSVh7CYCuDBEM4dYsWDk006BSAznkCPxjiTtL_WiI,165
+pytz/zoneinfo/Iran,sha256=ATT50Q0hK6uSba5_WnOE3Px0OWxIwxaqK5Oi10P2A-M,2582
+pytz/zoneinfo/Israel,sha256=JUuWQmW5Tha0pJjw61Q5aN7CX0z4D7ops9OOSnda6Dc,2388
+pytz/zoneinfo/Jamaica,sha256=wlagieUPRf5-beie-h7QsONbNzjGsm8vMs8uf28pw28,482
+pytz/zoneinfo/Japan,sha256=oCueZgRNxcNcX3ZGdif9y6Su4cyVhga4XHdwlcrYLOs,309
+pytz/zoneinfo/Kwajalein,sha256=L4nH3qxv5EBKVRxYt67b9IfZfBzg5KJk19iu7x3oBMk,316
+pytz/zoneinfo/Libya,sha256=W1dptGD70T7ppGoo0fczFQeDiIp0nultLNPV66MwB2c,625
+pytz/zoneinfo/MET,sha256=i3CKSuP4N_PAj7o-Cbk8zPEdFs0CWWBCAfg2JXDx5V8,2094
+pytz/zoneinfo/MST,sha256=6IQwvtT12Bz1pTiqFuoVxNY-4ViS7ZrYHo5nPWwzKPw,114
+pytz/zoneinfo/MST7MDT,sha256=910Ek32FKoSyZWY_H19VHaVvqb-JsvnWTOOHvhrKsE0,2310
+pytz/zoneinfo/Mexico/BajaNorte,sha256=OHHtvy3J70z6wvKBHgPqMEnGs6SXp8fkf0WX9ZiOODk,2342
+pytz/zoneinfo/Mexico/BajaSur,sha256=aIyre-8trAXSHtqxbuu6gDDkWCUjI_SdAKPIjz74M2E,1526
+pytz/zoneinfo/Mexico/General,sha256=DSpTe5TT0KBsxGx79Rs7ah-zJpiGOJKwPjztovRN0b4,1584
+pytz/zoneinfo/NZ,sha256=gADjoyPo_QISQU6UJrAgcHp3HDaMoOFRdH-d23uBSyc,2437
+pytz/zoneinfo/NZ-CHAT,sha256=lkVqaSF1WWpv_B2K-k2uJp2setRVK6XbjsQ38gDGVEg,2068
+pytz/zoneinfo/Navajo,sha256=6_yPo1_mvnt9DgpPzr0QdHsjdsfUG6ALnagQLML1DSM,2444
+pytz/zoneinfo/PRC,sha256=ZP_C5DqUQ1oEPAQNHTr36S0DGtx453N68YYbqk7u8-Y,561
+pytz/zoneinfo/PST8PDT,sha256=Q7TCLkE69a6g7mPoPAkqhg-0dStyiAC0jVlM72KG_R8,2310
+pytz/zoneinfo/Pacific/Apia,sha256=cm6S6D0VdHsdqLJkupUJH6pLynao5QlwpMmRI9m5ZH4,612
+pytz/zoneinfo/Pacific/Auckland,sha256=gADjoyPo_QISQU6UJrAgcHp3HDaMoOFRdH-d23uBSyc,2437
+pytz/zoneinfo/Pacific/Bougainville,sha256=ZKDa_S_2gSlmOWizV1DqxH3wbE58rfK1vKZHZqrrtjI,268
+pytz/zoneinfo/Pacific/Chatham,sha256=lkVqaSF1WWpv_B2K-k2uJp2setRVK6XbjsQ38gDGVEg,2068
+pytz/zoneinfo/Pacific/Chuuk,sha256=6IYDKViuRDC_RVx1AJOxazVET6cZtdv_LFE6xbtGItI,269
+pytz/zoneinfo/Pacific/Easter,sha256=paHp1QRXIa02kgd0-4V6vWXdqcwheow-hJQD9VqacfQ,2233
+pytz/zoneinfo/Pacific/Efate,sha256=pG4NMVeM3hBJTZnZmqeLqz3Q5oCggTW4HO-R9Fe926A,538
+pytz/zoneinfo/Pacific/Enderbury,sha256=UvE7fVt5vGS7loKX10ibhNilliiNqwvQAXV9NRhYhgM,234
+pytz/zoneinfo/Pacific/Fakaofo,sha256=gow-SgE5r5c8J_Ag5nvJ5SUPDg6yH8pth_a-QLDcPv8,200
+pytz/zoneinfo/Pacific/Fiji,sha256=W6rxVK44zQaoLWLexVRoav16jMcuWYbNskIa5Ld9H-Q,1077
+pytz/zoneinfo/Pacific/Funafuti,sha256=P-XYwlWQpWvS3Q_TYFe37BrgxKJy5tg7PHEQNCDGv5U,166
+pytz/zoneinfo/Pacific/Galapagos,sha256=MdtlC-ffp8reICzDxsQ8tWMsTkq5ZcN-j3OyyhjokV8,238
+pytz/zoneinfo/Pacific/Gambier,sha256=z6eYF8sszLjkfpqmWnbBBAUB-ibaR5nodKaAYbvXOe0,164
+pytz/zoneinfo/Pacific/Guadalcanal,sha256=6GX-XpxcCyA64qUMdxJMFMq4sPk0ZjhexqGbryzfgjE,166
+pytz/zoneinfo/Pacific/Guam,sha256=Ex9znmf6rNfGze6gNpZJCMr1TT4rkl2SnrhecrdJufI,494
+pytz/zoneinfo/Pacific/Honolulu,sha256=fwPRv1Jk56sCOi75uZfd_Iy2k2aSQHx3B2K5xUlSPzM,329
+pytz/zoneinfo/Pacific/Johnston,sha256=fwPRv1Jk56sCOi75uZfd_Iy2k2aSQHx3B2K5xUlSPzM,329
+pytz/zoneinfo/Pacific/Kanton,sha256=UvE7fVt5vGS7loKX10ibhNilliiNqwvQAXV9NRhYhgM,234
+pytz/zoneinfo/Pacific/Kiritimati,sha256=VHR3iuwiv3tx65WtitVHCoQEg3VJd812VZ5djuSyUxc,238
+pytz/zoneinfo/Pacific/Kosrae,sha256=Vm5AKI6NvuYSz58s8922WNIiWoqPcix2JOJOix1mlSU,351
+pytz/zoneinfo/Pacific/Kwajalein,sha256=L4nH3qxv5EBKVRxYt67b9IfZfBzg5KJk19iu7x3oBMk,316
+pytz/zoneinfo/Pacific/Majuro,sha256=Dwqh7gXoz7Duwu1n7XF8yEjhM4ULEs42LSQyy7F-qzQ,310
+pytz/zoneinfo/Pacific/Marquesas,sha256=uzsjVolutGRXp_FRnvXoU0ApDEb4ZaYoz_r60D7jufg,173
+pytz/zoneinfo/Pacific/Midway,sha256=fCYrYphYY6rUfxOw712y5cyRe104AC3pouqD3bCINFg,175
+pytz/zoneinfo/Pacific/Nauru,sha256=oGxocYsqssZ_EeQHf3cUP5cg0qtqzx1BzoEjVWjE_7g,252
+pytz/zoneinfo/Pacific/Niue,sha256=Kc0BRgsu7g2QTR9e37DuqRo1sUCWDFMowAQ4wO6YNQ0,203
+pytz/zoneinfo/Pacific/Norfolk,sha256=CdEXM9SKYC9Wn7aMxD2sV5i8zE88NQo25Z_L874JthI,880
+pytz/zoneinfo/Pacific/Noumea,sha256=FSanpAOCE7WHQeiop4QErKV9ZC3Tzu2GxkH8-tIXsHY,304
+pytz/zoneinfo/Pacific/Pago_Pago,sha256=fCYrYphYY6rUfxOw712y5cyRe104AC3pouqD3bCINFg,175
+pytz/zoneinfo/Pacific/Palau,sha256=CRW__McXPlOaxo2S9kHMHaBdjv7u59ZWEwYuJConzmQ,180
+pytz/zoneinfo/Pacific/Pitcairn,sha256=O65Ed1FOCF_0rEjpYPAquDwtAF3hxyJNiujgpgZV0kc,202
+pytz/zoneinfo/Pacific/Pohnpei,sha256=YqXrKwjhUnxWyV6PFg1L6_zu84MfPW82dypf0S7pHtQ,303
+pytz/zoneinfo/Pacific/Ponape,sha256=YqXrKwjhUnxWyV6PFg1L6_zu84MfPW82dypf0S7pHtQ,303
+pytz/zoneinfo/Pacific/Port_Moresby,sha256=ei_XjmiRDLh-RU94uvz9CCIIRFH1r0X7WL-sB-6DF60,186
+pytz/zoneinfo/Pacific/Rarotonga,sha256=3ur0jiBQqU20VyKMI3bSfA-HBaQ-HhjElTqsHWk1kic,603
+pytz/zoneinfo/Pacific/Saipan,sha256=Ex9znmf6rNfGze6gNpZJCMr1TT4rkl2SnrhecrdJufI,494
+pytz/zoneinfo/Pacific/Samoa,sha256=fCYrYphYY6rUfxOw712y5cyRe104AC3pouqD3bCINFg,175
+pytz/zoneinfo/Pacific/Tahiti,sha256=9iozXRFYDhBOLijmDk2mRS4Mb-LXWW1u7n790jBNKxM,165
+pytz/zoneinfo/Pacific/Tarawa,sha256=vT6UxW7KeGptdh80Fj9ASATGmLx8Wai630lML4mwg80,166
+pytz/zoneinfo/Pacific/Tongatapu,sha256=b0TbbaYBUDEkPIpcS-EnIKCZ5KSg2HNOGIZJ9Pa8TEI,372
+pytz/zoneinfo/Pacific/Truk,sha256=6IYDKViuRDC_RVx1AJOxazVET6cZtdv_LFE6xbtGItI,269
+pytz/zoneinfo/Pacific/Wake,sha256=dTJxldgcad-kGrODwo4cAHGRSsS-K3fjeZ62WEUhmFk,166
+pytz/zoneinfo/Pacific/Wallis,sha256=CAlw1H5gkER5lkvtmHY-ppoGL3hNmYxfMaXQpI0fTOE,166
+pytz/zoneinfo/Pacific/Yap,sha256=6IYDKViuRDC_RVx1AJOxazVET6cZtdv_LFE6xbtGItI,269
+pytz/zoneinfo/Poland,sha256=TiLDPbeVF0ckgLVEkaSeDaKZ8wctdJDOl_HE_Wd5rKs,2654
+pytz/zoneinfo/Portugal,sha256=mpUpxGexMhbOBImDLSQs5-GAk7pm7tg4qYW044Kkle0,3497
+pytz/zoneinfo/ROC,sha256=DMmQwOpPql25ue3Nf8vAKKT4em06D1Z9rHbLIitxixk,761
+pytz/zoneinfo/ROK,sha256=LI9LsV3XcJC0l-KoQf8zI-y7rk-du57erS-N2Ptdi7Q,617
+pytz/zoneinfo/Singapore,sha256=hIgr_LHMTWh3GgeG-MmLHBp-9anUxQcfMlKFtX8WvmU,383
+pytz/zoneinfo/Turkey,sha256=2S0A_f7VxvyErJMMCPqK33AChA29IVkMr1o-SpMtMxk,1947
+pytz/zoneinfo/UCT,sha256=i4WEZ5GrLIpUY8g6W-PAQ-JXDXRIQ01BOYlp7Ufj5vI,114
+pytz/zoneinfo/US/Alaska,sha256=oZA1NSPS2BWdymYpnCHFO8BlYVS-ll5KLg2Ez9CbETs,2371
+pytz/zoneinfo/US/Aleutian,sha256=IB1DhwJQAKbhPJ9jHLf8zW5Dad7HIkBS-dhv64E1OlM,2356
+pytz/zoneinfo/US/Arizona,sha256=nEOwYOnGxENw9zW8m50PGxbtVfTrX3QYAo4x4LgOLfI,328
+pytz/zoneinfo/US/Central,sha256=4aZFw-svkMyXmSpNufqzK-xveos-oVJDpEyI8Yu9HQE,3576
+pytz/zoneinfo/US/East-Indiana,sha256=GrNub1_3Um5Qh67wOx58_TEAz4fwAeAlk2AlMTVA_sI,1666
+pytz/zoneinfo/US/Eastern,sha256=7AoiEGjr3wV4P7C4Qs35COZqwr2mjNDq7ocpsSPFOM8,3536
+pytz/zoneinfo/US/Hawaii,sha256=fwPRv1Jk56sCOi75uZfd_Iy2k2aSQHx3B2K5xUlSPzM,329
+pytz/zoneinfo/US/Indiana-Starke,sha256=BiALShjiOLg1o8mMRWJ1jyTlJkgvwzte7B9WSOvTUNg,2428
+pytz/zoneinfo/US/Michigan,sha256=hecz8yqY2Cj5B61G3gLZdAVZvRgK9l0P90c_gN-uD5g,2230
+pytz/zoneinfo/US/Mountain,sha256=6_yPo1_mvnt9DgpPzr0QdHsjdsfUG6ALnagQLML1DSM,2444
+pytz/zoneinfo/US/Pacific,sha256=VOy1PikdjiVdJ7lukVGzwl8uDxV_KYqznkTm5BLEiDM,2836
+pytz/zoneinfo/US/Samoa,sha256=fCYrYphYY6rUfxOw712y5cyRe104AC3pouqD3bCINFg,175
+pytz/zoneinfo/UTC,sha256=i4WEZ5GrLIpUY8g6W-PAQ-JXDXRIQ01BOYlp7Ufj5vI,114
+pytz/zoneinfo/Universal,sha256=i4WEZ5GrLIpUY8g6W-PAQ-JXDXRIQ01BOYlp7Ufj5vI,114
+pytz/zoneinfo/W-SU,sha256=KmkofRcj6T8Ph28PJChm8JVp13uRvef6TZ0GuPzUiDw,1535
+pytz/zoneinfo/WET,sha256=Sc0l03EfVs_aIi17I4KyZJFkwiAHat5BgpjuuFDhgQ0,1905
+pytz/zoneinfo/Zulu,sha256=i4WEZ5GrLIpUY8g6W-PAQ-JXDXRIQ01BOYlp7Ufj5vI,114
+pytz/zoneinfo/iso3166.tab,sha256=BMh_yY7MXp8DMEy71jarFX3IJSNpwuEyIjIo2HKUXD4,4463
+pytz/zoneinfo/leapseconds,sha256=ZtGn3SbOza--xQx-nrM848xvo1ZaDJcBVXX-LJ63rdg,3388
+pytz/zoneinfo/tzdata.zi,sha256=vF1-feptEJLN_1GnApC1paO6lKH-YQvW3a5Rnn4avnw,112647
+pytz/zoneinfo/zone.tab,sha256=S1ouKxk9vkeA_AEHFmw2kJPVuJTUzPK0YrSZLGgURK4,19419
+pytz/zoneinfo/zone1970.tab,sha256=UlmR6Qfjsqd0DkTzQExCWGTeOfbtP0ruBBfI4_MKKQI,17593
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz-2021.3.dist-info/WHEEL b/monEnvTP/lib/python3.8/site-packages/pytz-2021.3.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..ef99c6cf3283b50a273ac4c6d009a0aa85597070
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pytz-2021.3.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.34.2)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz-2021.3.dist-info/top_level.txt b/monEnvTP/lib/python3.8/site-packages/pytz-2021.3.dist-info/top_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..af44f198c687e245aada835efbab2f75ed2c9baf
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pytz-2021.3.dist-info/top_level.txt
@@ -0,0 +1 @@
+pytz
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz-2021.3.dist-info/zip-safe b/monEnvTP/lib/python3.8/site-packages/pytz-2021.3.dist-info/zip-safe
new file mode 100644
index 0000000000000000000000000000000000000000..8b137891791fe96927ad78e64b0aad7bded08bdc
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pytz-2021.3.dist-info/zip-safe
@@ -0,0 +1 @@
+
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/__init__.py b/monEnvTP/lib/python3.8/site-packages/pytz/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..6ef4366b7160e63631fe9d149c144ac2b31b3cb3
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pytz/__init__.py
@@ -0,0 +1,1559 @@
+'''
+datetime.tzinfo timezone definitions generated from the
+Olson timezone database:
+
+ ftp://elsie.nci.nih.gov/pub/tz*.tar.gz
+
+See the datetime section of the Python Library Reference for information
+on how to use these modules.
+'''
+
+import sys
+import datetime
+import os.path
+
+from pytz.exceptions import AmbiguousTimeError
+from pytz.exceptions import InvalidTimeError
+from pytz.exceptions import NonExistentTimeError
+from pytz.exceptions import UnknownTimeZoneError
+from pytz.lazy import LazyDict, LazyList, LazySet # noqa
+from pytz.tzinfo import unpickler, BaseTzInfo
+from pytz.tzfile import build_tzinfo
+
+
+# The IANA (nee Olson) database is updated several times a year.
+OLSON_VERSION = '2021c'
+VERSION = '2021.3' # pip compatible version number.
+__version__ = VERSION
+
+OLSEN_VERSION = OLSON_VERSION # Old releases had this misspelling
+
+__all__ = [
+ 'timezone', 'utc', 'country_timezones', 'country_names',
+ 'AmbiguousTimeError', 'InvalidTimeError',
+ 'NonExistentTimeError', 'UnknownTimeZoneError',
+ 'all_timezones', 'all_timezones_set',
+ 'common_timezones', 'common_timezones_set',
+ 'BaseTzInfo', 'FixedOffset',
+]
+
+
+if sys.version_info[0] > 2: # Python 3.x
+
+ # Python 3.x doesn't have unicode(), making writing code
+ # for Python 2.3 and Python 3.x a pain.
+ unicode = str
+
+ def ascii(s):
+ r"""
+ >>> ascii('Hello')
+ 'Hello'
+ >>> ascii('\N{TRADE MARK SIGN}') #doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ UnicodeEncodeError: ...
+ """
+ if type(s) == bytes:
+ s = s.decode('ASCII')
+ else:
+ s.encode('ASCII') # Raise an exception if not ASCII
+ return s # But the string - not a byte string.
+
+else: # Python 2.x
+
+ def ascii(s):
+ r"""
+ >>> ascii('Hello')
+ 'Hello'
+ >>> ascii(u'Hello')
+ 'Hello'
+ >>> ascii(u'\N{TRADE MARK SIGN}') #doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ UnicodeEncodeError: ...
+ """
+ return s.encode('ASCII')
+
+
+def open_resource(name):
+ """Open a resource from the zoneinfo subdir for reading.
+
+ Uses the pkg_resources module if available and no standard file
+ found at the calculated location.
+
+ It is possible to specify different location for zoneinfo
+ subdir by using the PYTZ_TZDATADIR environment variable.
+ """
+ name_parts = name.lstrip('/').split('/')
+ for part in name_parts:
+ if part == os.path.pardir or os.path.sep in part:
+ raise ValueError('Bad path segment: %r' % part)
+ zoneinfo_dir = os.environ.get('PYTZ_TZDATADIR', None)
+ if zoneinfo_dir is not None:
+ filename = os.path.join(zoneinfo_dir, *name_parts)
+ else:
+ filename = os.path.join(os.path.dirname(__file__),
+ 'zoneinfo', *name_parts)
+ if not os.path.exists(filename):
+ # http://bugs.launchpad.net/bugs/383171 - we avoid using this
+ # unless absolutely necessary to help when a broken version of
+ # pkg_resources is installed.
+ try:
+ from pkg_resources import resource_stream
+ except ImportError:
+ resource_stream = None
+
+ if resource_stream is not None:
+ return resource_stream(__name__, 'zoneinfo/' + name)
+ return open(filename, 'rb')
+
+
+def resource_exists(name):
+ """Return true if the given resource exists"""
+ try:
+ if os.environ.get('PYTZ_SKIPEXISTSCHECK', ''):
+ # In "standard" distributions, we can assume that
+ # all the listed timezones are present. As an
+ # import-speed optimization, you can set the
+ # PYTZ_SKIPEXISTSCHECK flag to skip checking
+ # for the presence of the resource file on disk.
+ return True
+ open_resource(name).close()
+ return True
+ except IOError:
+ return False
+
+
+_tzinfo_cache = {}
+
+
+def timezone(zone):
+ r''' Return a datetime.tzinfo implementation for the given timezone
+
+ >>> from datetime import datetime, timedelta
+ >>> utc = timezone('UTC')
+ >>> eastern = timezone('US/Eastern')
+ >>> eastern.zone
+ 'US/Eastern'
+ >>> timezone(unicode('US/Eastern')) is eastern
+ True
+ >>> utc_dt = datetime(2002, 10, 27, 6, 0, 0, tzinfo=utc)
+ >>> loc_dt = utc_dt.astimezone(eastern)
+ >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)'
+ >>> loc_dt.strftime(fmt)
+ '2002-10-27 01:00:00 EST (-0500)'
+ >>> (loc_dt - timedelta(minutes=10)).strftime(fmt)
+ '2002-10-27 00:50:00 EST (-0500)'
+ >>> eastern.normalize(loc_dt - timedelta(minutes=10)).strftime(fmt)
+ '2002-10-27 01:50:00 EDT (-0400)'
+ >>> (loc_dt + timedelta(minutes=10)).strftime(fmt)
+ '2002-10-27 01:10:00 EST (-0500)'
+
+ Raises UnknownTimeZoneError if passed an unknown zone.
+
+ >>> try:
+ ... timezone('Asia/Shangri-La')
+ ... except UnknownTimeZoneError:
+ ... print('Unknown')
+ Unknown
+
+ >>> try:
+ ... timezone(unicode('\N{TRADE MARK SIGN}'))
+ ... except UnknownTimeZoneError:
+ ... print('Unknown')
+ Unknown
+
+ '''
+ if zone is None:
+ raise UnknownTimeZoneError(None)
+
+ if zone.upper() == 'UTC':
+ return utc
+
+ try:
+ zone = ascii(zone)
+ except UnicodeEncodeError:
+ # All valid timezones are ASCII
+ raise UnknownTimeZoneError(zone)
+
+ zone = _case_insensitive_zone_lookup(_unmunge_zone(zone))
+ if zone not in _tzinfo_cache:
+ if zone in all_timezones_set: # noqa
+ fp = open_resource(zone)
+ try:
+ _tzinfo_cache[zone] = build_tzinfo(zone, fp)
+ finally:
+ fp.close()
+ else:
+ raise UnknownTimeZoneError(zone)
+
+ return _tzinfo_cache[zone]
+
+
+def _unmunge_zone(zone):
+ """Undo the time zone name munging done by older versions of pytz."""
+ return zone.replace('_plus_', '+').replace('_minus_', '-')
+
+
+_all_timezones_lower_to_standard = None
+
+
+def _case_insensitive_zone_lookup(zone):
+ """case-insensitively matching timezone, else return zone unchanged"""
+ global _all_timezones_lower_to_standard
+ if _all_timezones_lower_to_standard is None:
+ _all_timezones_lower_to_standard = dict((tz.lower(), tz) for tz in all_timezones) # noqa
+ return _all_timezones_lower_to_standard.get(zone.lower()) or zone # noqa
+
+
+ZERO = datetime.timedelta(0)
+HOUR = datetime.timedelta(hours=1)
+
+
+class UTC(BaseTzInfo):
+ """UTC
+
+ Optimized UTC implementation. It unpickles using the single module global
+ instance defined beneath this class declaration.
+ """
+ zone = "UTC"
+
+ _utcoffset = ZERO
+ _dst = ZERO
+ _tzname = zone
+
+ def fromutc(self, dt):
+ if dt.tzinfo is None:
+ return self.localize(dt)
+ return super(utc.__class__, self).fromutc(dt)
+
+ def utcoffset(self, dt):
+ return ZERO
+
+ def tzname(self, dt):
+ return "UTC"
+
+ def dst(self, dt):
+ return ZERO
+
+ def __reduce__(self):
+ return _UTC, ()
+
+ def localize(self, dt, is_dst=False):
+ '''Convert naive time to local time'''
+ if dt.tzinfo is not None:
+ raise ValueError('Not naive datetime (tzinfo is already set)')
+ return dt.replace(tzinfo=self)
+
+ def normalize(self, dt, is_dst=False):
+ '''Correct the timezone information on the given datetime'''
+ if dt.tzinfo is self:
+ return dt
+ if dt.tzinfo is None:
+ raise ValueError('Naive time - no tzinfo set')
+ return dt.astimezone(self)
+
+ def __repr__(self):
+ return "<UTC>"
+
+ def __str__(self):
+ return "UTC"
+
+
+UTC = utc = UTC() # UTC is a singleton
+
+
+def _UTC():
+ """Factory function for utc unpickling.
+
+ Makes sure that unpickling a utc instance always returns the same
+ module global.
+
+ These examples belong in the UTC class above, but it is obscured; or in
+ the README.rst, but we are not depending on Python 2.4 so integrating
+ the README.rst examples with the unit tests is not trivial.
+
+ >>> import datetime, pickle
+ >>> dt = datetime.datetime(2005, 3, 1, 14, 13, 21, tzinfo=utc)
+ >>> naive = dt.replace(tzinfo=None)
+ >>> p = pickle.dumps(dt, 1)
+ >>> naive_p = pickle.dumps(naive, 1)
+ >>> len(p) - len(naive_p)
+ 17
+ >>> new = pickle.loads(p)
+ >>> new == dt
+ True
+ >>> new is dt
+ False
+ >>> new.tzinfo is dt.tzinfo
+ True
+ >>> utc is UTC is timezone('UTC')
+ True
+ >>> utc is timezone('GMT')
+ False
+ """
+ return utc
+
+
+_UTC.__safe_for_unpickling__ = True
+
+
+def _p(*args):
+ """Factory function for unpickling pytz tzinfo instances.
+
+ Just a wrapper around tzinfo.unpickler to save a few bytes in each pickle
+ by shortening the path.
+ """
+ return unpickler(*args)
+
+
+_p.__safe_for_unpickling__ = True
+
+
+class _CountryTimezoneDict(LazyDict):
+ """Map ISO 3166 country code to a list of timezone names commonly used
+ in that country.
+
+ iso3166_code is the two letter code used to identify the country.
+
+ >>> def print_list(list_of_strings):
+ ... 'We use a helper so doctests work under Python 2.3 -> 3.x'
+ ... for s in list_of_strings:
+ ... print(s)
+
+ >>> print_list(country_timezones['nz'])
+ Pacific/Auckland
+ Pacific/Chatham
+ >>> print_list(country_timezones['ch'])
+ Europe/Zurich
+ >>> print_list(country_timezones['CH'])
+ Europe/Zurich
+ >>> print_list(country_timezones[unicode('ch')])
+ Europe/Zurich
+ >>> print_list(country_timezones['XXX'])
+ Traceback (most recent call last):
+ ...
+ KeyError: 'XXX'
+
+ Previously, this information was exposed as a function rather than a
+ dictionary. This is still supported::
+
+ >>> print_list(country_timezones('nz'))
+ Pacific/Auckland
+ Pacific/Chatham
+ """
+ def __call__(self, iso3166_code):
+ """Backwards compatibility."""
+ return self[iso3166_code]
+
+ def _fill(self):
+ data = {}
+ zone_tab = open_resource('zone.tab')
+ try:
+ for line in zone_tab:
+ line = line.decode('UTF-8')
+ if line.startswith('#'):
+ continue
+ code, coordinates, zone = line.split(None, 4)[:3]
+ if zone not in all_timezones_set: # noqa
+ continue
+ try:
+ data[code].append(zone)
+ except KeyError:
+ data[code] = [zone]
+ self.data = data
+ finally:
+ zone_tab.close()
+
+
+country_timezones = _CountryTimezoneDict()
+
+
class _CountryNameDict(LazyDict):
    '''Dictionary proving ISO3166 code -> English name.

    >>> print(country_names['au'])
    Australia
    '''

    def _fill(self):
        # Parse the bundled iso3166.tab resource: "<code>\t<country name>".
        names = {}
        tab = open_resource('iso3166.tab')
        try:
            for raw in tab.readlines():
                text = raw.decode('UTF-8')
                if not text.startswith('#'):
                    code, name = text.split(None, 1)
                    names[code] = name.strip()
            self.data = names
        finally:
            tab.close()
+
+
# Module-level singleton: ISO 3166 country code -> English country name.
country_names = _CountryNameDict()
+
+
+# Time-zone info based solely on fixed offsets
+
+class _FixedOffset(datetime.tzinfo):
+
+ zone = None # to match the standard pytz API
+
+ def __init__(self, minutes):
+ if abs(minutes) >= 1440:
+ raise ValueError("absolute offset is too large", minutes)
+ self._minutes = minutes
+ self._offset = datetime.timedelta(minutes=minutes)
+
+ def utcoffset(self, dt):
+ return self._offset
+
+ def __reduce__(self):
+ return FixedOffset, (self._minutes, )
+
+ def dst(self, dt):
+ return ZERO
+
+ def tzname(self, dt):
+ return None
+
+ def __repr__(self):
+ return 'pytz.FixedOffset(%d)' % self._minutes
+
+ def localize(self, dt, is_dst=False):
+ '''Convert naive time to local time'''
+ if dt.tzinfo is not None:
+ raise ValueError('Not naive datetime (tzinfo is already set)')
+ return dt.replace(tzinfo=self)
+
+ def normalize(self, dt, is_dst=False):
+ '''Correct the timezone information on the given datetime'''
+ if dt.tzinfo is self:
+ return dt
+ if dt.tzinfo is None:
+ raise ValueError('Naive time - no tzinfo set')
+ return dt.astimezone(self)
+
+
def FixedOffset(offset, _tzinfos={}):
    """return a fixed-offset timezone based off a number of minutes.

    >>> one = FixedOffset(-330)
    >>> one
    pytz.FixedOffset(-330)
    >>> str(one.utcoffset(datetime.datetime.now()))
    '-1 day, 18:30:00'
    >>> str(one.dst(datetime.datetime.now()))
    '0:00:00'

    >>> two = FixedOffset(1380)
    >>> two
    pytz.FixedOffset(1380)
    >>> str(two.utcoffset(datetime.datetime.now()))
    '23:00:00'
    >>> str(two.dst(datetime.datetime.now()))
    '0:00:00'

    The datetime.timedelta must be between the range of -1 and 1 day,
    non-inclusive.

    >>> FixedOffset(1440)
    Traceback (most recent call last):
    ...
    ValueError: ('absolute offset is too large', 1440)

    >>> FixedOffset(-1440)
    Traceback (most recent call last):
    ...
    ValueError: ('absolute offset is too large', -1440)

    An offset of 0 is special-cased to return UTC.

    >>> FixedOffset(0) is UTC
    True

    There should always be only one instance of a FixedOffset per timedelta.
    This should be true for multiple creation calls.

    >>> FixedOffset(-330) is one
    True
    >>> FixedOffset(1380) is two
    True

    It should also be true for pickling.

    >>> import pickle
    >>> pickle.loads(pickle.dumps(one)) is one
    True
    >>> pickle.loads(pickle.dumps(two)) is two
    True
    """
    if offset == 0:
        return UTC

    # NOTE: the mutable default argument is intentional — _tzinfos is a
    # process-wide memo cache mapping offset -> singleton _FixedOffset.
    info = _tzinfos.get(offset)
    if info is None:
        # We haven't seen this one before. we need to save it.

        # Use setdefault to avoid a race condition and make sure we have
        # only one
        info = _tzinfos.setdefault(offset, _FixedOffset(offset))

    return info


# Opt in to the legacy copy_reg unpickling protocol; _FixedOffset.__reduce__
# routes unpickling through this factory so the memo cache is honoured.
FixedOffset.__safe_for_unpickling__ = True
+
+
def _test():
    """Run the pytz module doctests; returns doctest.testmod's results."""
    import doctest
    # Make the package importable when this file is executed from inside
    # the package directory.
    sys.path.insert(0, os.pardir)
    import pytz
    return doctest.testmod(pytz)


if __name__ == '__main__':
    _test()
+all_timezones = \
+['Africa/Abidjan',
+ 'Africa/Accra',
+ 'Africa/Addis_Ababa',
+ 'Africa/Algiers',
+ 'Africa/Asmara',
+ 'Africa/Asmera',
+ 'Africa/Bamako',
+ 'Africa/Bangui',
+ 'Africa/Banjul',
+ 'Africa/Bissau',
+ 'Africa/Blantyre',
+ 'Africa/Brazzaville',
+ 'Africa/Bujumbura',
+ 'Africa/Cairo',
+ 'Africa/Casablanca',
+ 'Africa/Ceuta',
+ 'Africa/Conakry',
+ 'Africa/Dakar',
+ 'Africa/Dar_es_Salaam',
+ 'Africa/Djibouti',
+ 'Africa/Douala',
+ 'Africa/El_Aaiun',
+ 'Africa/Freetown',
+ 'Africa/Gaborone',
+ 'Africa/Harare',
+ 'Africa/Johannesburg',
+ 'Africa/Juba',
+ 'Africa/Kampala',
+ 'Africa/Khartoum',
+ 'Africa/Kigali',
+ 'Africa/Kinshasa',
+ 'Africa/Lagos',
+ 'Africa/Libreville',
+ 'Africa/Lome',
+ 'Africa/Luanda',
+ 'Africa/Lubumbashi',
+ 'Africa/Lusaka',
+ 'Africa/Malabo',
+ 'Africa/Maputo',
+ 'Africa/Maseru',
+ 'Africa/Mbabane',
+ 'Africa/Mogadishu',
+ 'Africa/Monrovia',
+ 'Africa/Nairobi',
+ 'Africa/Ndjamena',
+ 'Africa/Niamey',
+ 'Africa/Nouakchott',
+ 'Africa/Ouagadougou',
+ 'Africa/Porto-Novo',
+ 'Africa/Sao_Tome',
+ 'Africa/Timbuktu',
+ 'Africa/Tripoli',
+ 'Africa/Tunis',
+ 'Africa/Windhoek',
+ 'America/Adak',
+ 'America/Anchorage',
+ 'America/Anguilla',
+ 'America/Antigua',
+ 'America/Araguaina',
+ 'America/Argentina/Buenos_Aires',
+ 'America/Argentina/Catamarca',
+ 'America/Argentina/ComodRivadavia',
+ 'America/Argentina/Cordoba',
+ 'America/Argentina/Jujuy',
+ 'America/Argentina/La_Rioja',
+ 'America/Argentina/Mendoza',
+ 'America/Argentina/Rio_Gallegos',
+ 'America/Argentina/Salta',
+ 'America/Argentina/San_Juan',
+ 'America/Argentina/San_Luis',
+ 'America/Argentina/Tucuman',
+ 'America/Argentina/Ushuaia',
+ 'America/Aruba',
+ 'America/Asuncion',
+ 'America/Atikokan',
+ 'America/Atka',
+ 'America/Bahia',
+ 'America/Bahia_Banderas',
+ 'America/Barbados',
+ 'America/Belem',
+ 'America/Belize',
+ 'America/Blanc-Sablon',
+ 'America/Boa_Vista',
+ 'America/Bogota',
+ 'America/Boise',
+ 'America/Buenos_Aires',
+ 'America/Cambridge_Bay',
+ 'America/Campo_Grande',
+ 'America/Cancun',
+ 'America/Caracas',
+ 'America/Catamarca',
+ 'America/Cayenne',
+ 'America/Cayman',
+ 'America/Chicago',
+ 'America/Chihuahua',
+ 'America/Coral_Harbour',
+ 'America/Cordoba',
+ 'America/Costa_Rica',
+ 'America/Creston',
+ 'America/Cuiaba',
+ 'America/Curacao',
+ 'America/Danmarkshavn',
+ 'America/Dawson',
+ 'America/Dawson_Creek',
+ 'America/Denver',
+ 'America/Detroit',
+ 'America/Dominica',
+ 'America/Edmonton',
+ 'America/Eirunepe',
+ 'America/El_Salvador',
+ 'America/Ensenada',
+ 'America/Fort_Nelson',
+ 'America/Fort_Wayne',
+ 'America/Fortaleza',
+ 'America/Glace_Bay',
+ 'America/Godthab',
+ 'America/Goose_Bay',
+ 'America/Grand_Turk',
+ 'America/Grenada',
+ 'America/Guadeloupe',
+ 'America/Guatemala',
+ 'America/Guayaquil',
+ 'America/Guyana',
+ 'America/Halifax',
+ 'America/Havana',
+ 'America/Hermosillo',
+ 'America/Indiana/Indianapolis',
+ 'America/Indiana/Knox',
+ 'America/Indiana/Marengo',
+ 'America/Indiana/Petersburg',
+ 'America/Indiana/Tell_City',
+ 'America/Indiana/Vevay',
+ 'America/Indiana/Vincennes',
+ 'America/Indiana/Winamac',
+ 'America/Indianapolis',
+ 'America/Inuvik',
+ 'America/Iqaluit',
+ 'America/Jamaica',
+ 'America/Jujuy',
+ 'America/Juneau',
+ 'America/Kentucky/Louisville',
+ 'America/Kentucky/Monticello',
+ 'America/Knox_IN',
+ 'America/Kralendijk',
+ 'America/La_Paz',
+ 'America/Lima',
+ 'America/Los_Angeles',
+ 'America/Louisville',
+ 'America/Lower_Princes',
+ 'America/Maceio',
+ 'America/Managua',
+ 'America/Manaus',
+ 'America/Marigot',
+ 'America/Martinique',
+ 'America/Matamoros',
+ 'America/Mazatlan',
+ 'America/Mendoza',
+ 'America/Menominee',
+ 'America/Merida',
+ 'America/Metlakatla',
+ 'America/Mexico_City',
+ 'America/Miquelon',
+ 'America/Moncton',
+ 'America/Monterrey',
+ 'America/Montevideo',
+ 'America/Montreal',
+ 'America/Montserrat',
+ 'America/Nassau',
+ 'America/New_York',
+ 'America/Nipigon',
+ 'America/Nome',
+ 'America/Noronha',
+ 'America/North_Dakota/Beulah',
+ 'America/North_Dakota/Center',
+ 'America/North_Dakota/New_Salem',
+ 'America/Nuuk',
+ 'America/Ojinaga',
+ 'America/Panama',
+ 'America/Pangnirtung',
+ 'America/Paramaribo',
+ 'America/Phoenix',
+ 'America/Port-au-Prince',
+ 'America/Port_of_Spain',
+ 'America/Porto_Acre',
+ 'America/Porto_Velho',
+ 'America/Puerto_Rico',
+ 'America/Punta_Arenas',
+ 'America/Rainy_River',
+ 'America/Rankin_Inlet',
+ 'America/Recife',
+ 'America/Regina',
+ 'America/Resolute',
+ 'America/Rio_Branco',
+ 'America/Rosario',
+ 'America/Santa_Isabel',
+ 'America/Santarem',
+ 'America/Santiago',
+ 'America/Santo_Domingo',
+ 'America/Sao_Paulo',
+ 'America/Scoresbysund',
+ 'America/Shiprock',
+ 'America/Sitka',
+ 'America/St_Barthelemy',
+ 'America/St_Johns',
+ 'America/St_Kitts',
+ 'America/St_Lucia',
+ 'America/St_Thomas',
+ 'America/St_Vincent',
+ 'America/Swift_Current',
+ 'America/Tegucigalpa',
+ 'America/Thule',
+ 'America/Thunder_Bay',
+ 'America/Tijuana',
+ 'America/Toronto',
+ 'America/Tortola',
+ 'America/Vancouver',
+ 'America/Virgin',
+ 'America/Whitehorse',
+ 'America/Winnipeg',
+ 'America/Yakutat',
+ 'America/Yellowknife',
+ 'Antarctica/Casey',
+ 'Antarctica/Davis',
+ 'Antarctica/DumontDUrville',
+ 'Antarctica/Macquarie',
+ 'Antarctica/Mawson',
+ 'Antarctica/McMurdo',
+ 'Antarctica/Palmer',
+ 'Antarctica/Rothera',
+ 'Antarctica/South_Pole',
+ 'Antarctica/Syowa',
+ 'Antarctica/Troll',
+ 'Antarctica/Vostok',
+ 'Arctic/Longyearbyen',
+ 'Asia/Aden',
+ 'Asia/Almaty',
+ 'Asia/Amman',
+ 'Asia/Anadyr',
+ 'Asia/Aqtau',
+ 'Asia/Aqtobe',
+ 'Asia/Ashgabat',
+ 'Asia/Ashkhabad',
+ 'Asia/Atyrau',
+ 'Asia/Baghdad',
+ 'Asia/Bahrain',
+ 'Asia/Baku',
+ 'Asia/Bangkok',
+ 'Asia/Barnaul',
+ 'Asia/Beirut',
+ 'Asia/Bishkek',
+ 'Asia/Brunei',
+ 'Asia/Calcutta',
+ 'Asia/Chita',
+ 'Asia/Choibalsan',
+ 'Asia/Chongqing',
+ 'Asia/Chungking',
+ 'Asia/Colombo',
+ 'Asia/Dacca',
+ 'Asia/Damascus',
+ 'Asia/Dhaka',
+ 'Asia/Dili',
+ 'Asia/Dubai',
+ 'Asia/Dushanbe',
+ 'Asia/Famagusta',
+ 'Asia/Gaza',
+ 'Asia/Harbin',
+ 'Asia/Hebron',
+ 'Asia/Ho_Chi_Minh',
+ 'Asia/Hong_Kong',
+ 'Asia/Hovd',
+ 'Asia/Irkutsk',
+ 'Asia/Istanbul',
+ 'Asia/Jakarta',
+ 'Asia/Jayapura',
+ 'Asia/Jerusalem',
+ 'Asia/Kabul',
+ 'Asia/Kamchatka',
+ 'Asia/Karachi',
+ 'Asia/Kashgar',
+ 'Asia/Kathmandu',
+ 'Asia/Katmandu',
+ 'Asia/Khandyga',
+ 'Asia/Kolkata',
+ 'Asia/Krasnoyarsk',
+ 'Asia/Kuala_Lumpur',
+ 'Asia/Kuching',
+ 'Asia/Kuwait',
+ 'Asia/Macao',
+ 'Asia/Macau',
+ 'Asia/Magadan',
+ 'Asia/Makassar',
+ 'Asia/Manila',
+ 'Asia/Muscat',
+ 'Asia/Nicosia',
+ 'Asia/Novokuznetsk',
+ 'Asia/Novosibirsk',
+ 'Asia/Omsk',
+ 'Asia/Oral',
+ 'Asia/Phnom_Penh',
+ 'Asia/Pontianak',
+ 'Asia/Pyongyang',
+ 'Asia/Qatar',
+ 'Asia/Qostanay',
+ 'Asia/Qyzylorda',
+ 'Asia/Rangoon',
+ 'Asia/Riyadh',
+ 'Asia/Saigon',
+ 'Asia/Sakhalin',
+ 'Asia/Samarkand',
+ 'Asia/Seoul',
+ 'Asia/Shanghai',
+ 'Asia/Singapore',
+ 'Asia/Srednekolymsk',
+ 'Asia/Taipei',
+ 'Asia/Tashkent',
+ 'Asia/Tbilisi',
+ 'Asia/Tehran',
+ 'Asia/Tel_Aviv',
+ 'Asia/Thimbu',
+ 'Asia/Thimphu',
+ 'Asia/Tokyo',
+ 'Asia/Tomsk',
+ 'Asia/Ujung_Pandang',
+ 'Asia/Ulaanbaatar',
+ 'Asia/Ulan_Bator',
+ 'Asia/Urumqi',
+ 'Asia/Ust-Nera',
+ 'Asia/Vientiane',
+ 'Asia/Vladivostok',
+ 'Asia/Yakutsk',
+ 'Asia/Yangon',
+ 'Asia/Yekaterinburg',
+ 'Asia/Yerevan',
+ 'Atlantic/Azores',
+ 'Atlantic/Bermuda',
+ 'Atlantic/Canary',
+ 'Atlantic/Cape_Verde',
+ 'Atlantic/Faeroe',
+ 'Atlantic/Faroe',
+ 'Atlantic/Jan_Mayen',
+ 'Atlantic/Madeira',
+ 'Atlantic/Reykjavik',
+ 'Atlantic/South_Georgia',
+ 'Atlantic/St_Helena',
+ 'Atlantic/Stanley',
+ 'Australia/ACT',
+ 'Australia/Adelaide',
+ 'Australia/Brisbane',
+ 'Australia/Broken_Hill',
+ 'Australia/Canberra',
+ 'Australia/Currie',
+ 'Australia/Darwin',
+ 'Australia/Eucla',
+ 'Australia/Hobart',
+ 'Australia/LHI',
+ 'Australia/Lindeman',
+ 'Australia/Lord_Howe',
+ 'Australia/Melbourne',
+ 'Australia/NSW',
+ 'Australia/North',
+ 'Australia/Perth',
+ 'Australia/Queensland',
+ 'Australia/South',
+ 'Australia/Sydney',
+ 'Australia/Tasmania',
+ 'Australia/Victoria',
+ 'Australia/West',
+ 'Australia/Yancowinna',
+ 'Brazil/Acre',
+ 'Brazil/DeNoronha',
+ 'Brazil/East',
+ 'Brazil/West',
+ 'CET',
+ 'CST6CDT',
+ 'Canada/Atlantic',
+ 'Canada/Central',
+ 'Canada/Eastern',
+ 'Canada/Mountain',
+ 'Canada/Newfoundland',
+ 'Canada/Pacific',
+ 'Canada/Saskatchewan',
+ 'Canada/Yukon',
+ 'Chile/Continental',
+ 'Chile/EasterIsland',
+ 'Cuba',
+ 'EET',
+ 'EST',
+ 'EST5EDT',
+ 'Egypt',
+ 'Eire',
+ 'Etc/GMT',
+ 'Etc/GMT+0',
+ 'Etc/GMT+1',
+ 'Etc/GMT+10',
+ 'Etc/GMT+11',
+ 'Etc/GMT+12',
+ 'Etc/GMT+2',
+ 'Etc/GMT+3',
+ 'Etc/GMT+4',
+ 'Etc/GMT+5',
+ 'Etc/GMT+6',
+ 'Etc/GMT+7',
+ 'Etc/GMT+8',
+ 'Etc/GMT+9',
+ 'Etc/GMT-0',
+ 'Etc/GMT-1',
+ 'Etc/GMT-10',
+ 'Etc/GMT-11',
+ 'Etc/GMT-12',
+ 'Etc/GMT-13',
+ 'Etc/GMT-14',
+ 'Etc/GMT-2',
+ 'Etc/GMT-3',
+ 'Etc/GMT-4',
+ 'Etc/GMT-5',
+ 'Etc/GMT-6',
+ 'Etc/GMT-7',
+ 'Etc/GMT-8',
+ 'Etc/GMT-9',
+ 'Etc/GMT0',
+ 'Etc/Greenwich',
+ 'Etc/UCT',
+ 'Etc/UTC',
+ 'Etc/Universal',
+ 'Etc/Zulu',
+ 'Europe/Amsterdam',
+ 'Europe/Andorra',
+ 'Europe/Astrakhan',
+ 'Europe/Athens',
+ 'Europe/Belfast',
+ 'Europe/Belgrade',
+ 'Europe/Berlin',
+ 'Europe/Bratislava',
+ 'Europe/Brussels',
+ 'Europe/Bucharest',
+ 'Europe/Budapest',
+ 'Europe/Busingen',
+ 'Europe/Chisinau',
+ 'Europe/Copenhagen',
+ 'Europe/Dublin',
+ 'Europe/Gibraltar',
+ 'Europe/Guernsey',
+ 'Europe/Helsinki',
+ 'Europe/Isle_of_Man',
+ 'Europe/Istanbul',
+ 'Europe/Jersey',
+ 'Europe/Kaliningrad',
+ 'Europe/Kiev',
+ 'Europe/Kirov',
+ 'Europe/Lisbon',
+ 'Europe/Ljubljana',
+ 'Europe/London',
+ 'Europe/Luxembourg',
+ 'Europe/Madrid',
+ 'Europe/Malta',
+ 'Europe/Mariehamn',
+ 'Europe/Minsk',
+ 'Europe/Monaco',
+ 'Europe/Moscow',
+ 'Europe/Nicosia',
+ 'Europe/Oslo',
+ 'Europe/Paris',
+ 'Europe/Podgorica',
+ 'Europe/Prague',
+ 'Europe/Riga',
+ 'Europe/Rome',
+ 'Europe/Samara',
+ 'Europe/San_Marino',
+ 'Europe/Sarajevo',
+ 'Europe/Saratov',
+ 'Europe/Simferopol',
+ 'Europe/Skopje',
+ 'Europe/Sofia',
+ 'Europe/Stockholm',
+ 'Europe/Tallinn',
+ 'Europe/Tirane',
+ 'Europe/Tiraspol',
+ 'Europe/Ulyanovsk',
+ 'Europe/Uzhgorod',
+ 'Europe/Vaduz',
+ 'Europe/Vatican',
+ 'Europe/Vienna',
+ 'Europe/Vilnius',
+ 'Europe/Volgograd',
+ 'Europe/Warsaw',
+ 'Europe/Zagreb',
+ 'Europe/Zaporozhye',
+ 'Europe/Zurich',
+ 'GB',
+ 'GB-Eire',
+ 'GMT',
+ 'GMT+0',
+ 'GMT-0',
+ 'GMT0',
+ 'Greenwich',
+ 'HST',
+ 'Hongkong',
+ 'Iceland',
+ 'Indian/Antananarivo',
+ 'Indian/Chagos',
+ 'Indian/Christmas',
+ 'Indian/Cocos',
+ 'Indian/Comoro',
+ 'Indian/Kerguelen',
+ 'Indian/Mahe',
+ 'Indian/Maldives',
+ 'Indian/Mauritius',
+ 'Indian/Mayotte',
+ 'Indian/Reunion',
+ 'Iran',
+ 'Israel',
+ 'Jamaica',
+ 'Japan',
+ 'Kwajalein',
+ 'Libya',
+ 'MET',
+ 'MST',
+ 'MST7MDT',
+ 'Mexico/BajaNorte',
+ 'Mexico/BajaSur',
+ 'Mexico/General',
+ 'NZ',
+ 'NZ-CHAT',
+ 'Navajo',
+ 'PRC',
+ 'PST8PDT',
+ 'Pacific/Apia',
+ 'Pacific/Auckland',
+ 'Pacific/Bougainville',
+ 'Pacific/Chatham',
+ 'Pacific/Chuuk',
+ 'Pacific/Easter',
+ 'Pacific/Efate',
+ 'Pacific/Enderbury',
+ 'Pacific/Fakaofo',
+ 'Pacific/Fiji',
+ 'Pacific/Funafuti',
+ 'Pacific/Galapagos',
+ 'Pacific/Gambier',
+ 'Pacific/Guadalcanal',
+ 'Pacific/Guam',
+ 'Pacific/Honolulu',
+ 'Pacific/Johnston',
+ 'Pacific/Kanton',
+ 'Pacific/Kiritimati',
+ 'Pacific/Kosrae',
+ 'Pacific/Kwajalein',
+ 'Pacific/Majuro',
+ 'Pacific/Marquesas',
+ 'Pacific/Midway',
+ 'Pacific/Nauru',
+ 'Pacific/Niue',
+ 'Pacific/Norfolk',
+ 'Pacific/Noumea',
+ 'Pacific/Pago_Pago',
+ 'Pacific/Palau',
+ 'Pacific/Pitcairn',
+ 'Pacific/Pohnpei',
+ 'Pacific/Ponape',
+ 'Pacific/Port_Moresby',
+ 'Pacific/Rarotonga',
+ 'Pacific/Saipan',
+ 'Pacific/Samoa',
+ 'Pacific/Tahiti',
+ 'Pacific/Tarawa',
+ 'Pacific/Tongatapu',
+ 'Pacific/Truk',
+ 'Pacific/Wake',
+ 'Pacific/Wallis',
+ 'Pacific/Yap',
+ 'Poland',
+ 'Portugal',
+ 'ROC',
+ 'ROK',
+ 'Singapore',
+ 'Turkey',
+ 'UCT',
+ 'US/Alaska',
+ 'US/Aleutian',
+ 'US/Arizona',
+ 'US/Central',
+ 'US/East-Indiana',
+ 'US/Eastern',
+ 'US/Hawaii',
+ 'US/Indiana-Starke',
+ 'US/Michigan',
+ 'US/Mountain',
+ 'US/Pacific',
+ 'US/Samoa',
+ 'UTC',
+ 'Universal',
+ 'W-SU',
+ 'WET',
+ 'Zulu']
# Rebind the static name list as a LazyList filtered to the zoneinfo
# resources actually shipped with this installation; the resource_exists
# filesystem checks are deferred until first use.
all_timezones = LazyList(
    tz for tz in all_timezones if resource_exists(tz))

# Set view of the same names, for O(1) membership tests.
all_timezones_set = LazySet(all_timezones)
+common_timezones = \
+['Africa/Abidjan',
+ 'Africa/Accra',
+ 'Africa/Addis_Ababa',
+ 'Africa/Algiers',
+ 'Africa/Asmara',
+ 'Africa/Bamako',
+ 'Africa/Bangui',
+ 'Africa/Banjul',
+ 'Africa/Bissau',
+ 'Africa/Blantyre',
+ 'Africa/Brazzaville',
+ 'Africa/Bujumbura',
+ 'Africa/Cairo',
+ 'Africa/Casablanca',
+ 'Africa/Ceuta',
+ 'Africa/Conakry',
+ 'Africa/Dakar',
+ 'Africa/Dar_es_Salaam',
+ 'Africa/Djibouti',
+ 'Africa/Douala',
+ 'Africa/El_Aaiun',
+ 'Africa/Freetown',
+ 'Africa/Gaborone',
+ 'Africa/Harare',
+ 'Africa/Johannesburg',
+ 'Africa/Juba',
+ 'Africa/Kampala',
+ 'Africa/Khartoum',
+ 'Africa/Kigali',
+ 'Africa/Kinshasa',
+ 'Africa/Lagos',
+ 'Africa/Libreville',
+ 'Africa/Lome',
+ 'Africa/Luanda',
+ 'Africa/Lubumbashi',
+ 'Africa/Lusaka',
+ 'Africa/Malabo',
+ 'Africa/Maputo',
+ 'Africa/Maseru',
+ 'Africa/Mbabane',
+ 'Africa/Mogadishu',
+ 'Africa/Monrovia',
+ 'Africa/Nairobi',
+ 'Africa/Ndjamena',
+ 'Africa/Niamey',
+ 'Africa/Nouakchott',
+ 'Africa/Ouagadougou',
+ 'Africa/Porto-Novo',
+ 'Africa/Sao_Tome',
+ 'Africa/Tripoli',
+ 'Africa/Tunis',
+ 'Africa/Windhoek',
+ 'America/Adak',
+ 'America/Anchorage',
+ 'America/Anguilla',
+ 'America/Antigua',
+ 'America/Araguaina',
+ 'America/Argentina/Buenos_Aires',
+ 'America/Argentina/Catamarca',
+ 'America/Argentina/Cordoba',
+ 'America/Argentina/Jujuy',
+ 'America/Argentina/La_Rioja',
+ 'America/Argentina/Mendoza',
+ 'America/Argentina/Rio_Gallegos',
+ 'America/Argentina/Salta',
+ 'America/Argentina/San_Juan',
+ 'America/Argentina/San_Luis',
+ 'America/Argentina/Tucuman',
+ 'America/Argentina/Ushuaia',
+ 'America/Aruba',
+ 'America/Asuncion',
+ 'America/Atikokan',
+ 'America/Bahia',
+ 'America/Bahia_Banderas',
+ 'America/Barbados',
+ 'America/Belem',
+ 'America/Belize',
+ 'America/Blanc-Sablon',
+ 'America/Boa_Vista',
+ 'America/Bogota',
+ 'America/Boise',
+ 'America/Cambridge_Bay',
+ 'America/Campo_Grande',
+ 'America/Cancun',
+ 'America/Caracas',
+ 'America/Cayenne',
+ 'America/Cayman',
+ 'America/Chicago',
+ 'America/Chihuahua',
+ 'America/Costa_Rica',
+ 'America/Creston',
+ 'America/Cuiaba',
+ 'America/Curacao',
+ 'America/Danmarkshavn',
+ 'America/Dawson',
+ 'America/Dawson_Creek',
+ 'America/Denver',
+ 'America/Detroit',
+ 'America/Dominica',
+ 'America/Edmonton',
+ 'America/Eirunepe',
+ 'America/El_Salvador',
+ 'America/Fort_Nelson',
+ 'America/Fortaleza',
+ 'America/Glace_Bay',
+ 'America/Goose_Bay',
+ 'America/Grand_Turk',
+ 'America/Grenada',
+ 'America/Guadeloupe',
+ 'America/Guatemala',
+ 'America/Guayaquil',
+ 'America/Guyana',
+ 'America/Halifax',
+ 'America/Havana',
+ 'America/Hermosillo',
+ 'America/Indiana/Indianapolis',
+ 'America/Indiana/Knox',
+ 'America/Indiana/Marengo',
+ 'America/Indiana/Petersburg',
+ 'America/Indiana/Tell_City',
+ 'America/Indiana/Vevay',
+ 'America/Indiana/Vincennes',
+ 'America/Indiana/Winamac',
+ 'America/Inuvik',
+ 'America/Iqaluit',
+ 'America/Jamaica',
+ 'America/Juneau',
+ 'America/Kentucky/Louisville',
+ 'America/Kentucky/Monticello',
+ 'America/Kralendijk',
+ 'America/La_Paz',
+ 'America/Lima',
+ 'America/Los_Angeles',
+ 'America/Lower_Princes',
+ 'America/Maceio',
+ 'America/Managua',
+ 'America/Manaus',
+ 'America/Marigot',
+ 'America/Martinique',
+ 'America/Matamoros',
+ 'America/Mazatlan',
+ 'America/Menominee',
+ 'America/Merida',
+ 'America/Metlakatla',
+ 'America/Mexico_City',
+ 'America/Miquelon',
+ 'America/Moncton',
+ 'America/Monterrey',
+ 'America/Montevideo',
+ 'America/Montserrat',
+ 'America/Nassau',
+ 'America/New_York',
+ 'America/Nipigon',
+ 'America/Nome',
+ 'America/Noronha',
+ 'America/North_Dakota/Beulah',
+ 'America/North_Dakota/Center',
+ 'America/North_Dakota/New_Salem',
+ 'America/Nuuk',
+ 'America/Ojinaga',
+ 'America/Panama',
+ 'America/Pangnirtung',
+ 'America/Paramaribo',
+ 'America/Phoenix',
+ 'America/Port-au-Prince',
+ 'America/Port_of_Spain',
+ 'America/Porto_Velho',
+ 'America/Puerto_Rico',
+ 'America/Punta_Arenas',
+ 'America/Rainy_River',
+ 'America/Rankin_Inlet',
+ 'America/Recife',
+ 'America/Regina',
+ 'America/Resolute',
+ 'America/Rio_Branco',
+ 'America/Santarem',
+ 'America/Santiago',
+ 'America/Santo_Domingo',
+ 'America/Sao_Paulo',
+ 'America/Scoresbysund',
+ 'America/Sitka',
+ 'America/St_Barthelemy',
+ 'America/St_Johns',
+ 'America/St_Kitts',
+ 'America/St_Lucia',
+ 'America/St_Thomas',
+ 'America/St_Vincent',
+ 'America/Swift_Current',
+ 'America/Tegucigalpa',
+ 'America/Thule',
+ 'America/Thunder_Bay',
+ 'America/Tijuana',
+ 'America/Toronto',
+ 'America/Tortola',
+ 'America/Vancouver',
+ 'America/Whitehorse',
+ 'America/Winnipeg',
+ 'America/Yakutat',
+ 'America/Yellowknife',
+ 'Antarctica/Casey',
+ 'Antarctica/Davis',
+ 'Antarctica/DumontDUrville',
+ 'Antarctica/Macquarie',
+ 'Antarctica/Mawson',
+ 'Antarctica/McMurdo',
+ 'Antarctica/Palmer',
+ 'Antarctica/Rothera',
+ 'Antarctica/Syowa',
+ 'Antarctica/Troll',
+ 'Antarctica/Vostok',
+ 'Arctic/Longyearbyen',
+ 'Asia/Aden',
+ 'Asia/Almaty',
+ 'Asia/Amman',
+ 'Asia/Anadyr',
+ 'Asia/Aqtau',
+ 'Asia/Aqtobe',
+ 'Asia/Ashgabat',
+ 'Asia/Atyrau',
+ 'Asia/Baghdad',
+ 'Asia/Bahrain',
+ 'Asia/Baku',
+ 'Asia/Bangkok',
+ 'Asia/Barnaul',
+ 'Asia/Beirut',
+ 'Asia/Bishkek',
+ 'Asia/Brunei',
+ 'Asia/Chita',
+ 'Asia/Choibalsan',
+ 'Asia/Colombo',
+ 'Asia/Damascus',
+ 'Asia/Dhaka',
+ 'Asia/Dili',
+ 'Asia/Dubai',
+ 'Asia/Dushanbe',
+ 'Asia/Famagusta',
+ 'Asia/Gaza',
+ 'Asia/Hebron',
+ 'Asia/Ho_Chi_Minh',
+ 'Asia/Hong_Kong',
+ 'Asia/Hovd',
+ 'Asia/Irkutsk',
+ 'Asia/Jakarta',
+ 'Asia/Jayapura',
+ 'Asia/Jerusalem',
+ 'Asia/Kabul',
+ 'Asia/Kamchatka',
+ 'Asia/Karachi',
+ 'Asia/Kathmandu',
+ 'Asia/Khandyga',
+ 'Asia/Kolkata',
+ 'Asia/Krasnoyarsk',
+ 'Asia/Kuala_Lumpur',
+ 'Asia/Kuching',
+ 'Asia/Kuwait',
+ 'Asia/Macau',
+ 'Asia/Magadan',
+ 'Asia/Makassar',
+ 'Asia/Manila',
+ 'Asia/Muscat',
+ 'Asia/Nicosia',
+ 'Asia/Novokuznetsk',
+ 'Asia/Novosibirsk',
+ 'Asia/Omsk',
+ 'Asia/Oral',
+ 'Asia/Phnom_Penh',
+ 'Asia/Pontianak',
+ 'Asia/Pyongyang',
+ 'Asia/Qatar',
+ 'Asia/Qostanay',
+ 'Asia/Qyzylorda',
+ 'Asia/Riyadh',
+ 'Asia/Sakhalin',
+ 'Asia/Samarkand',
+ 'Asia/Seoul',
+ 'Asia/Shanghai',
+ 'Asia/Singapore',
+ 'Asia/Srednekolymsk',
+ 'Asia/Taipei',
+ 'Asia/Tashkent',
+ 'Asia/Tbilisi',
+ 'Asia/Tehran',
+ 'Asia/Thimphu',
+ 'Asia/Tokyo',
+ 'Asia/Tomsk',
+ 'Asia/Ulaanbaatar',
+ 'Asia/Urumqi',
+ 'Asia/Ust-Nera',
+ 'Asia/Vientiane',
+ 'Asia/Vladivostok',
+ 'Asia/Yakutsk',
+ 'Asia/Yangon',
+ 'Asia/Yekaterinburg',
+ 'Asia/Yerevan',
+ 'Atlantic/Azores',
+ 'Atlantic/Bermuda',
+ 'Atlantic/Canary',
+ 'Atlantic/Cape_Verde',
+ 'Atlantic/Faroe',
+ 'Atlantic/Madeira',
+ 'Atlantic/Reykjavik',
+ 'Atlantic/South_Georgia',
+ 'Atlantic/St_Helena',
+ 'Atlantic/Stanley',
+ 'Australia/Adelaide',
+ 'Australia/Brisbane',
+ 'Australia/Broken_Hill',
+ 'Australia/Darwin',
+ 'Australia/Eucla',
+ 'Australia/Hobart',
+ 'Australia/Lindeman',
+ 'Australia/Lord_Howe',
+ 'Australia/Melbourne',
+ 'Australia/Perth',
+ 'Australia/Sydney',
+ 'Canada/Atlantic',
+ 'Canada/Central',
+ 'Canada/Eastern',
+ 'Canada/Mountain',
+ 'Canada/Newfoundland',
+ 'Canada/Pacific',
+ 'Europe/Amsterdam',
+ 'Europe/Andorra',
+ 'Europe/Astrakhan',
+ 'Europe/Athens',
+ 'Europe/Belgrade',
+ 'Europe/Berlin',
+ 'Europe/Bratislava',
+ 'Europe/Brussels',
+ 'Europe/Bucharest',
+ 'Europe/Budapest',
+ 'Europe/Busingen',
+ 'Europe/Chisinau',
+ 'Europe/Copenhagen',
+ 'Europe/Dublin',
+ 'Europe/Gibraltar',
+ 'Europe/Guernsey',
+ 'Europe/Helsinki',
+ 'Europe/Isle_of_Man',
+ 'Europe/Istanbul',
+ 'Europe/Jersey',
+ 'Europe/Kaliningrad',
+ 'Europe/Kiev',
+ 'Europe/Kirov',
+ 'Europe/Lisbon',
+ 'Europe/Ljubljana',
+ 'Europe/London',
+ 'Europe/Luxembourg',
+ 'Europe/Madrid',
+ 'Europe/Malta',
+ 'Europe/Mariehamn',
+ 'Europe/Minsk',
+ 'Europe/Monaco',
+ 'Europe/Moscow',
+ 'Europe/Oslo',
+ 'Europe/Paris',
+ 'Europe/Podgorica',
+ 'Europe/Prague',
+ 'Europe/Riga',
+ 'Europe/Rome',
+ 'Europe/Samara',
+ 'Europe/San_Marino',
+ 'Europe/Sarajevo',
+ 'Europe/Saratov',
+ 'Europe/Simferopol',
+ 'Europe/Skopje',
+ 'Europe/Sofia',
+ 'Europe/Stockholm',
+ 'Europe/Tallinn',
+ 'Europe/Tirane',
+ 'Europe/Ulyanovsk',
+ 'Europe/Uzhgorod',
+ 'Europe/Vaduz',
+ 'Europe/Vatican',
+ 'Europe/Vienna',
+ 'Europe/Vilnius',
+ 'Europe/Volgograd',
+ 'Europe/Warsaw',
+ 'Europe/Zagreb',
+ 'Europe/Zaporozhye',
+ 'Europe/Zurich',
+ 'GMT',
+ 'Indian/Antananarivo',
+ 'Indian/Chagos',
+ 'Indian/Christmas',
+ 'Indian/Cocos',
+ 'Indian/Comoro',
+ 'Indian/Kerguelen',
+ 'Indian/Mahe',
+ 'Indian/Maldives',
+ 'Indian/Mauritius',
+ 'Indian/Mayotte',
+ 'Indian/Reunion',
+ 'Pacific/Apia',
+ 'Pacific/Auckland',
+ 'Pacific/Bougainville',
+ 'Pacific/Chatham',
+ 'Pacific/Chuuk',
+ 'Pacific/Easter',
+ 'Pacific/Efate',
+ 'Pacific/Fakaofo',
+ 'Pacific/Fiji',
+ 'Pacific/Funafuti',
+ 'Pacific/Galapagos',
+ 'Pacific/Gambier',
+ 'Pacific/Guadalcanal',
+ 'Pacific/Guam',
+ 'Pacific/Honolulu',
+ 'Pacific/Kanton',
+ 'Pacific/Kiritimati',
+ 'Pacific/Kosrae',
+ 'Pacific/Kwajalein',
+ 'Pacific/Majuro',
+ 'Pacific/Marquesas',
+ 'Pacific/Midway',
+ 'Pacific/Nauru',
+ 'Pacific/Niue',
+ 'Pacific/Norfolk',
+ 'Pacific/Noumea',
+ 'Pacific/Pago_Pago',
+ 'Pacific/Palau',
+ 'Pacific/Pitcairn',
+ 'Pacific/Pohnpei',
+ 'Pacific/Port_Moresby',
+ 'Pacific/Rarotonga',
+ 'Pacific/Saipan',
+ 'Pacific/Tahiti',
+ 'Pacific/Tarawa',
+ 'Pacific/Tongatapu',
+ 'Pacific/Wake',
+ 'Pacific/Wallis',
+ 'US/Alaska',
+ 'US/Arizona',
+ 'US/Central',
+ 'US/Eastern',
+ 'US/Hawaii',
+ 'US/Mountain',
+ 'US/Pacific',
+ 'UTC']
# Rebind the static name list as a LazyList restricted to zones present in
# all_timezones (i.e. actually shipped with this installation).
common_timezones = LazyList(
    tz for tz in common_timezones if tz in all_timezones)

# Set view of the same names, for O(1) membership tests.
common_timezones_set = LazySet(common_timezones)
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pytz/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f88a552c0710d2b0e59d74cab77dafdefa4a9465
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/__pycache__/exceptions.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pytz/__pycache__/exceptions.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..66818362f5f4907d788e2196e26269f5513f91ba
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/__pycache__/exceptions.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/__pycache__/lazy.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pytz/__pycache__/lazy.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9e13f0a501773c70d06b1752b4ea70e7da918165
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/__pycache__/lazy.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/__pycache__/reference.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pytz/__pycache__/reference.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a0ded73715cd87dc7bbfc01d540f3051704a57cd
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/__pycache__/reference.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/__pycache__/tzfile.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pytz/__pycache__/tzfile.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3a4ca27ec1396d8a33a8fe7bced5875b789b2ee4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/__pycache__/tzfile.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/__pycache__/tzinfo.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/pytz/__pycache__/tzinfo.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b3ebe6f2f28cb675be32b0bbff43d1670f39a01a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/__pycache__/tzinfo.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/exceptions.py b/monEnvTP/lib/python3.8/site-packages/pytz/exceptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..4b20bde9ff9240ce8cc578e480f4d9aa8555bab4
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pytz/exceptions.py
@@ -0,0 +1,59 @@
+'''
+Custom exceptions raised by pytz.
+'''
+
# Names exported by ``from pytz.exceptions import *``.
# NOTE(review): the base class ``Error`` defined below is absent from this
# list — presumably deliberate, but confirm before relying on star-imports.
__all__ = [
    'UnknownTimeZoneError', 'InvalidTimeError', 'AmbiguousTimeError',
    'NonExistentTimeError',
]
+
+
# Root of the pytz exception hierarchy; catch this to handle any pytz error.
class Error(Exception):
    '''Base class for all exceptions raised by the pytz library'''
+
+
# Dual inheritance: KeyError for backwards compatibility (see docstring),
# Error so it participates in the pytz exception hierarchy.
class UnknownTimeZoneError(KeyError, Error):
    '''Exception raised when pytz is passed an unknown timezone.

    >>> isinstance(UnknownTimeZoneError(), LookupError)
    True

    This class is actually a subclass of KeyError to provide backwards
    compatibility with code relying on the undocumented behavior of earlier
    pytz releases.

    >>> isinstance(UnknownTimeZoneError(), KeyError)
    True

    And also a subclass of pytz.exceptions.Error, as are other pytz
    exceptions.

    >>> isinstance(UnknownTimeZoneError(), Error)
    True

    '''
    pass
+
+
# Common base for Ambiguous/NonExistentTimeError below.
class InvalidTimeError(Error):
    '''Base class for invalid time exceptions.'''
+
+
class AmbiguousTimeError(InvalidTimeError):
    '''Exception raised when attempting to create an ambiguous wallclock time.

    At the end of a DST transition period, a particular wallclock time will
    occur twice (once before the clocks are set back, once after). Both
    possibilities may be correct, unless further information is supplied.

    See DstTzInfo.normalize() for more info
    '''
+
+
class NonExistentTimeError(InvalidTimeError):
    '''Exception raised when attempting to create a wallclock time that
    cannot exist.

    At the start of a DST transition period, the wallclock time jumps forward.
    The instants jumped over never occur.
    '''
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/lazy.py b/monEnvTP/lib/python3.8/site-packages/pytz/lazy.py
new file mode 100644
index 0000000000000000000000000000000000000000..39344fc1f8c77d5ec43539d0c8e655f4b5d7d6f6
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pytz/lazy.py
@@ -0,0 +1,172 @@
+from threading import RLock
+try:
+ from collections.abc import Mapping as DictMixin
+except ImportError: # Python < 3.3
+ try:
+ from UserDict import DictMixin # Python 2
+ except ImportError: # Python 3.0-3.3
+ from collections import Mapping as DictMixin
+
+
# With lazy loading, we might end up with multiple threads triggering
# it at the same time. We need a lock.
# (An RLock, so a _fill() implementation may safely re-enter the lazy
# machinery that holds it.)
_fill_lock = RLock()
+
+
class LazyDict(DictMixin):
    """Dictionary populated on first use.

    Subclasses provide ``_fill()``, which must set ``self.data`` to a real
    dict. Population is deferred until the first read access and is guarded
    by a module-wide lock so concurrent first accesses fill only once.
    """

    # None until _fill() runs; then the backing dict.
    data = None

    def _ensure_filled(self):
        """Populate self.data on first use (thread-safe, idempotent)."""
        # Double-checked locking: the cheap unlocked test avoids taking the
        # lock on every access after the dict has been filled; the second
        # test under the lock prevents duplicate fills.
        if self.data is None:
            _fill_lock.acquire()
            try:
                if self.data is None:
                    self._fill()
            finally:
                _fill_lock.release()

    def __getitem__(self, key):
        self._ensure_filled()
        # Lookup is case-insensitive: keys are stored upper-case (e.g. the
        # ISO country codes from zone.tab / iso3166.tab).
        return self.data[key.upper()]

    def __contains__(self, key):
        self._ensure_filled()
        # NOTE(review): unlike __getitem__, membership does NOT upper-case
        # the key — preserved as-is for backward compatibility.
        return key in self.data

    def __iter__(self):
        self._ensure_filled()
        return iter(self.data)

    def __len__(self):
        self._ensure_filled()
        return len(self.data)

    def keys(self):
        self._ensure_filled()
        return self.data.keys()
+
+
class LazyList(list):
    """List populated on first use.

    The constructor returns an instance of a throwaway per-call subclass
    whose list methods are proxies; the first method call materializes the
    contents from the fill iterable (under _fill_lock) and then deletes the
    proxies, so subsequent calls hit the plain list methods directly.
    """

    # Candidate method names to proxy; filtered below to those that exist
    # on ``list`` in the running Python version.
    _props = [
        '__str__', '__repr__', '__unicode__',
        '__hash__', '__sizeof__', '__cmp__',
        '__lt__', '__le__', '__eq__', '__ne__', '__gt__', '__ge__',
        'append', 'count', 'index', 'extend', 'insert', 'pop', 'remove',
        'reverse', 'sort', '__add__', '__radd__', '__iadd__', '__mul__',
        '__rmul__', '__imul__', '__contains__', '__len__', '__nonzero__',
        '__getitem__', '__setitem__', '__delitem__', '__iter__',
        '__reversed__', '__getslice__', '__setslice__', '__delslice__']

    def __new__(cls, fill_iter=None):

        # NOTE(review): with no fill iterable a plain ``list`` (not a
        # LazyList) is returned — preserved behavior.
        if fill_iter is None:
            return list()

        # We need a new class as we will be dynamically messing with its
        # methods.
        class LazyList(list):
            pass

        # Box the iterable in a one-element list so the closure can pop it,
        # which doubles as the "already filled" flag.
        fill_iter = [fill_iter]

        def lazy(name):
            def _lazy(self, *args, **kw):
                _fill_lock.acquire()
                try:
                    if len(fill_iter) > 0:
                        # First call: materialize contents, then strip the
                        # proxy methods so future calls go straight to list.
                        list.extend(self, fill_iter.pop())
                        for method_name in cls._props:
                            delattr(LazyList, method_name)
                finally:
                    _fill_lock.release()
                return getattr(list, name)(self, *args, **kw)
            return _lazy

        # Install a lazy proxy for every supported list method.
        for name in cls._props:
            setattr(LazyList, name, lazy(name))

        new_list = LazyList()
        return new_list

# Not all versions of Python declare the same magic methods.
# Filter out properties that don't exist in this version of Python
# from the list.
LazyList._props = [prop for prop in LazyList._props if hasattr(list, prop)]
+
+
class LazySet(set):
    """Set populated on first use.

    Same lazy-proxy trick as LazyList above: a throwaway per-call subclass
    proxies every set method; the first call materializes the contents
    (under _fill_lock) and removes the proxies.
    """

    # Candidate method names to proxy; filtered below to those that exist
    # on ``set`` in the running Python version.
    _props = (
        '__str__', '__repr__', '__unicode__',
        '__hash__', '__sizeof__', '__cmp__',
        '__lt__', '__le__', '__eq__', '__ne__', '__gt__', '__ge__',
        '__contains__', '__len__', '__nonzero__',
        '__getitem__', '__setitem__', '__delitem__', '__iter__',
        '__sub__', '__and__', '__xor__', '__or__',
        '__rsub__', '__rand__', '__rxor__', '__ror__',
        '__isub__', '__iand__', '__ixor__', '__ior__',
        'add', 'clear', 'copy', 'difference', 'difference_update',
        'discard', 'intersection', 'intersection_update', 'isdisjoint',
        'issubset', 'issuperset', 'pop', 'remove',
        'symmetric_difference', 'symmetric_difference_update',
        'union', 'update')

    def __new__(cls, fill_iter=None):

        # NOTE(review): with no fill iterable a plain ``set`` (not a
        # LazySet) is returned — preserved behavior.
        if fill_iter is None:
            return set()

        class LazySet(set):
            pass

        # Box the iterable so the closure can pop it; an empty box means
        # the set has already been filled.
        fill_iter = [fill_iter]

        def lazy(name):
            def _lazy(self, *args, **kw):
                _fill_lock.acquire()
                try:
                    if len(fill_iter) > 0:
                        # First call: materialize, then strip the proxies.
                        for i in fill_iter.pop():
                            set.add(self, i)
                        for method_name in cls._props:
                            delattr(LazySet, method_name)
                finally:
                    _fill_lock.release()
                return getattr(set, name)(self, *args, **kw)
            return _lazy

        # Install a lazy proxy for every supported set method.
        for name in cls._props:
            setattr(LazySet, name, lazy(name))

        new_set = LazySet()
        return new_set

# Not all versions of Python declare the same magic methods.
# Filter out properties that don't exist in this version of Python
# from the list.
LazySet._props = [prop for prop in LazySet._props if hasattr(set, prop)]
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/reference.py b/monEnvTP/lib/python3.8/site-packages/pytz/reference.py
new file mode 100644
index 0000000000000000000000000000000000000000..f765ca0af0b24e66dc3b7d51b9bf97e71b2b67aa
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pytz/reference.py
@@ -0,0 +1,140 @@
+'''
+Reference tzinfo implementations from the Python docs.
+Used for testing against as they are only correct for the years
+1987 to 2006. Do not use these for real code.
+'''
+
+from datetime import tzinfo, timedelta, datetime
+from pytz import HOUR, ZERO, UTC
+
+__all__ = [
+ 'FixedOffset',
+ 'LocalTimezone',
+ 'USTimeZone',
+ 'Eastern',
+ 'Central',
+ 'Mountain',
+ 'Pacific',
+ 'UTC'
+]
+
+
+# A class building tzinfo objects for fixed-offset time zones.
+# Note that FixedOffset(0, "UTC") is a different way to build a
+# UTC tzinfo object.
+class FixedOffset(tzinfo):
+ """Fixed offset in minutes east from UTC."""
+
+ def __init__(self, offset, name):
+ self.__offset = timedelta(minutes=offset)
+ self.__name = name
+
+ def utcoffset(self, dt):
+ return self.__offset
+
+ def tzname(self, dt):
+ return self.__name
+
+ def dst(self, dt):
+ return ZERO
+
+
+import time as _time
+
+STDOFFSET = timedelta(seconds=-_time.timezone)
+if _time.daylight:
+ DSTOFFSET = timedelta(seconds=-_time.altzone)
+else:
+ DSTOFFSET = STDOFFSET
+
+DSTDIFF = DSTOFFSET - STDOFFSET
+
+
+# A class capturing the platform's idea of local time.
+class LocalTimezone(tzinfo):
+
+ def utcoffset(self, dt):
+ if self._isdst(dt):
+ return DSTOFFSET
+ else:
+ return STDOFFSET
+
+ def dst(self, dt):
+ if self._isdst(dt):
+ return DSTDIFF
+ else:
+ return ZERO
+
+ def tzname(self, dt):
+ return _time.tzname[self._isdst(dt)]
+
+ def _isdst(self, dt):
+ tt = (dt.year, dt.month, dt.day,
+ dt.hour, dt.minute, dt.second,
+ dt.weekday(), 0, -1)
+ stamp = _time.mktime(tt)
+ tt = _time.localtime(stamp)
+ return tt.tm_isdst > 0
+
+Local = LocalTimezone()
+
+
+def first_sunday_on_or_after(dt):
+ days_to_go = 6 - dt.weekday()
+ if days_to_go:
+ dt += timedelta(days_to_go)
+ return dt
+
+
+# In the US, DST starts at 2am (standard time) on the first Sunday in April.
+DSTSTART = datetime(1, 4, 1, 2)
+# and ends at 2am (DST time; 1am standard time) on the last Sunday of Oct.
+# which is the first Sunday on or after Oct 25.
+DSTEND = datetime(1, 10, 25, 1)
+
+
+# A complete implementation of current DST rules for major US time zones.
+class USTimeZone(tzinfo):
+
+ def __init__(self, hours, reprname, stdname, dstname):
+ self.stdoffset = timedelta(hours=hours)
+ self.reprname = reprname
+ self.stdname = stdname
+ self.dstname = dstname
+
+ def __repr__(self):
+ return self.reprname
+
+ def tzname(self, dt):
+ if self.dst(dt):
+ return self.dstname
+ else:
+ return self.stdname
+
+ def utcoffset(self, dt):
+ return self.stdoffset + self.dst(dt)
+
+ def dst(self, dt):
+ if dt is None or dt.tzinfo is None:
+ # An exception may be sensible here, in one or both cases.
+ # It depends on how you want to treat them. The default
+ # fromutc() implementation (called by the default astimezone()
+ # implementation) passes a datetime with dt.tzinfo is self.
+ return ZERO
+ assert dt.tzinfo is self
+
+ # Find first Sunday in April & the last in October.
+ start = first_sunday_on_or_after(DSTSTART.replace(year=dt.year))
+ end = first_sunday_on_or_after(DSTEND.replace(year=dt.year))
+
+ # Can't compare naive to aware objects, so strip the timezone from
+ # dt first.
+ if start <= dt.replace(tzinfo=None) < end:
+ return HOUR
+ else:
+ return ZERO
+
+Eastern = USTimeZone(-5, "Eastern", "EST", "EDT")
+Central = USTimeZone(-6, "Central", "CST", "CDT")
+Mountain = USTimeZone(-7, "Mountain", "MST", "MDT")
+Pacific = USTimeZone(-8, "Pacific", "PST", "PDT")
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/tzfile.py b/monEnvTP/lib/python3.8/site-packages/pytz/tzfile.py
new file mode 100644
index 0000000000000000000000000000000000000000..99e74489b859e21fcaa68e93089035c3d81a73c8
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pytz/tzfile.py
@@ -0,0 +1,133 @@
+'''
+$Id: tzfile.py,v 1.8 2004/06/03 00:15:24 zenzen Exp $
+'''
+
+from datetime import datetime
+from struct import unpack, calcsize
+
+from pytz.tzinfo import StaticTzInfo, DstTzInfo, memorized_ttinfo
+from pytz.tzinfo import memorized_datetime, memorized_timedelta
+
+
+def _byte_string(s):
+ """Cast a string or byte string to an ASCII byte string."""
+ return s.encode('ASCII')
+
+_NULL = _byte_string('\0')
+
+
+def _std_string(s):
+ """Cast a string or byte string to an ASCII string."""
+ return str(s.decode('ASCII'))
+
+
+def build_tzinfo(zone, fp):
+ head_fmt = '>4s c 15x 6l'
+ head_size = calcsize(head_fmt)
+ (magic, format, ttisgmtcnt, ttisstdcnt, leapcnt, timecnt,
+ typecnt, charcnt) = unpack(head_fmt, fp.read(head_size))
+
+ # Make sure it is a tzfile(5) file
+ assert magic == _byte_string('TZif'), 'Got magic %s' % repr(magic)
+
+ # Read out the transition times, localtime indices and ttinfo structures.
+ data_fmt = '>%(timecnt)dl %(timecnt)dB %(ttinfo)s %(charcnt)ds' % dict(
+ timecnt=timecnt, ttinfo='lBB' * typecnt, charcnt=charcnt)
+ data_size = calcsize(data_fmt)
+ data = unpack(data_fmt, fp.read(data_size))
+
+ # make sure we unpacked the right number of values
+ assert len(data) == 2 * timecnt + 3 * typecnt + 1
+ transitions = [memorized_datetime(trans)
+ for trans in data[:timecnt]]
+ lindexes = list(data[timecnt:2 * timecnt])
+ ttinfo_raw = data[2 * timecnt:-1]
+ tznames_raw = data[-1]
+ del data
+
+ # Process ttinfo into separate structs
+ ttinfo = []
+ tznames = {}
+ i = 0
+ while i < len(ttinfo_raw):
+ # have we looked up this timezone name yet?
+ tzname_offset = ttinfo_raw[i + 2]
+ if tzname_offset not in tznames:
+ nul = tznames_raw.find(_NULL, tzname_offset)
+ if nul < 0:
+ nul = len(tznames_raw)
+ tznames[tzname_offset] = _std_string(
+ tznames_raw[tzname_offset:nul])
+ ttinfo.append((ttinfo_raw[i],
+ bool(ttinfo_raw[i + 1]),
+ tznames[tzname_offset]))
+ i += 3
+
+ # Now build the timezone object
+ if len(ttinfo) == 1 or len(transitions) == 0:
+ ttinfo[0][0], ttinfo[0][2]
+ cls = type(zone, (StaticTzInfo,), dict(
+ zone=zone,
+ _utcoffset=memorized_timedelta(ttinfo[0][0]),
+ _tzname=ttinfo[0][2]))
+ else:
+ # Early dates use the first standard time ttinfo
+ i = 0
+ while ttinfo[i][1]:
+ i += 1
+ if ttinfo[i] == ttinfo[lindexes[0]]:
+ transitions[0] = datetime.min
+ else:
+ transitions.insert(0, datetime.min)
+ lindexes.insert(0, i)
+
+ # calculate transition info
+ transition_info = []
+ for i in range(len(transitions)):
+ inf = ttinfo[lindexes[i]]
+ utcoffset = inf[0]
+ if not inf[1]:
+ dst = 0
+ else:
+ for j in range(i - 1, -1, -1):
+ prev_inf = ttinfo[lindexes[j]]
+ if not prev_inf[1]:
+ break
+ dst = inf[0] - prev_inf[0] # dst offset
+
+ # Bad dst? Look further. DST > 24 hours happens when
+ # a timzone has moved across the international dateline.
+ if dst <= 0 or dst > 3600 * 3:
+ for j in range(i + 1, len(transitions)):
+ stdinf = ttinfo[lindexes[j]]
+ if not stdinf[1]:
+ dst = inf[0] - stdinf[0]
+ if dst > 0:
+ break # Found a useful std time.
+
+ tzname = inf[2]
+
+ # Round utcoffset and dst to the nearest minute or the
+ # datetime library will complain. Conversions to these timezones
+ # might be up to plus or minus 30 seconds out, but it is
+ # the best we can do.
+ utcoffset = int((utcoffset + 30) // 60) * 60
+ dst = int((dst + 30) // 60) * 60
+ transition_info.append(memorized_ttinfo(utcoffset, dst, tzname))
+
+ cls = type(zone, (DstTzInfo,), dict(
+ zone=zone,
+ _utc_transition_times=transitions,
+ _transition_info=transition_info))
+
+ return cls()
+
+if __name__ == '__main__':
+ import os.path
+ from pprint import pprint
+ base = os.path.join(os.path.dirname(__file__), 'zoneinfo')
+ tz = build_tzinfo('Australia/Melbourne',
+ open(os.path.join(base, 'Australia', 'Melbourne'), 'rb'))
+ tz = build_tzinfo('US/Eastern',
+ open(os.path.join(base, 'US', 'Eastern'), 'rb'))
+ pprint(tz._utc_transition_times)
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/tzinfo.py b/monEnvTP/lib/python3.8/site-packages/pytz/tzinfo.py
new file mode 100644
index 0000000000000000000000000000000000000000..725978d53720202bf7b1a64f356f47c49d42fd92
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pytz/tzinfo.py
@@ -0,0 +1,577 @@
+'''Base classes and helpers for building zone specific tzinfo classes'''
+
+from datetime import datetime, timedelta, tzinfo
+from bisect import bisect_right
+try:
+ set
+except NameError:
+ from sets import Set as set
+
+import pytz
+from pytz.exceptions import AmbiguousTimeError, NonExistentTimeError
+
+__all__ = []
+
+_timedelta_cache = {}
+
+
+def memorized_timedelta(seconds):
+ '''Create only one instance of each distinct timedelta'''
+ try:
+ return _timedelta_cache[seconds]
+ except KeyError:
+ delta = timedelta(seconds=seconds)
+ _timedelta_cache[seconds] = delta
+ return delta
+
+_epoch = datetime.utcfromtimestamp(0)
+_datetime_cache = {0: _epoch}
+
+
+def memorized_datetime(seconds):
+ '''Create only one instance of each distinct datetime'''
+ try:
+ return _datetime_cache[seconds]
+ except KeyError:
+ # NB. We can't just do datetime.utcfromtimestamp(seconds) as this
+ # fails with negative values under Windows (Bug #90096)
+ dt = _epoch + timedelta(seconds=seconds)
+ _datetime_cache[seconds] = dt
+ return dt
+
+_ttinfo_cache = {}
+
+
+def memorized_ttinfo(*args):
+ '''Create only one instance of each distinct tuple'''
+ try:
+ return _ttinfo_cache[args]
+ except KeyError:
+ ttinfo = (
+ memorized_timedelta(args[0]),
+ memorized_timedelta(args[1]),
+ args[2]
+ )
+ _ttinfo_cache[args] = ttinfo
+ return ttinfo
+
+_notime = memorized_timedelta(0)
+
+
+def _to_seconds(td):
+ '''Convert a timedelta to seconds'''
+ return td.seconds + td.days * 24 * 60 * 60
+
+
+class BaseTzInfo(tzinfo):
+ # Overridden in subclass
+ _utcoffset = None
+ _tzname = None
+ zone = None
+
+ def __str__(self):
+ return self.zone
+
+
+class StaticTzInfo(BaseTzInfo):
+ '''A timezone that has a constant offset from UTC
+
+ These timezones are rare, as most locations have changed their
+ offset at some point in their history
+ '''
+ def fromutc(self, dt):
+ '''See datetime.tzinfo.fromutc'''
+ if dt.tzinfo is not None and dt.tzinfo is not self:
+ raise ValueError('fromutc: dt.tzinfo is not self')
+ return (dt + self._utcoffset).replace(tzinfo=self)
+
+ def utcoffset(self, dt, is_dst=None):
+ '''See datetime.tzinfo.utcoffset
+
+ is_dst is ignored for StaticTzInfo, and exists only to
+ retain compatibility with DstTzInfo.
+ '''
+ return self._utcoffset
+
+ def dst(self, dt, is_dst=None):
+ '''See datetime.tzinfo.dst
+
+ is_dst is ignored for StaticTzInfo, and exists only to
+ retain compatibility with DstTzInfo.
+ '''
+ return _notime
+
+ def tzname(self, dt, is_dst=None):
+ '''See datetime.tzinfo.tzname
+
+ is_dst is ignored for StaticTzInfo, and exists only to
+ retain compatibility with DstTzInfo.
+ '''
+ return self._tzname
+
+ def localize(self, dt, is_dst=False):
+ '''Convert naive time to local time'''
+ if dt.tzinfo is not None:
+ raise ValueError('Not naive datetime (tzinfo is already set)')
+ return dt.replace(tzinfo=self)
+
+ def normalize(self, dt, is_dst=False):
+ '''Correct the timezone information on the given datetime.
+
+ This is normally a no-op, as StaticTzInfo timezones never have
+ ambiguous cases to correct:
+
+ >>> from pytz import timezone
+ >>> gmt = timezone('GMT')
+ >>> isinstance(gmt, StaticTzInfo)
+ True
+ >>> dt = datetime(2011, 5, 8, 1, 2, 3, tzinfo=gmt)
+ >>> gmt.normalize(dt) is dt
+ True
+
+ The supported method of converting between timezones is to use
+ datetime.astimezone(). Currently normalize() also works:
+
+ >>> la = timezone('America/Los_Angeles')
+ >>> dt = la.localize(datetime(2011, 5, 7, 1, 2, 3))
+ >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)'
+ >>> gmt.normalize(dt).strftime(fmt)
+ '2011-05-07 08:02:03 GMT (+0000)'
+ '''
+ if dt.tzinfo is self:
+ return dt
+ if dt.tzinfo is None:
+ raise ValueError('Naive time - no tzinfo set')
+ return dt.astimezone(self)
+
+ def __repr__(self):
+ return '<StaticTzInfo %r>' % (self.zone,)
+
+ def __reduce__(self):
+ # Special pickle to zone remains a singleton and to cope with
+ # database changes.
+ return pytz._p, (self.zone,)
+
+
+class DstTzInfo(BaseTzInfo):
+ '''A timezone that has a variable offset from UTC
+
+ The offset might change if daylight saving time comes into effect,
+ or at a point in history when the region decides to change their
+ timezone definition.
+ '''
+ # Overridden in subclass
+
+ # Sorted list of DST transition times, UTC
+ _utc_transition_times = None
+
+ # [(utcoffset, dstoffset, tzname)] corresponding to
+ # _utc_transition_times entries
+ _transition_info = None
+
+ zone = None
+
+ # Set in __init__
+
+ _tzinfos = None
+ _dst = None # DST offset
+
+ def __init__(self, _inf=None, _tzinfos=None):
+ if _inf:
+ self._tzinfos = _tzinfos
+ self._utcoffset, self._dst, self._tzname = _inf
+ else:
+ _tzinfos = {}
+ self._tzinfos = _tzinfos
+ self._utcoffset, self._dst, self._tzname = (
+ self._transition_info[0])
+ _tzinfos[self._transition_info[0]] = self
+ for inf in self._transition_info[1:]:
+ if inf not in _tzinfos:
+ _tzinfos[inf] = self.__class__(inf, _tzinfos)
+
+ def fromutc(self, dt):
+ '''See datetime.tzinfo.fromutc'''
+ if (dt.tzinfo is not None and
+ getattr(dt.tzinfo, '_tzinfos', None) is not self._tzinfos):
+ raise ValueError('fromutc: dt.tzinfo is not self')
+ dt = dt.replace(tzinfo=None)
+ idx = max(0, bisect_right(self._utc_transition_times, dt) - 1)
+ inf = self._transition_info[idx]
+ return (dt + inf[0]).replace(tzinfo=self._tzinfos[inf])
+
+ def normalize(self, dt):
+ '''Correct the timezone information on the given datetime
+
+ If date arithmetic crosses DST boundaries, the tzinfo
+ is not magically adjusted. This method normalizes the
+ tzinfo to the correct one.
+
+ To test, first we need to do some setup
+
+ >>> from pytz import timezone
+ >>> utc = timezone('UTC')
+ >>> eastern = timezone('US/Eastern')
+ >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)'
+
+ We next create a datetime right on an end-of-DST transition point,
+ the instant when the wallclocks are wound back one hour.
+
+ >>> utc_dt = datetime(2002, 10, 27, 6, 0, 0, tzinfo=utc)
+ >>> loc_dt = utc_dt.astimezone(eastern)
+ >>> loc_dt.strftime(fmt)
+ '2002-10-27 01:00:00 EST (-0500)'
+
+ Now, if we subtract a few minutes from it, note that the timezone
+ information has not changed.
+
+ >>> before = loc_dt - timedelta(minutes=10)
+ >>> before.strftime(fmt)
+ '2002-10-27 00:50:00 EST (-0500)'
+
+ But we can fix that by calling the normalize method
+
+ >>> before = eastern.normalize(before)
+ >>> before.strftime(fmt)
+ '2002-10-27 01:50:00 EDT (-0400)'
+
+ The supported method of converting between timezones is to use
+ datetime.astimezone(). Currently, normalize() also works:
+
+ >>> th = timezone('Asia/Bangkok')
+ >>> am = timezone('Europe/Amsterdam')
+ >>> dt = th.localize(datetime(2011, 5, 7, 1, 2, 3))
+ >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)'
+ >>> am.normalize(dt).strftime(fmt)
+ '2011-05-06 20:02:03 CEST (+0200)'
+ '''
+ if dt.tzinfo is None:
+ raise ValueError('Naive time - no tzinfo set')
+
+ # Convert dt in localtime to UTC
+ offset = dt.tzinfo._utcoffset
+ dt = dt.replace(tzinfo=None)
+ dt = dt - offset
+ # convert it back, and return it
+ return self.fromutc(dt)
+
+ def localize(self, dt, is_dst=False):
+ '''Convert naive time to local time.
+
+ This method should be used to construct localtimes, rather
+ than passing a tzinfo argument to a datetime constructor.
+
+ is_dst is used to determine the correct timezone in the ambigous
+ period at the end of daylight saving time.
+
+ >>> from pytz import timezone
+ >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)'
+ >>> amdam = timezone('Europe/Amsterdam')
+ >>> dt = datetime(2004, 10, 31, 2, 0, 0)
+ >>> loc_dt1 = amdam.localize(dt, is_dst=True)
+ >>> loc_dt2 = amdam.localize(dt, is_dst=False)
+ >>> loc_dt1.strftime(fmt)
+ '2004-10-31 02:00:00 CEST (+0200)'
+ >>> loc_dt2.strftime(fmt)
+ '2004-10-31 02:00:00 CET (+0100)'
+ >>> str(loc_dt2 - loc_dt1)
+ '1:00:00'
+
+ Use is_dst=None to raise an AmbiguousTimeError for ambiguous
+ times at the end of daylight saving time
+
+ >>> try:
+ ... loc_dt1 = amdam.localize(dt, is_dst=None)
+ ... except AmbiguousTimeError:
+ ... print('Ambiguous')
+ Ambiguous
+
+ is_dst defaults to False
+
+ >>> amdam.localize(dt) == amdam.localize(dt, False)
+ True
+
+ is_dst is also used to determine the correct timezone in the
+ wallclock times jumped over at the start of daylight saving time.
+
+ >>> pacific = timezone('US/Pacific')
+ >>> dt = datetime(2008, 3, 9, 2, 0, 0)
+ >>> ploc_dt1 = pacific.localize(dt, is_dst=True)
+ >>> ploc_dt2 = pacific.localize(dt, is_dst=False)
+ >>> ploc_dt1.strftime(fmt)
+ '2008-03-09 02:00:00 PDT (-0700)'
+ >>> ploc_dt2.strftime(fmt)
+ '2008-03-09 02:00:00 PST (-0800)'
+ >>> str(ploc_dt2 - ploc_dt1)
+ '1:00:00'
+
+ Use is_dst=None to raise a NonExistentTimeError for these skipped
+ times.
+
+ >>> try:
+ ... loc_dt1 = pacific.localize(dt, is_dst=None)
+ ... except NonExistentTimeError:
+ ... print('Non-existent')
+ Non-existent
+ '''
+ if dt.tzinfo is not None:
+ raise ValueError('Not naive datetime (tzinfo is already set)')
+
+ # Find the two best possibilities.
+ possible_loc_dt = set()
+ for delta in [timedelta(days=-1), timedelta(days=1)]:
+ loc_dt = dt + delta
+ idx = max(0, bisect_right(
+ self._utc_transition_times, loc_dt) - 1)
+ inf = self._transition_info[idx]
+ tzinfo = self._tzinfos[inf]
+ loc_dt = tzinfo.normalize(dt.replace(tzinfo=tzinfo))
+ if loc_dt.replace(tzinfo=None) == dt:
+ possible_loc_dt.add(loc_dt)
+
+ if len(possible_loc_dt) == 1:
+ return possible_loc_dt.pop()
+
+ # If there are no possibly correct timezones, we are attempting
+ # to convert a time that never happened - the time period jumped
+ # during the start-of-DST transition period.
+ if len(possible_loc_dt) == 0:
+ # If we refuse to guess, raise an exception.
+ if is_dst is None:
+ raise NonExistentTimeError(dt)
+
+ # If we are forcing the pre-DST side of the DST transition, we
+ # obtain the correct timezone by winding the clock forward a few
+ # hours.
+ elif is_dst:
+ return self.localize(
+ dt + timedelta(hours=6), is_dst=True) - timedelta(hours=6)
+
+ # If we are forcing the post-DST side of the DST transition, we
+ # obtain the correct timezone by winding the clock back.
+ else:
+ return self.localize(
+ dt - timedelta(hours=6),
+ is_dst=False) + timedelta(hours=6)
+
+ # If we get this far, we have multiple possible timezones - this
+ # is an ambiguous case occuring during the end-of-DST transition.
+
+ # If told to be strict, raise an exception since we have an
+ # ambiguous case
+ if is_dst is None:
+ raise AmbiguousTimeError(dt)
+
+ # Filter out the possiblilities that don't match the requested
+ # is_dst
+ filtered_possible_loc_dt = [
+ p for p in possible_loc_dt if bool(p.tzinfo._dst) == is_dst
+ ]
+
+ # Hopefully we only have one possibility left. Return it.
+ if len(filtered_possible_loc_dt) == 1:
+ return filtered_possible_loc_dt[0]
+
+ if len(filtered_possible_loc_dt) == 0:
+ filtered_possible_loc_dt = list(possible_loc_dt)
+
+ # If we get this far, we have in a wierd timezone transition
+ # where the clocks have been wound back but is_dst is the same
+ # in both (eg. Europe/Warsaw 1915 when they switched to CET).
+ # At this point, we just have to guess unless we allow more
+ # hints to be passed in (such as the UTC offset or abbreviation),
+ # but that is just getting silly.
+ #
+ # Choose the earliest (by UTC) applicable timezone if is_dst=True
+ # Choose the latest (by UTC) applicable timezone if is_dst=False
+ # i.e., behave like end-of-DST transition
+ dates = {} # utc -> local
+ for local_dt in filtered_possible_loc_dt:
+ utc_time = (
+ local_dt.replace(tzinfo=None) - local_dt.tzinfo._utcoffset)
+ assert utc_time not in dates
+ dates[utc_time] = local_dt
+ return dates[[min, max][not is_dst](dates)]
+
+ def utcoffset(self, dt, is_dst=None):
+ '''See datetime.tzinfo.utcoffset
+
+ The is_dst parameter may be used to remove ambiguity during DST
+ transitions.
+
+ >>> from pytz import timezone
+ >>> tz = timezone('America/St_Johns')
+ >>> ambiguous = datetime(2009, 10, 31, 23, 30)
+
+ >>> str(tz.utcoffset(ambiguous, is_dst=False))
+ '-1 day, 20:30:00'
+
+ >>> str(tz.utcoffset(ambiguous, is_dst=True))
+ '-1 day, 21:30:00'
+
+ >>> try:
+ ... tz.utcoffset(ambiguous)
+ ... except AmbiguousTimeError:
+ ... print('Ambiguous')
+ Ambiguous
+
+ '''
+ if dt is None:
+ return None
+ elif dt.tzinfo is not self:
+ dt = self.localize(dt, is_dst)
+ return dt.tzinfo._utcoffset
+ else:
+ return self._utcoffset
+
+ def dst(self, dt, is_dst=None):
+ '''See datetime.tzinfo.dst
+
+ The is_dst parameter may be used to remove ambiguity during DST
+ transitions.
+
+ >>> from pytz import timezone
+ >>> tz = timezone('America/St_Johns')
+
+ >>> normal = datetime(2009, 9, 1)
+
+ >>> str(tz.dst(normal))
+ '1:00:00'
+ >>> str(tz.dst(normal, is_dst=False))
+ '1:00:00'
+ >>> str(tz.dst(normal, is_dst=True))
+ '1:00:00'
+
+ >>> ambiguous = datetime(2009, 10, 31, 23, 30)
+
+ >>> str(tz.dst(ambiguous, is_dst=False))
+ '0:00:00'
+ >>> str(tz.dst(ambiguous, is_dst=True))
+ '1:00:00'
+ >>> try:
+ ... tz.dst(ambiguous)
+ ... except AmbiguousTimeError:
+ ... print('Ambiguous')
+ Ambiguous
+
+ '''
+ if dt is None:
+ return None
+ elif dt.tzinfo is not self:
+ dt = self.localize(dt, is_dst)
+ return dt.tzinfo._dst
+ else:
+ return self._dst
+
+ def tzname(self, dt, is_dst=None):
+ '''See datetime.tzinfo.tzname
+
+ The is_dst parameter may be used to remove ambiguity during DST
+ transitions.
+
+ >>> from pytz import timezone
+ >>> tz = timezone('America/St_Johns')
+
+ >>> normal = datetime(2009, 9, 1)
+
+ >>> tz.tzname(normal)
+ 'NDT'
+ >>> tz.tzname(normal, is_dst=False)
+ 'NDT'
+ >>> tz.tzname(normal, is_dst=True)
+ 'NDT'
+
+ >>> ambiguous = datetime(2009, 10, 31, 23, 30)
+
+ >>> tz.tzname(ambiguous, is_dst=False)
+ 'NST'
+ >>> tz.tzname(ambiguous, is_dst=True)
+ 'NDT'
+ >>> try:
+ ... tz.tzname(ambiguous)
+ ... except AmbiguousTimeError:
+ ... print('Ambiguous')
+ Ambiguous
+ '''
+ if dt is None:
+ return self.zone
+ elif dt.tzinfo is not self:
+ dt = self.localize(dt, is_dst)
+ return dt.tzinfo._tzname
+ else:
+ return self._tzname
+
+ def __repr__(self):
+ if self._dst:
+ dst = 'DST'
+ else:
+ dst = 'STD'
+ if self._utcoffset > _notime:
+ return '<DstTzInfo %r %s+%s %s>' % (
+ self.zone, self._tzname, self._utcoffset, dst
+ )
+ else:
+ return '<DstTzInfo %r %s%s %s>' % (
+ self.zone, self._tzname, self._utcoffset, dst
+ )
+
+ def __reduce__(self):
+ # Special pickle to zone remains a singleton and to cope with
+ # database changes.
+ return pytz._p, (
+ self.zone,
+ _to_seconds(self._utcoffset),
+ _to_seconds(self._dst),
+ self._tzname
+ )
+
+
+def unpickler(zone, utcoffset=None, dstoffset=None, tzname=None):
+ """Factory function for unpickling pytz tzinfo instances.
+
+ This is shared for both StaticTzInfo and DstTzInfo instances, because
+ database changes could cause a zones implementation to switch between
+ these two base classes and we can't break pickles on a pytz version
+ upgrade.
+ """
+ # Raises a KeyError if zone no longer exists, which should never happen
+ # and would be a bug.
+ tz = pytz.timezone(zone)
+
+ # A StaticTzInfo - just return it
+ if utcoffset is None:
+ return tz
+
+ # This pickle was created from a DstTzInfo. We need to
+ # determine which of the list of tzinfo instances for this zone
+ # to use in order to restore the state of any datetime instances using
+ # it correctly.
+ utcoffset = memorized_timedelta(utcoffset)
+ dstoffset = memorized_timedelta(dstoffset)
+ try:
+ return tz._tzinfos[(utcoffset, dstoffset, tzname)]
+ except KeyError:
+ # The particular state requested in this timezone no longer exists.
+ # This indicates a corrupt pickle, or the timezone database has been
+ # corrected violently enough to make this particular
+ # (utcoffset,dstoffset) no longer exist in the zone, or the
+ # abbreviation has been changed.
+ pass
+
+ # See if we can find an entry differing only by tzname. Abbreviations
+ # get changed from the initial guess by the database maintainers to
+ # match reality when this information is discovered.
+ for localized_tz in tz._tzinfos.values():
+ if (localized_tz._utcoffset == utcoffset and
+ localized_tz._dst == dstoffset):
+ return localized_tz
+
+ # This (utcoffset, dstoffset) information has been removed from the
+ # zone. Add it back. This might occur when the database maintainers have
+ # corrected incorrect information. datetime instances using this
+ # incorrect information will continue to do so, exactly as they were
+ # before being pickled. This is purely an overly paranoid safety net - I
+ # doubt this will ever been needed in real life.
+ inf = (utcoffset, dstoffset, tzname)
+ tz._tzinfos[inf] = tz.__class__(inf, tz._tzinfos)
+ return tz._tzinfos[inf]
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Abidjan b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Abidjan
new file mode 100644
index 0000000000000000000000000000000000000000..28b32ab2e0b9053f39a91d9f28b6072e41423954
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Abidjan differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Accra b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Accra
new file mode 100644
index 0000000000000000000000000000000000000000..28b32ab2e0b9053f39a91d9f28b6072e41423954
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Accra differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Addis_Ababa b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Addis_Ababa
new file mode 100644
index 0000000000000000000000000000000000000000..9dcfc19c56e62b12b730f4335b34479695f273f5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Addis_Ababa differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Algiers b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Algiers
new file mode 100644
index 0000000000000000000000000000000000000000..6cfd8a16e16ec08c7cd83e6c0e1f9e1bbc5dc18a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Algiers differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Asmara b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Asmara
new file mode 100644
index 0000000000000000000000000000000000000000..9dcfc19c56e62b12b730f4335b34479695f273f5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Asmara differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Asmera b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Asmera
new file mode 100644
index 0000000000000000000000000000000000000000..9dcfc19c56e62b12b730f4335b34479695f273f5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Asmera differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Bamako b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Bamako
new file mode 100644
index 0000000000000000000000000000000000000000..28b32ab2e0b9053f39a91d9f28b6072e41423954
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Bamako differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Bangui b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Bangui
new file mode 100644
index 0000000000000000000000000000000000000000..afb6a4a8fb17b0d4670b8ea1b38f5cc6100244e4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Bangui differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Banjul b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Banjul
new file mode 100644
index 0000000000000000000000000000000000000000..28b32ab2e0b9053f39a91d9f28b6072e41423954
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Banjul differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Bissau b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Bissau
new file mode 100644
index 0000000000000000000000000000000000000000..82ea5aaf0c6ae2b3ec582013b6d16e6d6f29eb0a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Bissau differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Blantyre b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Blantyre
new file mode 100644
index 0000000000000000000000000000000000000000..52753c0f87bbfa457ada89d400908a3d6537ac0e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Blantyre differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Brazzaville b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Brazzaville
new file mode 100644
index 0000000000000000000000000000000000000000..afb6a4a8fb17b0d4670b8ea1b38f5cc6100244e4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Brazzaville differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Bujumbura b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Bujumbura
new file mode 100644
index 0000000000000000000000000000000000000000..52753c0f87bbfa457ada89d400908a3d6537ac0e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Bujumbura differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Cairo b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Cairo
new file mode 100644
index 0000000000000000000000000000000000000000..d3f819623fc9ef90d327380fad15341ec1a0e202
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Cairo differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Casablanca b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Casablanca
new file mode 100644
index 0000000000000000000000000000000000000000..17e0d1b89f093e1e4452a8921ffff9a91287eb4c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Casablanca differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Ceuta b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Ceuta
new file mode 100644
index 0000000000000000000000000000000000000000..850c8f06fa7918684e67e9ab8192ac933dac90b3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Ceuta differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Conakry b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Conakry
new file mode 100644
index 0000000000000000000000000000000000000000..28b32ab2e0b9053f39a91d9f28b6072e41423954
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Conakry differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Dakar b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Dakar
new file mode 100644
index 0000000000000000000000000000000000000000..28b32ab2e0b9053f39a91d9f28b6072e41423954
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Dakar differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Dar_es_Salaam b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Dar_es_Salaam
new file mode 100644
index 0000000000000000000000000000000000000000..9dcfc19c56e62b12b730f4335b34479695f273f5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Dar_es_Salaam differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Djibouti b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Djibouti
new file mode 100644
index 0000000000000000000000000000000000000000..9dcfc19c56e62b12b730f4335b34479695f273f5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Djibouti differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Douala b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Douala
new file mode 100644
index 0000000000000000000000000000000000000000..afb6a4a8fb17b0d4670b8ea1b38f5cc6100244e4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Douala differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/El_Aaiun b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/El_Aaiun
new file mode 100644
index 0000000000000000000000000000000000000000..64f1b7694418a8c284febe195659c4dd53359b1a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/El_Aaiun differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Freetown b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Freetown
new file mode 100644
index 0000000000000000000000000000000000000000..28b32ab2e0b9053f39a91d9f28b6072e41423954
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Freetown differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Gaborone b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Gaborone
new file mode 100644
index 0000000000000000000000000000000000000000..52753c0f87bbfa457ada89d400908a3d6537ac0e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Gaborone differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Harare b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Harare
new file mode 100644
index 0000000000000000000000000000000000000000..52753c0f87bbfa457ada89d400908a3d6537ac0e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Harare differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Johannesburg b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Johannesburg
new file mode 100644
index 0000000000000000000000000000000000000000..b1c425daced454f53d7d18fea807bf8d081cf97e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Johannesburg differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Juba b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Juba
new file mode 100644
index 0000000000000000000000000000000000000000..06482943a45a58a02a43b9e2b6a3f215b21b045f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Juba differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Kampala b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Kampala
new file mode 100644
index 0000000000000000000000000000000000000000..9dcfc19c56e62b12b730f4335b34479695f273f5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Kampala differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Khartoum b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Khartoum
new file mode 100644
index 0000000000000000000000000000000000000000..8ee8cb92e72d9c507ad0ee06dc6a38406ab06f34
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Khartoum differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Kigali b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Kigali
new file mode 100644
index 0000000000000000000000000000000000000000..52753c0f87bbfa457ada89d400908a3d6537ac0e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Kigali differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Kinshasa b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Kinshasa
new file mode 100644
index 0000000000000000000000000000000000000000..afb6a4a8fb17b0d4670b8ea1b38f5cc6100244e4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Kinshasa differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Lagos b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Lagos
new file mode 100644
index 0000000000000000000000000000000000000000..afb6a4a8fb17b0d4670b8ea1b38f5cc6100244e4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Lagos differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Libreville b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Libreville
new file mode 100644
index 0000000000000000000000000000000000000000..afb6a4a8fb17b0d4670b8ea1b38f5cc6100244e4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Libreville differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Lome b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Lome
new file mode 100644
index 0000000000000000000000000000000000000000..28b32ab2e0b9053f39a91d9f28b6072e41423954
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Lome differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Luanda b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Luanda
new file mode 100644
index 0000000000000000000000000000000000000000..afb6a4a8fb17b0d4670b8ea1b38f5cc6100244e4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Luanda differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Lubumbashi b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Lubumbashi
new file mode 100644
index 0000000000000000000000000000000000000000..52753c0f87bbfa457ada89d400908a3d6537ac0e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Lubumbashi differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Lusaka b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Lusaka
new file mode 100644
index 0000000000000000000000000000000000000000..52753c0f87bbfa457ada89d400908a3d6537ac0e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Lusaka differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Malabo b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Malabo
new file mode 100644
index 0000000000000000000000000000000000000000..afb6a4a8fb17b0d4670b8ea1b38f5cc6100244e4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Malabo differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Maputo b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Maputo
new file mode 100644
index 0000000000000000000000000000000000000000..52753c0f87bbfa457ada89d400908a3d6537ac0e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Maputo differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Maseru b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Maseru
new file mode 100644
index 0000000000000000000000000000000000000000..b1c425daced454f53d7d18fea807bf8d081cf97e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Maseru differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Mbabane b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Mbabane
new file mode 100644
index 0000000000000000000000000000000000000000..b1c425daced454f53d7d18fea807bf8d081cf97e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Mbabane differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Mogadishu b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Mogadishu
new file mode 100644
index 0000000000000000000000000000000000000000..9dcfc19c56e62b12b730f4335b34479695f273f5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Mogadishu differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Monrovia b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Monrovia
new file mode 100644
index 0000000000000000000000000000000000000000..6d688502a1ca80f2e57a6de2790ac3193879d248
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Monrovia differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Nairobi b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Nairobi
new file mode 100644
index 0000000000000000000000000000000000000000..9dcfc19c56e62b12b730f4335b34479695f273f5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Nairobi differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Ndjamena b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Ndjamena
new file mode 100644
index 0000000000000000000000000000000000000000..a968845e29b8b2b47d4a73f74ae04ef681d7d485
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Ndjamena differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Niamey b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Niamey
new file mode 100644
index 0000000000000000000000000000000000000000..afb6a4a8fb17b0d4670b8ea1b38f5cc6100244e4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Niamey differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Nouakchott b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Nouakchott
new file mode 100644
index 0000000000000000000000000000000000000000..28b32ab2e0b9053f39a91d9f28b6072e41423954
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Nouakchott differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Ouagadougou b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Ouagadougou
new file mode 100644
index 0000000000000000000000000000000000000000..28b32ab2e0b9053f39a91d9f28b6072e41423954
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Ouagadougou differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Porto-Novo b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Porto-Novo
new file mode 100644
index 0000000000000000000000000000000000000000..afb6a4a8fb17b0d4670b8ea1b38f5cc6100244e4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Porto-Novo differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Sao_Tome b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Sao_Tome
new file mode 100644
index 0000000000000000000000000000000000000000..59f3759c409a1fb50e632ef5ef613d3fee7af7ef
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Sao_Tome differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Timbuktu b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Timbuktu
new file mode 100644
index 0000000000000000000000000000000000000000..28b32ab2e0b9053f39a91d9f28b6072e41423954
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Timbuktu differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Tripoli b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Tripoli
new file mode 100644
index 0000000000000000000000000000000000000000..07b393bb7db14cef1e906ebe63cfbbe8cddc79d5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Tripoli differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Tunis b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Tunis
new file mode 100644
index 0000000000000000000000000000000000000000..427fa563033fdd8533ae56337fa20befe9719b42
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Tunis differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Windhoek b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Windhoek
new file mode 100644
index 0000000000000000000000000000000000000000..abecd137b1fc3220637b22ffea0e7256a58e9377
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Africa/Windhoek differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Adak b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Adak
new file mode 100644
index 0000000000000000000000000000000000000000..43236498f681cc06f64ca2afa613880331fe6fbb
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Adak differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Anchorage b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Anchorage
new file mode 100644
index 0000000000000000000000000000000000000000..9bbb2fd3b361ea8aa4c126d14df5fa370343a63f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Anchorage differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Anguilla b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Anguilla
new file mode 100644
index 0000000000000000000000000000000000000000..a662a57137b69e8ba445e899566222cdd422a764
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Anguilla differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Antigua b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Antigua
new file mode 100644
index 0000000000000000000000000000000000000000..a662a57137b69e8ba445e899566222cdd422a764
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Antigua differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Araguaina b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Araguaina
new file mode 100644
index 0000000000000000000000000000000000000000..49381b4108b1827f24ad7d2c18064a22624092e7
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Araguaina differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/Buenos_Aires b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/Buenos_Aires
new file mode 100644
index 0000000000000000000000000000000000000000..260f86a9180677d86fc3280b06f01d6a6cd91c94
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/Buenos_Aires differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/Catamarca b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/Catamarca
new file mode 100644
index 0000000000000000000000000000000000000000..0ae222a2f8bb2fb1b7abe17d08e076674c51541d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/Catamarca differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/ComodRivadavia b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/ComodRivadavia
new file mode 100644
index 0000000000000000000000000000000000000000..0ae222a2f8bb2fb1b7abe17d08e076674c51541d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/ComodRivadavia differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/Cordoba b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/Cordoba
new file mode 100644
index 0000000000000000000000000000000000000000..da4c23a545b3603bcdd4555ba8feba117802e7b4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/Cordoba differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/Jujuy b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/Jujuy
new file mode 100644
index 0000000000000000000000000000000000000000..604b85663672d83658f331a69cc8f41cf2a2b5a1
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/Jujuy differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/La_Rioja b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/La_Rioja
new file mode 100644
index 0000000000000000000000000000000000000000..2218e36bfdb95ea041b166f183895a6ca4871ab1
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/La_Rioja differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/Mendoza b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/Mendoza
new file mode 100644
index 0000000000000000000000000000000000000000..f9e677f171b3900dfd978eb8ea2aa226557d2c5b
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/Mendoza differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/Rio_Gallegos b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/Rio_Gallegos
new file mode 100644
index 0000000000000000000000000000000000000000..c36587e1c292673fa537cecf729f4c873d375c9a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/Rio_Gallegos differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/Salta b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/Salta
new file mode 100644
index 0000000000000000000000000000000000000000..0e797f2215ab398bc59b8e4d60e5260fcb365c3b
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/Salta differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/San_Juan b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/San_Juan
new file mode 100644
index 0000000000000000000000000000000000000000..2698495bb3f953685e45edb916798cd24f772664
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/San_Juan differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/San_Luis b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/San_Luis
new file mode 100644
index 0000000000000000000000000000000000000000..fe50f6211cff908f21257cb42259fe2692abdc4e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/San_Luis differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/Tucuman b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/Tucuman
new file mode 100644
index 0000000000000000000000000000000000000000..c954000ba9b28204cc3223628d13e9dcfa4b6eb0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/Tucuman differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/Ushuaia b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/Ushuaia
new file mode 100644
index 0000000000000000000000000000000000000000..3643628a24723239a13d61b2215c907cdba03985
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Argentina/Ushuaia differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Aruba b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Aruba
new file mode 100644
index 0000000000000000000000000000000000000000..a662a57137b69e8ba445e899566222cdd422a764
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Aruba differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Asuncion b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Asuncion
new file mode 100644
index 0000000000000000000000000000000000000000..2f3bbda6d3586a2ef5b94bc28ff37906fbc758f5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Asuncion differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Atikokan b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Atikokan
new file mode 100644
index 0000000000000000000000000000000000000000..9964b9a33452f4b636f43703b7cdec4891cbda5f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Atikokan differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Atka b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Atka
new file mode 100644
index 0000000000000000000000000000000000000000..43236498f681cc06f64ca2afa613880331fe6fbb
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Atka differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Bahia b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Bahia
new file mode 100644
index 0000000000000000000000000000000000000000..15808d30fb1bb250cce195a00a2d7dca321df7d3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Bahia differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Bahia_Banderas b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Bahia_Banderas
new file mode 100644
index 0000000000000000000000000000000000000000..896af3f56abd712e2fa85d69592bd33b88d364bc
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Bahia_Banderas differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Barbados b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Barbados
new file mode 100644
index 0000000000000000000000000000000000000000..00cd045ac86d6060e9e8b8dc0460caa49d2479b5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Barbados differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Belem b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Belem
new file mode 100644
index 0000000000000000000000000000000000000000..60b5924dc12f14c274073b1fe4ccc590b742da5a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Belem differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Belize b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Belize
new file mode 100644
index 0000000000000000000000000000000000000000..e6f5dfa6a8d82523e8cb12f17cf73560bc09a383
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Belize differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Blanc-Sablon b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Blanc-Sablon
new file mode 100644
index 0000000000000000000000000000000000000000..a662a57137b69e8ba445e899566222cdd422a764
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Blanc-Sablon differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Boa_Vista b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Boa_Vista
new file mode 100644
index 0000000000000000000000000000000000000000..978c33100fb2d9b14c1faa581890c16c8747b42b
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Boa_Vista differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Bogota b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Bogota
new file mode 100644
index 0000000000000000000000000000000000000000..b2647d7a837637d3b85d4bc85f83c88982597faf
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Bogota differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Boise b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Boise
new file mode 100644
index 0000000000000000000000000000000000000000..f8d54e27479149d35f6ddff12f709096f08bfac3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Boise differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Buenos_Aires b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Buenos_Aires
new file mode 100644
index 0000000000000000000000000000000000000000..260f86a9180677d86fc3280b06f01d6a6cd91c94
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Buenos_Aires differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Cambridge_Bay b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Cambridge_Bay
new file mode 100644
index 0000000000000000000000000000000000000000..f8db4b6ebf5b66d3d566fc3975d6151783f6e645
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Cambridge_Bay differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Campo_Grande b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Campo_Grande
new file mode 100644
index 0000000000000000000000000000000000000000..81206247d9efa7d3dd10dd9e88c669718d86cc50
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Campo_Grande differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Cancun b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Cancun
new file mode 100644
index 0000000000000000000000000000000000000000..f907f0a5ba77b9ec845c450cd535ee589b46c5ad
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Cancun differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Caracas b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Caracas
new file mode 100644
index 0000000000000000000000000000000000000000..eedf725e8de1a61c4db886ce8b8b60fb4ee91c77
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Caracas differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Catamarca b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Catamarca
new file mode 100644
index 0000000000000000000000000000000000000000..0ae222a2f8bb2fb1b7abe17d08e076674c51541d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Catamarca differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Cayenne b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Cayenne
new file mode 100644
index 0000000000000000000000000000000000000000..e5bc06fdbe5a3062f90274a6b5cc5c56e2d74d5f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Cayenne differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Cayman b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Cayman
new file mode 100644
index 0000000000000000000000000000000000000000..9964b9a33452f4b636f43703b7cdec4891cbda5f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Cayman differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Chicago b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Chicago
new file mode 100644
index 0000000000000000000000000000000000000000..a5b1617c7f70bfc77b7d504aaa3f23603082c3cb
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Chicago differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Chihuahua b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Chihuahua
new file mode 100644
index 0000000000000000000000000000000000000000..8ed5f93b02001a63ce43dd221ff48cc5faa6752e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Chihuahua differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Coral_Harbour b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Coral_Harbour
new file mode 100644
index 0000000000000000000000000000000000000000..9964b9a33452f4b636f43703b7cdec4891cbda5f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Coral_Harbour differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Cordoba b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Cordoba
new file mode 100644
index 0000000000000000000000000000000000000000..da4c23a545b3603bcdd4555ba8feba117802e7b4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Cordoba differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Costa_Rica b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Costa_Rica
new file mode 100644
index 0000000000000000000000000000000000000000..37cb85e4dbfb7ac9c01eecf584a1a721ed251e93
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Costa_Rica differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Creston b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Creston
new file mode 100644
index 0000000000000000000000000000000000000000..ac6bb0c78291f1e341f42c34639458fb385bf8ed
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Creston differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Cuiaba b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Cuiaba
new file mode 100644
index 0000000000000000000000000000000000000000..9bea3d4079f4449d5bed22b81404b5ee2ab6394a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Cuiaba differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Curacao b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Curacao
new file mode 100644
index 0000000000000000000000000000000000000000..a662a57137b69e8ba445e899566222cdd422a764
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Curacao differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Danmarkshavn b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Danmarkshavn
new file mode 100644
index 0000000000000000000000000000000000000000..9549adcb657569ea304592a4070ceecb4550a4db
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Danmarkshavn differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Dawson b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Dawson
new file mode 100644
index 0000000000000000000000000000000000000000..343b63227d2185863cd720bf449de000bbc794d0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Dawson differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Dawson_Creek b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Dawson_Creek
new file mode 100644
index 0000000000000000000000000000000000000000..db9e3396557652707c0c2232e119e13a2dd172fb
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Dawson_Creek differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Denver b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Denver
new file mode 100644
index 0000000000000000000000000000000000000000..5fbe26b1d93d1acb2561c390c1e097d07f1a262e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Denver differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Detroit b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Detroit
new file mode 100644
index 0000000000000000000000000000000000000000..e104faa46545ee873295cde34e1d46bccad8647c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Detroit differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Dominica b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Dominica
new file mode 100644
index 0000000000000000000000000000000000000000..a662a57137b69e8ba445e899566222cdd422a764
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Dominica differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Edmonton b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Edmonton
new file mode 100644
index 0000000000000000000000000000000000000000..cd78a6f8be1dd55ac5afd25bbae39bd5706e42d1
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Edmonton differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Eirunepe b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Eirunepe
new file mode 100644
index 0000000000000000000000000000000000000000..39d6daeb9b6dd3b225b0ddacceb6984e8204ac7d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Eirunepe differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/El_Salvador b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/El_Salvador
new file mode 100644
index 0000000000000000000000000000000000000000..e2f22304aad56062cfb66d23f3a8c296689286ed
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/El_Salvador differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Ensenada b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Ensenada
new file mode 100644
index 0000000000000000000000000000000000000000..ada6bf78b2815d3d99c97d521ab9a6b35c8af8c3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Ensenada differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Fort_Nelson b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Fort_Nelson
new file mode 100644
index 0000000000000000000000000000000000000000..5a0b7f1ca032be1adf8ba91f863e727c5ec6a026
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Fort_Nelson differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Fort_Wayne b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Fort_Wayne
new file mode 100644
index 0000000000000000000000000000000000000000..09511ccdcf97a5baa8e1b0eb75e040eee6b6e0c4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Fort_Wayne differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Fortaleza b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Fortaleza
new file mode 100644
index 0000000000000000000000000000000000000000..be57dc20b46142787cbd820e0d1c927d2f02f046
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Fortaleza differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Glace_Bay b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Glace_Bay
new file mode 100644
index 0000000000000000000000000000000000000000..48412a4cbf9241ea83887876b1b8b22c367ff4fd
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Glace_Bay differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Godthab b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Godthab
new file mode 100644
index 0000000000000000000000000000000000000000..0160308bf63690a6dee51c3c8849f494a47186c2
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Godthab differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Goose_Bay b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Goose_Bay
new file mode 100644
index 0000000000000000000000000000000000000000..a3f299079aebb8524bf77e7f92e0a7e6d0a7b6fb
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Goose_Bay differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Grand_Turk b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Grand_Turk
new file mode 100644
index 0000000000000000000000000000000000000000..06da1a6d7a3eeec0e82f7c5815a8fed4252e16b2
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Grand_Turk differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Grenada b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Grenada
new file mode 100644
index 0000000000000000000000000000000000000000..a662a57137b69e8ba445e899566222cdd422a764
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Grenada differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Guadeloupe b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Guadeloupe
new file mode 100644
index 0000000000000000000000000000000000000000..a662a57137b69e8ba445e899566222cdd422a764
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Guadeloupe differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Guatemala b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Guatemala
new file mode 100644
index 0000000000000000000000000000000000000000..407138caf94e467b52d925f96ad80b506e16d9ec
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Guatemala differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Guayaquil b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Guayaquil
new file mode 100644
index 0000000000000000000000000000000000000000..0559a7a4a4288d7e3c3b1bcf47c57a7ea82be2ef
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Guayaquil differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Guyana b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Guyana
new file mode 100644
index 0000000000000000000000000000000000000000..7af58e502329b9625a9a9a34268e4f024b545b0f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Guyana differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Halifax b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Halifax
new file mode 100644
index 0000000000000000000000000000000000000000..756099abe6cee44295a5566ad6cd0c352fb82e64
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Halifax differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Havana b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Havana
new file mode 100644
index 0000000000000000000000000000000000000000..b69ac4510784f23ee794cd6d11e62315a7318e5e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Havana differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Hermosillo b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Hermosillo
new file mode 100644
index 0000000000000000000000000000000000000000..791a9fa2b38729ff10032f9e1f021754c76c87f5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Hermosillo differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Indiana/Indianapolis b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Indiana/Indianapolis
new file mode 100644
index 0000000000000000000000000000000000000000..09511ccdcf97a5baa8e1b0eb75e040eee6b6e0c4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Indiana/Indianapolis differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Indiana/Knox b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Indiana/Knox
new file mode 100644
index 0000000000000000000000000000000000000000..fcd408d74df43310a9a85c475f83d545f6d75911
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Indiana/Knox differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Indiana/Marengo b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Indiana/Marengo
new file mode 100644
index 0000000000000000000000000000000000000000..1abf75e7e864625b975feebdd0b232d0de624b27
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Indiana/Marengo differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Indiana/Petersburg b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Indiana/Petersburg
new file mode 100644
index 0000000000000000000000000000000000000000..0133548ecac014f4b37f0abcd471a5a6b4e7ed5f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Indiana/Petersburg differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Indiana/Tell_City b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Indiana/Tell_City
new file mode 100644
index 0000000000000000000000000000000000000000..7bbb653cd7ce09d88f8a1701853bfe92ac7db0c4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Indiana/Tell_City differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Indiana/Vevay b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Indiana/Vevay
new file mode 100644
index 0000000000000000000000000000000000000000..d236b7c07726828984d3afbfa872b3abeae3e809
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Indiana/Vevay differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Indiana/Vincennes b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Indiana/Vincennes
new file mode 100644
index 0000000000000000000000000000000000000000..c818929d19b2ca5925c181696f02a2ba93a7009a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Indiana/Vincennes differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Indiana/Winamac b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Indiana/Winamac
new file mode 100644
index 0000000000000000000000000000000000000000..630935c1e1a8c1a87e728fc1dcc4c930e81b30e9
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Indiana/Winamac differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Indianapolis b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Indianapolis
new file mode 100644
index 0000000000000000000000000000000000000000..09511ccdcf97a5baa8e1b0eb75e040eee6b6e0c4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Indianapolis differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Inuvik b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Inuvik
new file mode 100644
index 0000000000000000000000000000000000000000..87bb355295a3efcacd4baecb5a8bfa3fb9ce9c54
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Inuvik differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Iqaluit b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Iqaluit
new file mode 100644
index 0000000000000000000000000000000000000000..c8138bdbb3cf141ad42ace037e144bee98d81481
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Iqaluit differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Jamaica b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Jamaica
new file mode 100644
index 0000000000000000000000000000000000000000..2a9b7fd52d37a1ffe9fc589daa04d88c6c71a6e0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Jamaica differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Jujuy b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Jujuy
new file mode 100644
index 0000000000000000000000000000000000000000..604b85663672d83658f331a69cc8f41cf2a2b5a1
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Jujuy differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Juneau b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Juneau
new file mode 100644
index 0000000000000000000000000000000000000000..451f3490096338f40e601628ac70f04112ace51d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Juneau differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Kentucky/Louisville b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Kentucky/Louisville
new file mode 100644
index 0000000000000000000000000000000000000000..177836e4fd98399bd4cdda410648cc286408b95b
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Kentucky/Louisville differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Kentucky/Monticello b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Kentucky/Monticello
new file mode 100644
index 0000000000000000000000000000000000000000..438e3eab4a6e581c6f5e7661098bf07b1a000593
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Kentucky/Monticello differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Knox_IN b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Knox_IN
new file mode 100644
index 0000000000000000000000000000000000000000..fcd408d74df43310a9a85c475f83d545f6d75911
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Knox_IN differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Kralendijk b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Kralendijk
new file mode 100644
index 0000000000000000000000000000000000000000..a662a57137b69e8ba445e899566222cdd422a764
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Kralendijk differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/La_Paz b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/La_Paz
new file mode 100644
index 0000000000000000000000000000000000000000..a10137243577a8b312dc463571c4c94ecd3bf1f1
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/La_Paz differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Lima b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Lima
new file mode 100644
index 0000000000000000000000000000000000000000..3c6529b7567f032882863fcd48b790e0988261bf
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Lima differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Los_Angeles b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Los_Angeles
new file mode 100644
index 0000000000000000000000000000000000000000..9dad4f4c75b373635ccbe634798f8d9e587e36c1
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Los_Angeles differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Louisville b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Louisville
new file mode 100644
index 0000000000000000000000000000000000000000..177836e4fd98399bd4cdda410648cc286408b95b
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Louisville differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Lower_Princes b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Lower_Princes
new file mode 100644
index 0000000000000000000000000000000000000000..a662a57137b69e8ba445e899566222cdd422a764
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Lower_Princes differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Maceio b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Maceio
new file mode 100644
index 0000000000000000000000000000000000000000..bc8b951d2e880182a369cba8fae1270e40694aaa
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Maceio differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Managua b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Managua
new file mode 100644
index 0000000000000000000000000000000000000000..e0242bff6e5df45de8b235abb4a8405fbb8ba6a6
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Managua differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Manaus b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Manaus
new file mode 100644
index 0000000000000000000000000000000000000000..63d58f80f556e1c3135d6dbcb2a2dfc6f61b4fc9
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Manaus differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Marigot b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Marigot
new file mode 100644
index 0000000000000000000000000000000000000000..a662a57137b69e8ba445e899566222cdd422a764
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Marigot differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Martinique b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Martinique
new file mode 100644
index 0000000000000000000000000000000000000000..8df43dcf1c9f63d3ea9f056f062ea97e5c7c0b57
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Martinique differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Matamoros b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Matamoros
new file mode 100644
index 0000000000000000000000000000000000000000..047968dfff4dba196d5c9695e79cf28395d9a0aa
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Matamoros differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Mazatlan b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Mazatlan
new file mode 100644
index 0000000000000000000000000000000000000000..e4a785743d75f939c3e4798ebe7c79d38e4cfd08
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Mazatlan differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Mendoza b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Mendoza
new file mode 100644
index 0000000000000000000000000000000000000000..f9e677f171b3900dfd978eb8ea2aa226557d2c5b
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Mendoza differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Menominee b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Menominee
new file mode 100644
index 0000000000000000000000000000000000000000..314613866de53e1457f6cbf2fb617be7e4955edf
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Menominee differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Merida b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Merida
new file mode 100644
index 0000000000000000000000000000000000000000..ea852da33a81478433a389e2b4da03b425d9f764
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Merida differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Metlakatla b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Metlakatla
new file mode 100644
index 0000000000000000000000000000000000000000..1e94be3d552ea0d6e29824469866c2a51a187be9
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Metlakatla differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Mexico_City b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Mexico_City
new file mode 100644
index 0000000000000000000000000000000000000000..e7fb6f2953d123efbc2550f450decd794b017d4f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Mexico_City differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Miquelon b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Miquelon
new file mode 100644
index 0000000000000000000000000000000000000000..b924b7100438cfacf25c56c494fd1abf3f262100
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Miquelon differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Moncton b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Moncton
new file mode 100644
index 0000000000000000000000000000000000000000..9df8d0f2ec9fc8f1974d83cdd8155c79340007ed
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Moncton differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Monterrey b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Monterrey
new file mode 100644
index 0000000000000000000000000000000000000000..a8928c8dc94aaebfaf9cdeb93011d41b482a4cc6
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Monterrey differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Montevideo b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Montevideo
new file mode 100644
index 0000000000000000000000000000000000000000..2f357bcf50da794541a518510777bd5c470b9b77
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Montevideo differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Montreal b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Montreal
new file mode 100644
index 0000000000000000000000000000000000000000..6752c5b05285678b86aea170f0921fc5f5e57738
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Montreal differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Montserrat b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Montserrat
new file mode 100644
index 0000000000000000000000000000000000000000..a662a57137b69e8ba445e899566222cdd422a764
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Montserrat differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Nassau b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Nassau
new file mode 100644
index 0000000000000000000000000000000000000000..6752c5b05285678b86aea170f0921fc5f5e57738
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Nassau differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/New_York b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/New_York
new file mode 100644
index 0000000000000000000000000000000000000000..2f75480e069b60b6c58a9137c7eebd4796f74226
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/New_York differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Nipigon b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Nipigon
new file mode 100644
index 0000000000000000000000000000000000000000..f6a856e693420d6d989c45acff2c48b60db69186
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Nipigon differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Nome b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Nome
new file mode 100644
index 0000000000000000000000000000000000000000..10998df3bbe67aa8a02602301d10ec2b2c33006b
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Nome differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Noronha b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Noronha
new file mode 100644
index 0000000000000000000000000000000000000000..f140726f2a6945357b325f0bb452dce7557bbff7
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Noronha differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/North_Dakota/Beulah b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/North_Dakota/Beulah
new file mode 100644
index 0000000000000000000000000000000000000000..246345dde7cada7b0de81cc23fabc66b60d51e79
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/North_Dakota/Beulah differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/North_Dakota/Center b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/North_Dakota/Center
new file mode 100644
index 0000000000000000000000000000000000000000..1fa0703778034551487b7b55d80d41ad220f2102
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/North_Dakota/Center differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/North_Dakota/New_Salem b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/North_Dakota/New_Salem
new file mode 100644
index 0000000000000000000000000000000000000000..123f2aeecfc88f2e543f99d6a859e02788170852
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/North_Dakota/New_Salem differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Nuuk b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Nuuk
new file mode 100644
index 0000000000000000000000000000000000000000..0160308bf63690a6dee51c3c8849f494a47186c2
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Nuuk differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Ojinaga b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Ojinaga
new file mode 100644
index 0000000000000000000000000000000000000000..fc4a03e36931bbca9b00e0344d3659e4af700be5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Ojinaga differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Panama b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Panama
new file mode 100644
index 0000000000000000000000000000000000000000..9964b9a33452f4b636f43703b7cdec4891cbda5f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Panama differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Pangnirtung b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Pangnirtung
new file mode 100644
index 0000000000000000000000000000000000000000..3e4e0db6ae0a5a704f8cdd549071cc5b7124e2fd
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Pangnirtung differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Paramaribo b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Paramaribo
new file mode 100644
index 0000000000000000000000000000000000000000..bc8a6edf1331b6912c26e81078b00c8e5d87ffdf
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Paramaribo differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Phoenix b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Phoenix
new file mode 100644
index 0000000000000000000000000000000000000000..ac6bb0c78291f1e341f42c34639458fb385bf8ed
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Phoenix differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Port-au-Prince b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Port-au-Prince
new file mode 100644
index 0000000000000000000000000000000000000000..287f1439266639f9564149d6e162feaba3fbed86
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Port-au-Prince differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Port_of_Spain b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Port_of_Spain
new file mode 100644
index 0000000000000000000000000000000000000000..a662a57137b69e8ba445e899566222cdd422a764
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Port_of_Spain differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Porto_Acre b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Porto_Acre
new file mode 100644
index 0000000000000000000000000000000000000000..a374cb43d98bfbd06c82ca306a74f96993fd5657
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Porto_Acre differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Porto_Velho b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Porto_Velho
new file mode 100644
index 0000000000000000000000000000000000000000..2e873a5aa868c5605443c48510646f1408d58c4a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Porto_Velho differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Puerto_Rico b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Puerto_Rico
new file mode 100644
index 0000000000000000000000000000000000000000..a662a57137b69e8ba445e899566222cdd422a764
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Puerto_Rico differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Punta_Arenas b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Punta_Arenas
new file mode 100644
index 0000000000000000000000000000000000000000..a5a8af52c2f26baf6f85a1786f69593491ad5195
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Punta_Arenas differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Rainy_River b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Rainy_River
new file mode 100644
index 0000000000000000000000000000000000000000..ea66099155ca930810062eebcfbc7fc99e97097f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Rainy_River differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Rankin_Inlet b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Rankin_Inlet
new file mode 100644
index 0000000000000000000000000000000000000000..3a70587472c633aaa178c7f979b8b3884d604372
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Rankin_Inlet differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Recife b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Recife
new file mode 100644
index 0000000000000000000000000000000000000000..d7abb168a7434579852b7533fe062fed9297ec2b
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Recife differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Regina b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Regina
new file mode 100644
index 0000000000000000000000000000000000000000..20c9c84df491e4072ec4c5d2c931a7433d9fd394
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Regina differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Resolute b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Resolute
new file mode 100644
index 0000000000000000000000000000000000000000..0a73b753ba597f89cd57cc3875e7869dc133778c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Resolute differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Rio_Branco b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Rio_Branco
new file mode 100644
index 0000000000000000000000000000000000000000..a374cb43d98bfbd06c82ca306a74f96993fd5657
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Rio_Branco differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Rosario b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Rosario
new file mode 100644
index 0000000000000000000000000000000000000000..da4c23a545b3603bcdd4555ba8feba117802e7b4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Rosario differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Santa_Isabel b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Santa_Isabel
new file mode 100644
index 0000000000000000000000000000000000000000..ada6bf78b2815d3d99c97d521ab9a6b35c8af8c3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Santa_Isabel differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Santarem b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Santarem
new file mode 100644
index 0000000000000000000000000000000000000000..c28f36063bd264e932a1910861e884d2a1669b00
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Santarem differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Santiago b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Santiago
new file mode 100644
index 0000000000000000000000000000000000000000..816a0428188d99f437004312ee73c3860ee0f54f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Santiago differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Santo_Domingo b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Santo_Domingo
new file mode 100644
index 0000000000000000000000000000000000000000..4fe36fd4c11f998ba3f626c5e23d97bd82d12791
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Santo_Domingo differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Sao_Paulo b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Sao_Paulo
new file mode 100644
index 0000000000000000000000000000000000000000..13ff083869a9ac8476775587de8476654a100017
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Sao_Paulo differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Scoresbysund b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Scoresbysund
new file mode 100644
index 0000000000000000000000000000000000000000..e20e9e1c4272adc40c07562bb3d6767cb056b550
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Scoresbysund differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Shiprock b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Shiprock
new file mode 100644
index 0000000000000000000000000000000000000000..5fbe26b1d93d1acb2561c390c1e097d07f1a262e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Shiprock differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Sitka b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Sitka
new file mode 100644
index 0000000000000000000000000000000000000000..31f7061371910ad42e4310b7a646ba1a98b6cba4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Sitka differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/St_Barthelemy b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/St_Barthelemy
new file mode 100644
index 0000000000000000000000000000000000000000..a662a57137b69e8ba445e899566222cdd422a764
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/St_Barthelemy differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/St_Johns b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/St_Johns
new file mode 100644
index 0000000000000000000000000000000000000000..65a5b0c720dad151ffdcba3dbe91c8bd638845c6
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/St_Johns differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/St_Kitts b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/St_Kitts
new file mode 100644
index 0000000000000000000000000000000000000000..a662a57137b69e8ba445e899566222cdd422a764
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/St_Kitts differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/St_Lucia b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/St_Lucia
new file mode 100644
index 0000000000000000000000000000000000000000..a662a57137b69e8ba445e899566222cdd422a764
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/St_Lucia differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/St_Thomas b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/St_Thomas
new file mode 100644
index 0000000000000000000000000000000000000000..a662a57137b69e8ba445e899566222cdd422a764
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/St_Thomas differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/St_Vincent b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/St_Vincent
new file mode 100644
index 0000000000000000000000000000000000000000..a662a57137b69e8ba445e899566222cdd422a764
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/St_Vincent differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Swift_Current b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Swift_Current
new file mode 100644
index 0000000000000000000000000000000000000000..8e9ef255eeb11515b84126d9ee5c0c6b3c72f2a0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Swift_Current differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Tegucigalpa b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Tegucigalpa
new file mode 100644
index 0000000000000000000000000000000000000000..2adacb2e500e2f9621b2debc1756d308747b67f6
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Tegucigalpa differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Thule b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Thule
new file mode 100644
index 0000000000000000000000000000000000000000..6f802f1c2acf9cc73481ae86c9e099fcfc28cf25
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Thule differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Thunder_Bay b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Thunder_Bay
new file mode 100644
index 0000000000000000000000000000000000000000..e504c9acf198fbd447221f120354774e46dbbcc0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Thunder_Bay differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Tijuana b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Tijuana
new file mode 100644
index 0000000000000000000000000000000000000000..ada6bf78b2815d3d99c97d521ab9a6b35c8af8c3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Tijuana differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Toronto b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Toronto
new file mode 100644
index 0000000000000000000000000000000000000000..6752c5b05285678b86aea170f0921fc5f5e57738
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Toronto differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Tortola b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Tortola
new file mode 100644
index 0000000000000000000000000000000000000000..a662a57137b69e8ba445e899566222cdd422a764
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Tortola differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Vancouver b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Vancouver
new file mode 100644
index 0000000000000000000000000000000000000000..bb60cbced30763c08b6cf73554c8d6651ff387d0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Vancouver differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Virgin b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Virgin
new file mode 100644
index 0000000000000000000000000000000000000000..a662a57137b69e8ba445e899566222cdd422a764
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Virgin differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Whitehorse b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Whitehorse
new file mode 100644
index 0000000000000000000000000000000000000000..9ee229c0eb82c6aea7359e0b38677264f3cad8ab
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Whitehorse differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Winnipeg b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Winnipeg
new file mode 100644
index 0000000000000000000000000000000000000000..ac40299f6b27043e8f2454ac594b0ec184c1a237
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Winnipeg differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Yakutat b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Yakutat
new file mode 100644
index 0000000000000000000000000000000000000000..da209f9f0a07625ec83d4ec84917216347f5687f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Yakutat differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Yellowknife b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Yellowknife
new file mode 100644
index 0000000000000000000000000000000000000000..e6afa390e879f97cef351e382ae62daf183e8d77
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/America/Yellowknife differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/Casey b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/Casey
new file mode 100644
index 0000000000000000000000000000000000000000..cbcbe4e339d934f57725542db534641292fd077c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/Casey differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/Davis b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/Davis
new file mode 100644
index 0000000000000000000000000000000000000000..916f2c25926bf444b7c366110a29a3fafb17fbc0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/Davis differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/DumontDUrville b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/DumontDUrville
new file mode 100644
index 0000000000000000000000000000000000000000..920ad27e629e350c1baac8537bb639a59fd19039
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/DumontDUrville differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/Macquarie b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/Macquarie
new file mode 100644
index 0000000000000000000000000000000000000000..9e7cc687d76b00d8f112245d5c5d2f20a2a61814
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/Macquarie differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/Mawson b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/Mawson
new file mode 100644
index 0000000000000000000000000000000000000000..b32e7fd6c6a3faddaa98dec06b88dcca3574486f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/Mawson differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/McMurdo b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/McMurdo
new file mode 100644
index 0000000000000000000000000000000000000000..6575fdce31183d8238b18f2f30ab5b9227c7071c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/McMurdo differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/Palmer b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/Palmer
new file mode 100644
index 0000000000000000000000000000000000000000..3dd85f84ff48fb8553b4a964659872ae1fb7d33d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/Palmer differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/Rothera b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/Rothera
new file mode 100644
index 0000000000000000000000000000000000000000..8b2430a20eb481b488fccd5f1a966d06bfa214b9
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/Rothera differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/South_Pole b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/South_Pole
new file mode 100644
index 0000000000000000000000000000000000000000..6575fdce31183d8238b18f2f30ab5b9227c7071c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/South_Pole differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/Syowa b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/Syowa
new file mode 100644
index 0000000000000000000000000000000000000000..2aea25f8c210369e0b805d0dd5f0e899190c2340
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/Syowa differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/Troll b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/Troll
new file mode 100644
index 0000000000000000000000000000000000000000..5e565da2f6b138b70179cb7e72347163ab44b6ab
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/Troll differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/Vostok b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/Vostok
new file mode 100644
index 0000000000000000000000000000000000000000..728305305df3d82fca7829ff3e9581583758dbeb
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Antarctica/Vostok differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Arctic/Longyearbyen b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Arctic/Longyearbyen
new file mode 100644
index 0000000000000000000000000000000000000000..15a34c3cedb7c9ca519c195f5ec0ce9d8d1885a5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Arctic/Longyearbyen differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Aden b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Aden
new file mode 100644
index 0000000000000000000000000000000000000000..2aea25f8c210369e0b805d0dd5f0e899190c2340
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Aden differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Almaty b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Almaty
new file mode 100644
index 0000000000000000000000000000000000000000..a4b00779005803d665d613e326bb77d86f45ebd8
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Almaty differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Amman b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Amman
new file mode 100644
index 0000000000000000000000000000000000000000..5dcf7e0977ba0faf59f44e2891d2eb39b11ba1f6
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Amman differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Anadyr b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Anadyr
new file mode 100644
index 0000000000000000000000000000000000000000..6ed8b7cb07634b9d669c6c36448fb8df0fdb32a3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Anadyr differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Aqtau b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Aqtau
new file mode 100644
index 0000000000000000000000000000000000000000..e2d0f919541fc9d64f01d8c6128c7e73a94869cf
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Aqtau differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Aqtobe b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Aqtobe
new file mode 100644
index 0000000000000000000000000000000000000000..06f0a13a662aacfa70c489d2fec5276362576089
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Aqtobe differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Ashgabat b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Ashgabat
new file mode 100644
index 0000000000000000000000000000000000000000..73891af1ee95a4d5d602967fd5a965c8a7ec1327
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Ashgabat differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Ashkhabad b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Ashkhabad
new file mode 100644
index 0000000000000000000000000000000000000000..73891af1ee95a4d5d602967fd5a965c8a7ec1327
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Ashkhabad differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Atyrau b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Atyrau
new file mode 100644
index 0000000000000000000000000000000000000000..8b5153e0545f550c6f9b0da9ce134a8a7a4ebe36
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Atyrau differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Baghdad b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Baghdad
new file mode 100644
index 0000000000000000000000000000000000000000..f7162edf93c28193436cc379c0dfe4185e368a42
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Baghdad differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Bahrain b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Bahrain
new file mode 100644
index 0000000000000000000000000000000000000000..63188b269d077e29f48a42a03c2a52aefdb61320
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Bahrain differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Baku b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Baku
new file mode 100644
index 0000000000000000000000000000000000000000..a0de74b958e42ade6f93ec0c4bc06ae714d5c606
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Baku differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Bangkok b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Bangkok
new file mode 100644
index 0000000000000000000000000000000000000000..c292ac5b5f489a88bf10a1263b14e7412479d9e7
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Bangkok differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Barnaul b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Barnaul
new file mode 100644
index 0000000000000000000000000000000000000000..759592a255408caf354d0c4a2e406ee5a1638181
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Barnaul differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Beirut b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Beirut
new file mode 100644
index 0000000000000000000000000000000000000000..fb266ede2279b6aff913538d9d5aae3935e53aeb
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Beirut differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Bishkek b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Bishkek
new file mode 100644
index 0000000000000000000000000000000000000000..f6e20dd3a85eadd3dcc28cf59f3acc5b95584f0e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Bishkek differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Brunei b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Brunei
new file mode 100644
index 0000000000000000000000000000000000000000..3dab0abf4ed950f46a23282c59f6f6fa3b6ba758
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Brunei differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Calcutta b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Calcutta
new file mode 100644
index 0000000000000000000000000000000000000000..0014046d29a38e9b8006f746fea794d7f71eb479
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Calcutta differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Chita b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Chita
new file mode 100644
index 0000000000000000000000000000000000000000..c4149c05ce2638c976aec0d18eb7567c7276bc80
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Chita differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Choibalsan b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Choibalsan
new file mode 100644
index 0000000000000000000000000000000000000000..e48daa82435078a2a731225ee963b30a48f78ce1
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Choibalsan differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Chongqing b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Chongqing
new file mode 100644
index 0000000000000000000000000000000000000000..91f6f8bc2e234bafd484146986bdb289082c3588
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Chongqing differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Chungking b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Chungking
new file mode 100644
index 0000000000000000000000000000000000000000..91f6f8bc2e234bafd484146986bdb289082c3588
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Chungking differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Colombo b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Colombo
new file mode 100644
index 0000000000000000000000000000000000000000..62c64d85dfbdc41ae8e78bc8304ceb35b16b4255
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Colombo differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Dacca b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Dacca
new file mode 100644
index 0000000000000000000000000000000000000000..b11c92841068c12a5d0102402127ff4537a66899
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Dacca differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Damascus b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Damascus
new file mode 100644
index 0000000000000000000000000000000000000000..d9104a7ab8cb13b4c87eb25ff633b03ec12ddd81
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Damascus differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Dhaka b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Dhaka
new file mode 100644
index 0000000000000000000000000000000000000000..b11c92841068c12a5d0102402127ff4537a66899
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Dhaka differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Dili b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Dili
new file mode 100644
index 0000000000000000000000000000000000000000..30943bbd0a8251404c407cde82243294071baf22
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Dili differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Dubai b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Dubai
new file mode 100644
index 0000000000000000000000000000000000000000..fc0a589e2b22acd9a76e402f0e7ef3c7b0547148
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Dubai differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Dushanbe b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Dushanbe
new file mode 100644
index 0000000000000000000000000000000000000000..82d85b8c1b387ffc54acf509ac75220465b64d4c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Dushanbe differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Famagusta b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Famagusta
new file mode 100644
index 0000000000000000000000000000000000000000..653b146a60e5e5641a07bfc82f9590dad1ed69f5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Famagusta differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Gaza b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Gaza
new file mode 100644
index 0000000000000000000000000000000000000000..a4ca1c6e028f91977916e3ab1f45d5be135eea02
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Gaza differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Harbin b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Harbin
new file mode 100644
index 0000000000000000000000000000000000000000..91f6f8bc2e234bafd484146986bdb289082c3588
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Harbin differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Hebron b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Hebron
new file mode 100644
index 0000000000000000000000000000000000000000..1a206a70c7da285a55cf3790e116e633fbd0a14a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Hebron differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Ho_Chi_Minh b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Ho_Chi_Minh
new file mode 100644
index 0000000000000000000000000000000000000000..e2934e371b6d5cf80244d8d55594f7094bc9cbb8
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Ho_Chi_Minh differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Hong_Kong b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Hong_Kong
new file mode 100644
index 0000000000000000000000000000000000000000..23d0375fba3377a3d513d849c0d29c82ad2add64
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Hong_Kong differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Hovd b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Hovd
new file mode 100644
index 0000000000000000000000000000000000000000..4cb800a9187901afa985f1ae67565d654021cabc
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Hovd differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Irkutsk b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Irkutsk
new file mode 100644
index 0000000000000000000000000000000000000000..4dcbbb7ea21e193cd2e2578a17bc955caea227a4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Irkutsk differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Istanbul b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Istanbul
new file mode 100644
index 0000000000000000000000000000000000000000..508446bb6aee2841ab88e82607a6ad6e748e7db7
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Istanbul differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Jakarta b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Jakarta
new file mode 100644
index 0000000000000000000000000000000000000000..5baa3a8f2ed24f68c49bd7e3ccd4772a8e734414
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Jakarta differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Jayapura b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Jayapura
new file mode 100644
index 0000000000000000000000000000000000000000..3002c82022f7e9a4ff1ad545ea617106a13b24e0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Jayapura differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Jerusalem b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Jerusalem
new file mode 100644
index 0000000000000000000000000000000000000000..1ebd0664aa29c0abd722661f761031ec0304631c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Jerusalem differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Kabul b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Kabul
new file mode 100644
index 0000000000000000000000000000000000000000..d19b9bd51d701d8bfe7b3e95c9f6f3c736a9aeee
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Kabul differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Kamchatka b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Kamchatka
new file mode 100644
index 0000000000000000000000000000000000000000..3e80b4e09f792e37422ffafe42355aeb3e6940eb
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Kamchatka differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Karachi b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Karachi
new file mode 100644
index 0000000000000000000000000000000000000000..ba65c0e8d31cb1d59935878af29baac722f45936
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Karachi differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Kashgar b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Kashgar
new file mode 100644
index 0000000000000000000000000000000000000000..faa14d92d58fa883c4203022e091dc9154ac92dc
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Kashgar differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Kathmandu b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Kathmandu
new file mode 100644
index 0000000000000000000000000000000000000000..a5d510753f02d5dedcf1ffac153558466dca9b99
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Kathmandu differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Katmandu b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Katmandu
new file mode 100644
index 0000000000000000000000000000000000000000..a5d510753f02d5dedcf1ffac153558466dca9b99
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Katmandu differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Khandyga b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Khandyga
new file mode 100644
index 0000000000000000000000000000000000000000..72bea64ba83546e72ff9d6a4f9a9e64cc2d93b47
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Khandyga differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Kolkata b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Kolkata
new file mode 100644
index 0000000000000000000000000000000000000000..0014046d29a38e9b8006f746fea794d7f71eb479
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Kolkata differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Krasnoyarsk b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Krasnoyarsk
new file mode 100644
index 0000000000000000000000000000000000000000..30c6f165052efa4fbc176cbcf19b626b1615d88b
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Krasnoyarsk differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Kuala_Lumpur b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Kuala_Lumpur
new file mode 100644
index 0000000000000000000000000000000000000000..612b01e71cf873b7f8a80d25d7906d77aa91ca5c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Kuala_Lumpur differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Kuching b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Kuching
new file mode 100644
index 0000000000000000000000000000000000000000..c86750cb7d512966b1a8b19cb44531a618b00930
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Kuching differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Kuwait b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Kuwait
new file mode 100644
index 0000000000000000000000000000000000000000..2aea25f8c210369e0b805d0dd5f0e899190c2340
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Kuwait differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Macao b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Macao
new file mode 100644
index 0000000000000000000000000000000000000000..cac65063d0dbf48e37c547fba3b67f34110d5a90
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Macao differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Macau b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Macau
new file mode 100644
index 0000000000000000000000000000000000000000..cac65063d0dbf48e37c547fba3b67f34110d5a90
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Macau differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Magadan b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Magadan
new file mode 100644
index 0000000000000000000000000000000000000000..b4fcac18e3540f029f01bbf2751045b3983d96fa
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Magadan differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Makassar b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Makassar
new file mode 100644
index 0000000000000000000000000000000000000000..556ba866933d37f3cfcf8042045d64e209bae30f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Makassar differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Manila b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Manila
new file mode 100644
index 0000000000000000000000000000000000000000..f4f4b04efa2b6a442d4072b3899f4dae69bdd771
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Manila differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Muscat b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Muscat
new file mode 100644
index 0000000000000000000000000000000000000000..fc0a589e2b22acd9a76e402f0e7ef3c7b0547148
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Muscat differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Nicosia b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Nicosia
new file mode 100644
index 0000000000000000000000000000000000000000..f7f10ab7665e94ca44fd8cd98a362cd4b304eff1
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Nicosia differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Novokuznetsk b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Novokuznetsk
new file mode 100644
index 0000000000000000000000000000000000000000..d983276119c95872882589a9fdd829eb1f86f606
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Novokuznetsk differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Novosibirsk b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Novosibirsk
new file mode 100644
index 0000000000000000000000000000000000000000..e0ee5fcea981010008b7b9dc3af1cdd53363128c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Novosibirsk differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Omsk b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Omsk
new file mode 100644
index 0000000000000000000000000000000000000000..b29b7693118fb1ed214adb8a4e686172d0914b2f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Omsk differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Oral b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Oral
new file mode 100644
index 0000000000000000000000000000000000000000..ad1f9ca1ca321f6852d1d654d1fb0b14c556330f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Oral differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Phnom_Penh b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Phnom_Penh
new file mode 100644
index 0000000000000000000000000000000000000000..c292ac5b5f489a88bf10a1263b14e7412479d9e7
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Phnom_Penh differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Pontianak b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Pontianak
new file mode 100644
index 0000000000000000000000000000000000000000..12ce24cbeae404efe6921081d21289be452ff88d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Pontianak differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Pyongyang b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Pyongyang
new file mode 100644
index 0000000000000000000000000000000000000000..7ad7e0b2cf8fa4fc844fe8cc9c58e4f3018cead1
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Pyongyang differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Qatar b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Qatar
new file mode 100644
index 0000000000000000000000000000000000000000..63188b269d077e29f48a42a03c2a52aefdb61320
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Qatar differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Qostanay b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Qostanay
new file mode 100644
index 0000000000000000000000000000000000000000..73b9d963efc99f7af196755a6b2be015d9cb7a67
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Qostanay differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Qyzylorda b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Qyzylorda
new file mode 100644
index 0000000000000000000000000000000000000000..c2fe4c144c82b300ff126ed44098ee2bf8d752d3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Qyzylorda differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Rangoon b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Rangoon
new file mode 100644
index 0000000000000000000000000000000000000000..dd77395b05a89b875683f8aa82062748f2ed504c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Rangoon differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Riyadh b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Riyadh
new file mode 100644
index 0000000000000000000000000000000000000000..2aea25f8c210369e0b805d0dd5f0e899190c2340
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Riyadh differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Saigon b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Saigon
new file mode 100644
index 0000000000000000000000000000000000000000..e2934e371b6d5cf80244d8d55594f7094bc9cbb8
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Saigon differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Sakhalin b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Sakhalin
new file mode 100644
index 0000000000000000000000000000000000000000..485459ce0397787a099064952c66989712ae707f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Sakhalin differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Samarkand b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Samarkand
new file mode 100644
index 0000000000000000000000000000000000000000..030d47ce0785f3bdb75314e0d03fb20c3e0172d5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Samarkand differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Seoul b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Seoul
new file mode 100644
index 0000000000000000000000000000000000000000..96199e73e73aafacd89e48cb2855a96d7a134e1d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Seoul differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Shanghai b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Shanghai
new file mode 100644
index 0000000000000000000000000000000000000000..91f6f8bc2e234bafd484146986bdb289082c3588
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Shanghai differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Singapore b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Singapore
new file mode 100644
index 0000000000000000000000000000000000000000..2364b2178b03853b1771cb3f34b964fdb82e2e91
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Singapore differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Srednekolymsk b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Srednekolymsk
new file mode 100644
index 0000000000000000000000000000000000000000..261a9832b3ee4bda7c9935b566a7f7f0e6489e97
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Srednekolymsk differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Taipei b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Taipei
new file mode 100644
index 0000000000000000000000000000000000000000..24c43444b6751343d2915843d03e55753d4d7359
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Taipei differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Tashkent b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Tashkent
new file mode 100644
index 0000000000000000000000000000000000000000..32a9d7d0c9cbfa841cc4e922fd1b486a34b57370
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Tashkent differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Tbilisi b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Tbilisi
new file mode 100644
index 0000000000000000000000000000000000000000..b608d79748884c4a8271e695fe4ed992a91fea65
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Tbilisi differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Tehran b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Tehran
new file mode 100644
index 0000000000000000000000000000000000000000..8cec5ad7de2f2c14dd4b0a6c375d7198ef7429b6
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Tehran differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Tel_Aviv b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Tel_Aviv
new file mode 100644
index 0000000000000000000000000000000000000000..1ebd0664aa29c0abd722661f761031ec0304631c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Tel_Aviv differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Thimbu b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Thimbu
new file mode 100644
index 0000000000000000000000000000000000000000..fe409c7a2a40294af6bae4523492be88d38a97bc
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Thimbu differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Thimphu b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Thimphu
new file mode 100644
index 0000000000000000000000000000000000000000..fe409c7a2a40294af6bae4523492be88d38a97bc
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Thimphu differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Tokyo b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Tokyo
new file mode 100644
index 0000000000000000000000000000000000000000..26f4d34d67b46513491f26c2e661c6e653cc130d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Tokyo differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Tomsk b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Tomsk
new file mode 100644
index 0000000000000000000000000000000000000000..670e2ad2ce22f08d496b42363caa2e424121b20c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Tomsk differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Ujung_Pandang b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Ujung_Pandang
new file mode 100644
index 0000000000000000000000000000000000000000..556ba866933d37f3cfcf8042045d64e209bae30f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Ujung_Pandang differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Ulaanbaatar b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Ulaanbaatar
new file mode 100644
index 0000000000000000000000000000000000000000..2e20cc3a438bb2076bfc1856045075dd041cad3d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Ulaanbaatar differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Ulan_Bator b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Ulan_Bator
new file mode 100644
index 0000000000000000000000000000000000000000..2e20cc3a438bb2076bfc1856045075dd041cad3d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Ulan_Bator differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Urumqi b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Urumqi
new file mode 100644
index 0000000000000000000000000000000000000000..faa14d92d58fa883c4203022e091dc9154ac92dc
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Urumqi differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Ust-Nera b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Ust-Nera
new file mode 100644
index 0000000000000000000000000000000000000000..9e4a78f6a547de2a91e728ac4ab5ffa4884105b4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Ust-Nera differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Vientiane b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Vientiane
new file mode 100644
index 0000000000000000000000000000000000000000..c292ac5b5f489a88bf10a1263b14e7412479d9e7
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Vientiane differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Vladivostok b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Vladivostok
new file mode 100644
index 0000000000000000000000000000000000000000..8ab253ce73555cbbf42ec67a8560acef080fc480
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Vladivostok differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Yakutsk b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Yakutsk
new file mode 100644
index 0000000000000000000000000000000000000000..c815e99b1a8f2d4b9bd45d3a6f39c95db5bbf563
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Yakutsk differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Yangon b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Yangon
new file mode 100644
index 0000000000000000000000000000000000000000..dd77395b05a89b875683f8aa82062748f2ed504c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Yangon differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Yekaterinburg b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Yekaterinburg
new file mode 100644
index 0000000000000000000000000000000000000000..6958d7edddb85d298c5f2b890b21d9ca2056e9c5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Yekaterinburg differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Yerevan b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Yerevan
new file mode 100644
index 0000000000000000000000000000000000000000..250bfe020ada912671d670e77403a269c74b658e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Asia/Yerevan differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/Azores b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/Azores
new file mode 100644
index 0000000000000000000000000000000000000000..00a564fe4c6bd24337793b551c8b05ec58cc6589
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/Azores differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/Bermuda b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/Bermuda
new file mode 100644
index 0000000000000000000000000000000000000000..527524ed295aba41b9a0448ffd7993c489a2cb99
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/Bermuda differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/Canary b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/Canary
new file mode 100644
index 0000000000000000000000000000000000000000..f3192156ff043a529461aa9004a8de9dda326f7d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/Canary differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/Cape_Verde b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/Cape_Verde
new file mode 100644
index 0000000000000000000000000000000000000000..e2a49d248de086306d2dd16a2b2d497a008c8f07
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/Cape_Verde differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/Faeroe b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/Faeroe
new file mode 100644
index 0000000000000000000000000000000000000000..4dab7ef0859c244b916d61b7489d7371881e0ca2
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/Faeroe differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/Faroe b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/Faroe
new file mode 100644
index 0000000000000000000000000000000000000000..4dab7ef0859c244b916d61b7489d7371881e0ca2
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/Faroe differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/Jan_Mayen b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/Jan_Mayen
new file mode 100644
index 0000000000000000000000000000000000000000..15a34c3cedb7c9ca519c195f5ec0ce9d8d1885a5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/Jan_Mayen differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/Madeira b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/Madeira
new file mode 100644
index 0000000000000000000000000000000000000000..7ddcd883fedcb493b7ab527483c7d4a6d4fc5055
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/Madeira differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/Reykjavik b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/Reykjavik
new file mode 100644
index 0000000000000000000000000000000000000000..10e0fc8190a401f67b861ced3b8b16401432a565
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/Reykjavik differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/South_Georgia b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/South_Georgia
new file mode 100644
index 0000000000000000000000000000000000000000..446660861227aa8ceb7084f48c3b2654ea64f4dc
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/South_Georgia differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/St_Helena b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/St_Helena
new file mode 100644
index 0000000000000000000000000000000000000000..28b32ab2e0b9053f39a91d9f28b6072e41423954
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/St_Helena differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/Stanley b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/Stanley
new file mode 100644
index 0000000000000000000000000000000000000000..88077f110715ac1e349708821ac1e00f35bf6395
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Atlantic/Stanley differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/ACT b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/ACT
new file mode 100644
index 0000000000000000000000000000000000000000..0aea4c3d43e504dafabc031d7ca9cbe8db46163c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/ACT differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Adelaide b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Adelaide
new file mode 100644
index 0000000000000000000000000000000000000000..f5dedca59e2b220f7395c73f60ff26e610373e8b
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Adelaide differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Brisbane b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Brisbane
new file mode 100644
index 0000000000000000000000000000000000000000..7ff9949ffa93e44835ab133998b89e440094f909
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Brisbane differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Broken_Hill b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Broken_Hill
new file mode 100644
index 0000000000000000000000000000000000000000..698c76e30e91f568a29daca12993cfacbfdbf83e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Broken_Hill differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Canberra b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Canberra
new file mode 100644
index 0000000000000000000000000000000000000000..0aea4c3d43e504dafabc031d7ca9cbe8db46163c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Canberra differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Currie b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Currie
new file mode 100644
index 0000000000000000000000000000000000000000..3adb8e1bf7c6ec51f1c100538799271d7d7a6e6f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Currie differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Darwin b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Darwin
new file mode 100644
index 0000000000000000000000000000000000000000..74a30879bc6180d588a706451226cb4c95faf79d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Darwin differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Eucla b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Eucla
new file mode 100644
index 0000000000000000000000000000000000000000..3bf1171caddd7882d8a438f1d5eca0026549be08
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Eucla differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Hobart b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Hobart
new file mode 100644
index 0000000000000000000000000000000000000000..3adb8e1bf7c6ec51f1c100538799271d7d7a6e6f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Hobart differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/LHI b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/LHI
new file mode 100644
index 0000000000000000000000000000000000000000..9e04a80ecea45473faabeb609eb06cfe62193d48
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/LHI differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Lindeman b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Lindeman
new file mode 100644
index 0000000000000000000000000000000000000000..4ee1825abfe65887069dcbd10bcf786d50ba0702
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Lindeman differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Lord_Howe b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Lord_Howe
new file mode 100644
index 0000000000000000000000000000000000000000..9e04a80ecea45473faabeb609eb06cfe62193d48
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Lord_Howe differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Melbourne b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Melbourne
new file mode 100644
index 0000000000000000000000000000000000000000..ee903f4b1fc292bc9cbec7b501a266030ef3510e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Melbourne differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/NSW b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/NSW
new file mode 100644
index 0000000000000000000000000000000000000000..0aea4c3d43e504dafabc031d7ca9cbe8db46163c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/NSW differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/North b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/North
new file mode 100644
index 0000000000000000000000000000000000000000..74a30879bc6180d588a706451226cb4c95faf79d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/North differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Perth b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Perth
new file mode 100644
index 0000000000000000000000000000000000000000..f8ddbdf215d34b022af11c3d1930dd6ea4dca87e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Perth differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Queensland b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Queensland
new file mode 100644
index 0000000000000000000000000000000000000000..7ff9949ffa93e44835ab133998b89e440094f909
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Queensland differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/South b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/South
new file mode 100644
index 0000000000000000000000000000000000000000..f5dedca59e2b220f7395c73f60ff26e610373e8b
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/South differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Sydney b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Sydney
new file mode 100644
index 0000000000000000000000000000000000000000..0aea4c3d43e504dafabc031d7ca9cbe8db46163c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Sydney differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Tasmania b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Tasmania
new file mode 100644
index 0000000000000000000000000000000000000000..3adb8e1bf7c6ec51f1c100538799271d7d7a6e6f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Tasmania differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Victoria b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Victoria
new file mode 100644
index 0000000000000000000000000000000000000000..ee903f4b1fc292bc9cbec7b501a266030ef3510e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Victoria differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/West b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/West
new file mode 100644
index 0000000000000000000000000000000000000000..f8ddbdf215d34b022af11c3d1930dd6ea4dca87e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/West differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Yancowinna b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Yancowinna
new file mode 100644
index 0000000000000000000000000000000000000000..698c76e30e91f568a29daca12993cfacbfdbf83e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Australia/Yancowinna differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Brazil/Acre b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Brazil/Acre
new file mode 100644
index 0000000000000000000000000000000000000000..a374cb43d98bfbd06c82ca306a74f96993fd5657
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Brazil/Acre differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Brazil/DeNoronha b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Brazil/DeNoronha
new file mode 100644
index 0000000000000000000000000000000000000000..f140726f2a6945357b325f0bb452dce7557bbff7
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Brazil/DeNoronha differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Brazil/East b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Brazil/East
new file mode 100644
index 0000000000000000000000000000000000000000..13ff083869a9ac8476775587de8476654a100017
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Brazil/East differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Brazil/West b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Brazil/West
new file mode 100644
index 0000000000000000000000000000000000000000..63d58f80f556e1c3135d6dbcb2a2dfc6f61b4fc9
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Brazil/West differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/CET b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/CET
new file mode 100644
index 0000000000000000000000000000000000000000..122e934210cabf0b29a2dd7d11eb8220ed1cad43
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/CET differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/CST6CDT b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/CST6CDT
new file mode 100644
index 0000000000000000000000000000000000000000..ca67929fbeb05083c63e8319dd9ebf65b3d75e4d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/CST6CDT differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Canada/Atlantic b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Canada/Atlantic
new file mode 100644
index 0000000000000000000000000000000000000000..756099abe6cee44295a5566ad6cd0c352fb82e64
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Canada/Atlantic differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Canada/Central b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Canada/Central
new file mode 100644
index 0000000000000000000000000000000000000000..ac40299f6b27043e8f2454ac594b0ec184c1a237
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Canada/Central differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Canada/Eastern b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Canada/Eastern
new file mode 100644
index 0000000000000000000000000000000000000000..6752c5b05285678b86aea170f0921fc5f5e57738
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Canada/Eastern differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Canada/Mountain b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Canada/Mountain
new file mode 100644
index 0000000000000000000000000000000000000000..cd78a6f8be1dd55ac5afd25bbae39bd5706e42d1
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Canada/Mountain differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Canada/Newfoundland b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Canada/Newfoundland
new file mode 100644
index 0000000000000000000000000000000000000000..65a5b0c720dad151ffdcba3dbe91c8bd638845c6
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Canada/Newfoundland differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Canada/Pacific b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Canada/Pacific
new file mode 100644
index 0000000000000000000000000000000000000000..bb60cbced30763c08b6cf73554c8d6651ff387d0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Canada/Pacific differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Canada/Saskatchewan b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Canada/Saskatchewan
new file mode 100644
index 0000000000000000000000000000000000000000..20c9c84df491e4072ec4c5d2c931a7433d9fd394
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Canada/Saskatchewan differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Canada/Yukon b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Canada/Yukon
new file mode 100644
index 0000000000000000000000000000000000000000..9ee229c0eb82c6aea7359e0b38677264f3cad8ab
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Canada/Yukon differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Chile/Continental b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Chile/Continental
new file mode 100644
index 0000000000000000000000000000000000000000..816a0428188d99f437004312ee73c3860ee0f54f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Chile/Continental differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Chile/EasterIsland b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Chile/EasterIsland
new file mode 100644
index 0000000000000000000000000000000000000000..cae3744096402e8a452336544edf96ca9ae5ad8d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Chile/EasterIsland differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Cuba b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Cuba
new file mode 100644
index 0000000000000000000000000000000000000000..b69ac4510784f23ee794cd6d11e62315a7318e5e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Cuba differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/EET b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/EET
new file mode 100644
index 0000000000000000000000000000000000000000..cbdb71ddd38be8f4a23e57bdb4b86e52195e9f89
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/EET differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/EST b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/EST
new file mode 100644
index 0000000000000000000000000000000000000000..21ebc00b3fc096035b9810519d778d04a3562a44
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/EST differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/EST5EDT b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/EST5EDT
new file mode 100644
index 0000000000000000000000000000000000000000..9bce5007d4dbb871974a69cb0f68151c1ee22556
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/EST5EDT differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Egypt b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Egypt
new file mode 100644
index 0000000000000000000000000000000000000000..d3f819623fc9ef90d327380fad15341ec1a0e202
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Egypt differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Eire b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Eire
new file mode 100644
index 0000000000000000000000000000000000000000..1d994902db21814a626e42639d7a96b18ee73756
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Eire differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT
new file mode 100644
index 0000000000000000000000000000000000000000..c63474664a289aa3c3c0d8b2ce06d484679754c0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+0 b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+0
new file mode 100644
index 0000000000000000000000000000000000000000..c63474664a289aa3c3c0d8b2ce06d484679754c0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+0 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+1 b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+1
new file mode 100644
index 0000000000000000000000000000000000000000..4dab6f9005bea50a065c685ec8260b0da2bff921
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+1 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+10 b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+10
new file mode 100644
index 0000000000000000000000000000000000000000..c749290af2f6b5fe22770c34eb1e8fc87cd85aff
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+10 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+11 b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+11
new file mode 100644
index 0000000000000000000000000000000000000000..d969982309e5ca7d32979a7dad814ca307d2cd8d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+11 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+12 b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+12
new file mode 100644
index 0000000000000000000000000000000000000000..cdeec90973be28ee4075eadd22b8b574db2d7a5f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+12 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+2 b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+2
new file mode 100644
index 0000000000000000000000000000000000000000..fbd2a941fda996f4abc1f0e09cdf99c271f5a1e2
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+2 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+3 b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+3
new file mode 100644
index 0000000000000000000000000000000000000000..ee246ef56f18de61105af0c14d201fd090f74905
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+3 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+4 b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+4
new file mode 100644
index 0000000000000000000000000000000000000000..5a25ff2a6afda2cb09b9e147ad20610bc1923444
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+4 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+5 b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+5
new file mode 100644
index 0000000000000000000000000000000000000000..c0b745f1cc44d03a00f8bdf127c154392e3baf27
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+5 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+6 b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+6
new file mode 100644
index 0000000000000000000000000000000000000000..06e777d57e0267a0635b6b284729fddcfe6221dd
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+6 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+7 b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+7
new file mode 100644
index 0000000000000000000000000000000000000000..4e0b53a082f11f9b9debf5e110b97b1b0473c9a6
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+7 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+8 b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+8
new file mode 100644
index 0000000000000000000000000000000000000000..714b0c562889a8a774d9aa27810d8400164d00e6
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+8 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+9 b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+9
new file mode 100644
index 0000000000000000000000000000000000000000..78b9daa373d2aa2856eafcc92ebc6d899cafde5c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT+9 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-0 b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-0
new file mode 100644
index 0000000000000000000000000000000000000000..c63474664a289aa3c3c0d8b2ce06d484679754c0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-0 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-1 b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-1
new file mode 100644
index 0000000000000000000000000000000000000000..a838bebf5e7bac3e1257eeb0a61c1b83feb1324c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-1 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-10 b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-10
new file mode 100644
index 0000000000000000000000000000000000000000..68ff77db0d95c7d054ef33c05e05ba71bcbbbdd8
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-10 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-11 b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-11
new file mode 100644
index 0000000000000000000000000000000000000000..66af5a42be440f1fb8fec3b915afb49b356f63a5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-11 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-12 b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-12
new file mode 100644
index 0000000000000000000000000000000000000000..17ba5057727dd73bd5f6234cc5b239b71a861945
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-12 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-13 b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-13
new file mode 100644
index 0000000000000000000000000000000000000000..5f3706ce64cadf081a6c56abd7ba423575a4abb2
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-13 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-14 b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-14
new file mode 100644
index 0000000000000000000000000000000000000000..7e9f9c465ce6211c65d617f60472c9b55b5052c5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-14 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-2 b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-2
new file mode 100644
index 0000000000000000000000000000000000000000..fcef6d9acb247deb539fcc4b30149802572ea642
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-2 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-3 b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-3
new file mode 100644
index 0000000000000000000000000000000000000000..27973bc857b4e618218ca2790acacb81f7c7bb82
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-3 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-4 b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-4
new file mode 100644
index 0000000000000000000000000000000000000000..1efd841261a977ae218d408f9cc308c3e312a5e8
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-4 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-5 b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-5
new file mode 100644
index 0000000000000000000000000000000000000000..1f761844fc44f8228bb748235bfd30be6c389cd1
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-5 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-6 b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-6
new file mode 100644
index 0000000000000000000000000000000000000000..952681ed46cb60e59baf76a2c43b49d5f67255d1
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-6 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-7 b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-7
new file mode 100644
index 0000000000000000000000000000000000000000..cefc9126c691060225ff2eee1241b1e5e9825fcd
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-7 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-8 b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-8
new file mode 100644
index 0000000000000000000000000000000000000000..afb093da00685297cb11347c4840acf3a8e2e2bf
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-8 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-9 b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-9
new file mode 100644
index 0000000000000000000000000000000000000000..9265fb7c2071ec0e66c657ad2ae42d5dd525fe97
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT-9 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT0 b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT0
new file mode 100644
index 0000000000000000000000000000000000000000..c63474664a289aa3c3c0d8b2ce06d484679754c0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/GMT0 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/Greenwich b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/Greenwich
new file mode 100644
index 0000000000000000000000000000000000000000..c63474664a289aa3c3c0d8b2ce06d484679754c0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/Greenwich differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/UCT b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/UCT
new file mode 100644
index 0000000000000000000000000000000000000000..91558be0c2bf903b2364215ba26d5227d6126508
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/UCT differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/UTC b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/UTC
new file mode 100644
index 0000000000000000000000000000000000000000..91558be0c2bf903b2364215ba26d5227d6126508
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/UTC differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/Universal b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/Universal
new file mode 100644
index 0000000000000000000000000000000000000000..91558be0c2bf903b2364215ba26d5227d6126508
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/Universal differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/Zulu b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/Zulu
new file mode 100644
index 0000000000000000000000000000000000000000..91558be0c2bf903b2364215ba26d5227d6126508
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Etc/Zulu differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Amsterdam b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Amsterdam
new file mode 100644
index 0000000000000000000000000000000000000000..c3ff07b436aedf662eae60f50668f5abcdb172b6
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Amsterdam differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Andorra b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Andorra
new file mode 100644
index 0000000000000000000000000000000000000000..5962550392fa78514061582e9371c32b9f1d929b
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Andorra differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Astrakhan b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Astrakhan
new file mode 100644
index 0000000000000000000000000000000000000000..73a4d013fcb82c2beb6f885f359b9ca20da054e7
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Astrakhan differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Athens b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Athens
new file mode 100644
index 0000000000000000000000000000000000000000..9f3a0678d766881389e129c93def7fffd74f14f1
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Athens differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Belfast b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Belfast
new file mode 100644
index 0000000000000000000000000000000000000000..ac02a81440f47a67b9f01d3fbcdb085266d20894
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Belfast differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Belgrade b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Belgrade
new file mode 100644
index 0000000000000000000000000000000000000000..27de456f16ab549627b284a39e2265cbdb4ad8e9
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Belgrade differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Berlin b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Berlin
new file mode 100644
index 0000000000000000000000000000000000000000..7f6d958f8630cba512d8e58ca8edfbd516291522
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Berlin differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Bratislava b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Bratislava
new file mode 100644
index 0000000000000000000000000000000000000000..ce8f433ece44f0b96b18d3b5780730e7f9cad9f5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Bratislava differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Brussels b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Brussels
new file mode 100644
index 0000000000000000000000000000000000000000..40d7124e5346af056c75e2f7012a51d94e8154b7
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Brussels differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Bucharest b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Bucharest
new file mode 100644
index 0000000000000000000000000000000000000000..4303b903e5e007484c0d8e1eea43a35e9b53f38b
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Bucharest differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Budapest b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Budapest
new file mode 100644
index 0000000000000000000000000000000000000000..b76c873d9256e1d73c2ea672140b813f15657bc7
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Budapest differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Busingen b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Busingen
new file mode 100644
index 0000000000000000000000000000000000000000..ad6cf59281a1046d9dcd045fda521585e3e33e06
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Busingen differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Chisinau b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Chisinau
new file mode 100644
index 0000000000000000000000000000000000000000..5ee23fe0e59f044598675db44d53c20590b88934
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Chisinau differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Copenhagen b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Copenhagen
new file mode 100644
index 0000000000000000000000000000000000000000..776be6e4a6d5a054152135a1ad149576052f49a1
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Copenhagen differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Dublin b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Dublin
new file mode 100644
index 0000000000000000000000000000000000000000..1d994902db21814a626e42639d7a96b18ee73756
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Dublin differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Gibraltar b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Gibraltar
new file mode 100644
index 0000000000000000000000000000000000000000..117aadb8364cd7901388098503f4538c7b445aeb
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Gibraltar differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Guernsey b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Guernsey
new file mode 100644
index 0000000000000000000000000000000000000000..ac02a81440f47a67b9f01d3fbcdb085266d20894
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Guernsey differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Helsinki b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Helsinki
new file mode 100644
index 0000000000000000000000000000000000000000..b4f8f9cbb57450549933f83ac90dd56a2ca75344
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Helsinki differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Isle_of_Man b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Isle_of_Man
new file mode 100644
index 0000000000000000000000000000000000000000..ac02a81440f47a67b9f01d3fbcdb085266d20894
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Isle_of_Man differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Istanbul b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Istanbul
new file mode 100644
index 0000000000000000000000000000000000000000..508446bb6aee2841ab88e82607a6ad6e748e7db7
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Istanbul differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Jersey b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Jersey
new file mode 100644
index 0000000000000000000000000000000000000000..ac02a81440f47a67b9f01d3fbcdb085266d20894
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Jersey differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Kaliningrad b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Kaliningrad
new file mode 100644
index 0000000000000000000000000000000000000000..cc99beabe4ffc5107c4719d1201d6583b9ead03a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Kaliningrad differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Kiev b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Kiev
new file mode 100644
index 0000000000000000000000000000000000000000..9337c9ea27c0a61b1082f4be37cfb0f9484cf5e2
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Kiev differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Kirov b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Kirov
new file mode 100644
index 0000000000000000000000000000000000000000..a3b5320a0bd139c07b8642c4efd7b98f57c6e8dd
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Kirov differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Lisbon b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Lisbon
new file mode 100644
index 0000000000000000000000000000000000000000..55f01930ba92ff6852ae4745e78adb5f96c5b057
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Lisbon differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Ljubljana b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Ljubljana
new file mode 100644
index 0000000000000000000000000000000000000000..27de456f16ab549627b284a39e2265cbdb4ad8e9
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Ljubljana differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/London b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/London
new file mode 100644
index 0000000000000000000000000000000000000000..ac02a81440f47a67b9f01d3fbcdb085266d20894
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/London differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Luxembourg b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Luxembourg
new file mode 100644
index 0000000000000000000000000000000000000000..c4ca733f5345df24e5286b70464a6c0498353372
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Luxembourg differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Madrid b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Madrid
new file mode 100644
index 0000000000000000000000000000000000000000..16f6420ab7efc7ceac3b0e42fe37836185cfc463
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Madrid differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Malta b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Malta
new file mode 100644
index 0000000000000000000000000000000000000000..bf2452da40314be196f61e6a7cdd48eaf5c426f3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Malta differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Mariehamn b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Mariehamn
new file mode 100644
index 0000000000000000000000000000000000000000..b4f8f9cbb57450549933f83ac90dd56a2ca75344
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Mariehamn differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Minsk b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Minsk
new file mode 100644
index 0000000000000000000000000000000000000000..453306c07566a94c0c391024fb16ee36245a0a40
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Minsk differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Monaco b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Monaco
new file mode 100644
index 0000000000000000000000000000000000000000..adbe45d1c1db7287345b1826a356732075a38c51
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Monaco differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Moscow b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Moscow
new file mode 100644
index 0000000000000000000000000000000000000000..ddb3f4e99a1030f33b56fad986c8d9c16e59eb32
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Moscow differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Nicosia b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Nicosia
new file mode 100644
index 0000000000000000000000000000000000000000..f7f10ab7665e94ca44fd8cd98a362cd4b304eff1
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Nicosia differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Oslo b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Oslo
new file mode 100644
index 0000000000000000000000000000000000000000..15a34c3cedb7c9ca519c195f5ec0ce9d8d1885a5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Oslo differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Paris b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Paris
new file mode 100644
index 0000000000000000000000000000000000000000..7d366c6098c49ecd546e1cc1538919e1414a3aee
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Paris differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Podgorica b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Podgorica
new file mode 100644
index 0000000000000000000000000000000000000000..27de456f16ab549627b284a39e2265cbdb4ad8e9
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Podgorica differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Prague b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Prague
new file mode 100644
index 0000000000000000000000000000000000000000..ce8f433ece44f0b96b18d3b5780730e7f9cad9f5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Prague differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Riga b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Riga
new file mode 100644
index 0000000000000000000000000000000000000000..8db477d01736445cafce8af7a7085d226d81f546
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Riga differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Rome b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Rome
new file mode 100644
index 0000000000000000000000000000000000000000..ac4c16342b5bbfa4c58a26f57db33b95f5b3e533
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Rome differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Samara b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Samara
new file mode 100644
index 0000000000000000000000000000000000000000..97d5dd9e6ed7fc924c9bcb514f991cc8b52061b3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Samara differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/San_Marino b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/San_Marino
new file mode 100644
index 0000000000000000000000000000000000000000..ac4c16342b5bbfa4c58a26f57db33b95f5b3e533
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/San_Marino differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Sarajevo b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Sarajevo
new file mode 100644
index 0000000000000000000000000000000000000000..27de456f16ab549627b284a39e2265cbdb4ad8e9
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Sarajevo differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Saratov b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Saratov
new file mode 100644
index 0000000000000000000000000000000000000000..8fd5f6d4b881457d13fdcdd35abb6fc5429d7084
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Saratov differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Simferopol b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Simferopol
new file mode 100644
index 0000000000000000000000000000000000000000..432e8315bc9dfa74080467f9e08073d9fdcc833a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Simferopol differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Skopje b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Skopje
new file mode 100644
index 0000000000000000000000000000000000000000..27de456f16ab549627b284a39e2265cbdb4ad8e9
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Skopje differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Sofia b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Sofia
new file mode 100644
index 0000000000000000000000000000000000000000..0e4d879332d21c93c229fc25587205020eeb3127
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Sofia differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Stockholm b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Stockholm
new file mode 100644
index 0000000000000000000000000000000000000000..f3e0c7f0f25f0a7290e56281c91190e3611498a7
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Stockholm differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Tallinn b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Tallinn
new file mode 100644
index 0000000000000000000000000000000000000000..b5acca3cf51e7f7b3176965748688ff41720246f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Tallinn differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Tirane b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Tirane
new file mode 100644
index 0000000000000000000000000000000000000000..0b86017d243f1b7bbb41d6b4feefcb2b7edfc7d8
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Tirane differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Tiraspol b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Tiraspol
new file mode 100644
index 0000000000000000000000000000000000000000..5ee23fe0e59f044598675db44d53c20590b88934
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Tiraspol differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Ulyanovsk b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Ulyanovsk
new file mode 100644
index 0000000000000000000000000000000000000000..7b61bdc522b5b7f4397fdb9246185f4d972f4b6c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Ulyanovsk differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Uzhgorod b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Uzhgorod
new file mode 100644
index 0000000000000000000000000000000000000000..66ae8d69e3f86cfdd8d90a9b1d094d807f75f27a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Uzhgorod differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Vaduz b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Vaduz
new file mode 100644
index 0000000000000000000000000000000000000000..ad6cf59281a1046d9dcd045fda521585e3e33e06
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Vaduz differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Vatican b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Vatican
new file mode 100644
index 0000000000000000000000000000000000000000..ac4c16342b5bbfa4c58a26f57db33b95f5b3e533
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Vatican differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Vienna b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Vienna
new file mode 100644
index 0000000000000000000000000000000000000000..3582bb15cd7322088839b0134987ad10e717b6b5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Vienna differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Vilnius b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Vilnius
new file mode 100644
index 0000000000000000000000000000000000000000..7abd63fa608e0186b9f154d9fcc32472c28f6759
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Vilnius differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Volgograd b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Volgograd
new file mode 100644
index 0000000000000000000000000000000000000000..11739ac271da2b623bef49ea64908820b7ca05fd
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Volgograd differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Warsaw b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Warsaw
new file mode 100644
index 0000000000000000000000000000000000000000..e33cf67171da78aa9e6eb02e50f9b9603da4c3f4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Warsaw differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Zagreb b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Zagreb
new file mode 100644
index 0000000000000000000000000000000000000000..27de456f16ab549627b284a39e2265cbdb4ad8e9
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Zagreb differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Zaporozhye b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Zaporozhye
new file mode 100644
index 0000000000000000000000000000000000000000..e42edfc8506b9b99362b36d90c8b8c4db67d50d8
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Zaporozhye differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Zurich b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Zurich
new file mode 100644
index 0000000000000000000000000000000000000000..ad6cf59281a1046d9dcd045fda521585e3e33e06
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Europe/Zurich differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Factory b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Factory
new file mode 100644
index 0000000000000000000000000000000000000000..60aa2a0d695ba577ff87624d479f1eb25c8f1caf
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Factory differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/GB b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/GB
new file mode 100644
index 0000000000000000000000000000000000000000..ac02a81440f47a67b9f01d3fbcdb085266d20894
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/GB differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/GB-Eire b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/GB-Eire
new file mode 100644
index 0000000000000000000000000000000000000000..ac02a81440f47a67b9f01d3fbcdb085266d20894
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/GB-Eire differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/GMT b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/GMT
new file mode 100644
index 0000000000000000000000000000000000000000..c63474664a289aa3c3c0d8b2ce06d484679754c0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/GMT differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/GMT+0 b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/GMT+0
new file mode 100644
index 0000000000000000000000000000000000000000..c63474664a289aa3c3c0d8b2ce06d484679754c0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/GMT+0 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/GMT-0 b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/GMT-0
new file mode 100644
index 0000000000000000000000000000000000000000..c63474664a289aa3c3c0d8b2ce06d484679754c0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/GMT-0 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/GMT0 b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/GMT0
new file mode 100644
index 0000000000000000000000000000000000000000..c63474664a289aa3c3c0d8b2ce06d484679754c0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/GMT0 differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Greenwich b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Greenwich
new file mode 100644
index 0000000000000000000000000000000000000000..c63474664a289aa3c3c0d8b2ce06d484679754c0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Greenwich differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/HST b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/HST
new file mode 100644
index 0000000000000000000000000000000000000000..cccd45eb8cb2f56b6a1b75e2d7b9530cb5abf2e1
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/HST differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Hongkong b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Hongkong
new file mode 100644
index 0000000000000000000000000000000000000000..23d0375fba3377a3d513d849c0d29c82ad2add64
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Hongkong differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Iceland b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Iceland
new file mode 100644
index 0000000000000000000000000000000000000000..10e0fc8190a401f67b861ced3b8b16401432a565
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Iceland differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Indian/Antananarivo b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Indian/Antananarivo
new file mode 100644
index 0000000000000000000000000000000000000000..9dcfc19c56e62b12b730f4335b34479695f273f5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Indian/Antananarivo differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Indian/Chagos b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Indian/Chagos
new file mode 100644
index 0000000000000000000000000000000000000000..93d6dda50f579093f25617c77081ae99c8631ea5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Indian/Chagos differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Indian/Christmas b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Indian/Christmas
new file mode 100644
index 0000000000000000000000000000000000000000..d18c3810d97bbd424dc3c8fa98de46bfa08c3fa8
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Indian/Christmas differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Indian/Cocos b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Indian/Cocos
new file mode 100644
index 0000000000000000000000000000000000000000..f8116e7025cadc709bbd995905e88c92ed03642a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Indian/Cocos differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Indian/Comoro b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Indian/Comoro
new file mode 100644
index 0000000000000000000000000000000000000000..9dcfc19c56e62b12b730f4335b34479695f273f5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Indian/Comoro differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Indian/Kerguelen b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Indian/Kerguelen
new file mode 100644
index 0000000000000000000000000000000000000000..cde4cf7ea7086a3fa3609566ff03e9425b096f36
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Indian/Kerguelen differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Indian/Mahe b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Indian/Mahe
new file mode 100644
index 0000000000000000000000000000000000000000..208f9386bdad172305a48f6ab7e70ac3e1ca0e1e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Indian/Mahe differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Indian/Maldives b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Indian/Maldives
new file mode 100644
index 0000000000000000000000000000000000000000..7c839cfa9bd62842cf23f01d5195b239bc5a437c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Indian/Maldives differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Indian/Mauritius b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Indian/Mauritius
new file mode 100644
index 0000000000000000000000000000000000000000..17f26169904928e4061e4ee58bdf7a6c62001524
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Indian/Mauritius differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Indian/Mayotte b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Indian/Mayotte
new file mode 100644
index 0000000000000000000000000000000000000000..9dcfc19c56e62b12b730f4335b34479695f273f5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Indian/Mayotte differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Indian/Reunion b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Indian/Reunion
new file mode 100644
index 0000000000000000000000000000000000000000..dfe08313dffde345044d5053e3359f92163d3e38
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Indian/Reunion differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Iran b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Iran
new file mode 100644
index 0000000000000000000000000000000000000000..8cec5ad7de2f2c14dd4b0a6c375d7198ef7429b6
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Iran differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Israel b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Israel
new file mode 100644
index 0000000000000000000000000000000000000000..1ebd0664aa29c0abd722661f761031ec0304631c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Israel differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Jamaica b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Jamaica
new file mode 100644
index 0000000000000000000000000000000000000000..2a9b7fd52d37a1ffe9fc589daa04d88c6c71a6e0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Jamaica differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Japan b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Japan
new file mode 100644
index 0000000000000000000000000000000000000000..26f4d34d67b46513491f26c2e661c6e653cc130d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Japan differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Kwajalein b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Kwajalein
new file mode 100644
index 0000000000000000000000000000000000000000..1a7975fad7f7e96f7101eb3c64c9b420eeebb621
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Kwajalein differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Libya b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Libya
new file mode 100644
index 0000000000000000000000000000000000000000..07b393bb7db14cef1e906ebe63cfbbe8cddc79d5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Libya differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/MET b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/MET
new file mode 100644
index 0000000000000000000000000000000000000000..4a826bb185531c34eb37959037c68fbf08c23f71
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/MET differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/MST b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/MST
new file mode 100644
index 0000000000000000000000000000000000000000..c93a58eee8b32f672fd3a96ca3e6ada5b0a0e168
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/MST differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/MST7MDT b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/MST7MDT
new file mode 100644
index 0000000000000000000000000000000000000000..4506a6e150dfd73884811c8c0f5a0e21dc76a756
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/MST7MDT differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Mexico/BajaNorte b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Mexico/BajaNorte
new file mode 100644
index 0000000000000000000000000000000000000000..ada6bf78b2815d3d99c97d521ab9a6b35c8af8c3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Mexico/BajaNorte differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Mexico/BajaSur b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Mexico/BajaSur
new file mode 100644
index 0000000000000000000000000000000000000000..e4a785743d75f939c3e4798ebe7c79d38e4cfd08
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Mexico/BajaSur differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Mexico/General b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Mexico/General
new file mode 100644
index 0000000000000000000000000000000000000000..e7fb6f2953d123efbc2550f450decd794b017d4f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Mexico/General differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/NZ b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/NZ
new file mode 100644
index 0000000000000000000000000000000000000000..6575fdce31183d8238b18f2f30ab5b9227c7071c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/NZ differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/NZ-CHAT b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/NZ-CHAT
new file mode 100644
index 0000000000000000000000000000000000000000..c00410988272dec2ae70ede88720b4a46146a4d1
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/NZ-CHAT differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Navajo b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Navajo
new file mode 100644
index 0000000000000000000000000000000000000000..5fbe26b1d93d1acb2561c390c1e097d07f1a262e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Navajo differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/PRC b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/PRC
new file mode 100644
index 0000000000000000000000000000000000000000..91f6f8bc2e234bafd484146986bdb289082c3588
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/PRC differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/PST8PDT b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/PST8PDT
new file mode 100644
index 0000000000000000000000000000000000000000..99d246baa35cb9c6f56d50adbec163452e2a47fa
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/PST8PDT differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Apia b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Apia
new file mode 100644
index 0000000000000000000000000000000000000000..999c367c3f201c957a8ab746ad89669c4c9ed5ca
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Apia differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Auckland b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Auckland
new file mode 100644
index 0000000000000000000000000000000000000000..6575fdce31183d8238b18f2f30ab5b9227c7071c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Auckland differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Bougainville b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Bougainville
new file mode 100644
index 0000000000000000000000000000000000000000..2892d268094ea785b045e53cb441a551672aabd0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Bougainville differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Chatham b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Chatham
new file mode 100644
index 0000000000000000000000000000000000000000..c00410988272dec2ae70ede88720b4a46146a4d1
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Chatham differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Chuuk b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Chuuk
new file mode 100644
index 0000000000000000000000000000000000000000..07c84b7110ad9589810b916390aedc7ef498f423
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Chuuk differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Easter b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Easter
new file mode 100644
index 0000000000000000000000000000000000000000..cae3744096402e8a452336544edf96ca9ae5ad8d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Easter differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Efate b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Efate
new file mode 100644
index 0000000000000000000000000000000000000000..d8d4093bc804be0730b5f5f530db42d55991afbf
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Efate differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Enderbury b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Enderbury
new file mode 100644
index 0000000000000000000000000000000000000000..39b786e9601e1a0bc98aca6d8a55a97127775046
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Enderbury differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Fakaofo b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Fakaofo
new file mode 100644
index 0000000000000000000000000000000000000000..e40307f6aab2a169bb957979f5580affb379131e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Fakaofo differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Fiji b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Fiji
new file mode 100644
index 0000000000000000000000000000000000000000..e71691ee932809771920778c699c27e32ce99207
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Fiji differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Funafuti b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Funafuti
new file mode 100644
index 0000000000000000000000000000000000000000..ea728637ac1fa43f2d73469151c688468b34c3e3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Funafuti differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Galapagos b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Galapagos
new file mode 100644
index 0000000000000000000000000000000000000000..31f0921ea04cd201675b613082fb4b0b0b91941c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Galapagos differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Gambier b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Gambier
new file mode 100644
index 0000000000000000000000000000000000000000..e1fc3daa55eb2bc8c5d6a78bd77a01d193a821a7
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Gambier differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Guadalcanal b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Guadalcanal
new file mode 100644
index 0000000000000000000000000000000000000000..7e9d10a100f9cbe796d99945dd60e0430dced523
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Guadalcanal differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Guam b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Guam
new file mode 100644
index 0000000000000000000000000000000000000000..66490d25dff9bcc8f710b0141f1a02e64aeb32f3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Guam differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Honolulu b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Honolulu
new file mode 100644
index 0000000000000000000000000000000000000000..c7cd060159bd22fc5e6f10ac5a2089afb2c19c6a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Honolulu differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Johnston b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Johnston
new file mode 100644
index 0000000000000000000000000000000000000000..c7cd060159bd22fc5e6f10ac5a2089afb2c19c6a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Johnston differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Kanton b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Kanton
new file mode 100644
index 0000000000000000000000000000000000000000..39b786e9601e1a0bc98aca6d8a55a97127775046
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Kanton differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Kiritimati b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Kiritimati
new file mode 100644
index 0000000000000000000000000000000000000000..7cae0cb7562e5c0f9fa46913b71a5c3628c01bbf
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Kiritimati differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Kosrae b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Kosrae
new file mode 100644
index 0000000000000000000000000000000000000000..a584aae5eb8187f88600e97b6d6c764245a37e65
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Kosrae differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Kwajalein b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Kwajalein
new file mode 100644
index 0000000000000000000000000000000000000000..1a7975fad7f7e96f7101eb3c64c9b420eeebb621
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Kwajalein differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Majuro b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Majuro
new file mode 100644
index 0000000000000000000000000000000000000000..9ef8374de4fa73cd2a3359d3c3886b11643a7146
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Majuro differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Marquesas b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Marquesas
new file mode 100644
index 0000000000000000000000000000000000000000..74d6792bf6fcb791bfc0af1f827737f612abef67
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Marquesas differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Midway b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Midway
new file mode 100644
index 0000000000000000000000000000000000000000..cb56709a77dedb471150f4907771bf38f1879ba4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Midway differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Nauru b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Nauru
new file mode 100644
index 0000000000000000000000000000000000000000..acec0429f147f40279107a48cb85c3b0e9f56c94
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Nauru differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Niue b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Niue
new file mode 100644
index 0000000000000000000000000000000000000000..89117b377325529e65994174eb5a2fe90e1f39ff
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Niue differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Norfolk b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Norfolk
new file mode 100644
index 0000000000000000000000000000000000000000..53c1aad4e0a541ee3c2c4c6de33a79ca93a7d244
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Norfolk differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Noumea b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Noumea
new file mode 100644
index 0000000000000000000000000000000000000000..931a1a306f70eb0c7578a65425086270c6ea2b88
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Noumea differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Pago_Pago b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Pago_Pago
new file mode 100644
index 0000000000000000000000000000000000000000..cb56709a77dedb471150f4907771bf38f1879ba4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Pago_Pago differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Palau b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Palau
new file mode 100644
index 0000000000000000000000000000000000000000..146b35152aaeffb5940d30910ba37703f4096285
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Palau differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Pitcairn b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Pitcairn
new file mode 100644
index 0000000000000000000000000000000000000000..ef91b061bb145b2658d49fd5065ed74b1a6cf6f7
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Pitcairn differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Pohnpei b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Pohnpei
new file mode 100644
index 0000000000000000000000000000000000000000..c298ddd4debb649220e5dfde60948591bc6a3501
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Pohnpei differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Ponape b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Ponape
new file mode 100644
index 0000000000000000000000000000000000000000..c298ddd4debb649220e5dfde60948591bc6a3501
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Ponape differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Port_Moresby b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Port_Moresby
new file mode 100644
index 0000000000000000000000000000000000000000..920ad27e629e350c1baac8537bb639a59fd19039
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Port_Moresby differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Rarotonga b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Rarotonga
new file mode 100644
index 0000000000000000000000000000000000000000..eea37aba3724b3517c275dffd57e49bed205c5e0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Rarotonga differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Saipan b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Saipan
new file mode 100644
index 0000000000000000000000000000000000000000..66490d25dff9bcc8f710b0141f1a02e64aeb32f3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Saipan differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Samoa b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Samoa
new file mode 100644
index 0000000000000000000000000000000000000000..cb56709a77dedb471150f4907771bf38f1879ba4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Samoa differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Tahiti b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Tahiti
new file mode 100644
index 0000000000000000000000000000000000000000..442b8eb5a438985092d8657ebcabe8859037482a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Tahiti differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Tarawa b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Tarawa
new file mode 100644
index 0000000000000000000000000000000000000000..3db6c750333fa6dc1efc84b1abc24528fbc00b0b
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Tarawa differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Tongatapu b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Tongatapu
new file mode 100644
index 0000000000000000000000000000000000000000..c2e5999bb2cbfb7a8059004f8a593d454b49d3be
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Tongatapu differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Truk b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Truk
new file mode 100644
index 0000000000000000000000000000000000000000..07c84b7110ad9589810b916390aedc7ef498f423
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Truk differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Wake b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Wake
new file mode 100644
index 0000000000000000000000000000000000000000..c9e310670f07e9e577791bfa19fefde61351fcdf
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Wake differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Wallis b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Wallis
new file mode 100644
index 0000000000000000000000000000000000000000..b35344b312c6ca690c0f79a858c3995a05c71ff3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Wallis differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Yap b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Yap
new file mode 100644
index 0000000000000000000000000000000000000000..07c84b7110ad9589810b916390aedc7ef498f423
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Pacific/Yap differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Poland b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Poland
new file mode 100644
index 0000000000000000000000000000000000000000..e33cf67171da78aa9e6eb02e50f9b9603da4c3f4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Poland differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Portugal b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Portugal
new file mode 100644
index 0000000000000000000000000000000000000000..55f01930ba92ff6852ae4745e78adb5f96c5b057
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Portugal differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/ROC b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/ROC
new file mode 100644
index 0000000000000000000000000000000000000000..24c43444b6751343d2915843d03e55753d4d7359
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/ROC differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/ROK b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/ROK
new file mode 100644
index 0000000000000000000000000000000000000000..96199e73e73aafacd89e48cb2855a96d7a134e1d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/ROK differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Singapore b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Singapore
new file mode 100644
index 0000000000000000000000000000000000000000..2364b2178b03853b1771cb3f34b964fdb82e2e91
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Singapore differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Turkey b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Turkey
new file mode 100644
index 0000000000000000000000000000000000000000..508446bb6aee2841ab88e82607a6ad6e748e7db7
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Turkey differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/UCT b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/UCT
new file mode 100644
index 0000000000000000000000000000000000000000..91558be0c2bf903b2364215ba26d5227d6126508
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/UCT differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/Alaska b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/Alaska
new file mode 100644
index 0000000000000000000000000000000000000000..9bbb2fd3b361ea8aa4c126d14df5fa370343a63f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/Alaska differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/Aleutian b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/Aleutian
new file mode 100644
index 0000000000000000000000000000000000000000..43236498f681cc06f64ca2afa613880331fe6fbb
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/Aleutian differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/Arizona b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/Arizona
new file mode 100644
index 0000000000000000000000000000000000000000..ac6bb0c78291f1e341f42c34639458fb385bf8ed
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/Arizona differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/Central b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/Central
new file mode 100644
index 0000000000000000000000000000000000000000..a5b1617c7f70bfc77b7d504aaa3f23603082c3cb
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/Central differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/East-Indiana b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/East-Indiana
new file mode 100644
index 0000000000000000000000000000000000000000..09511ccdcf97a5baa8e1b0eb75e040eee6b6e0c4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/East-Indiana differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/Eastern b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/Eastern
new file mode 100644
index 0000000000000000000000000000000000000000..2f75480e069b60b6c58a9137c7eebd4796f74226
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/Eastern differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/Hawaii b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/Hawaii
new file mode 100644
index 0000000000000000000000000000000000000000..c7cd060159bd22fc5e6f10ac5a2089afb2c19c6a
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/Hawaii differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/Indiana-Starke b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/Indiana-Starke
new file mode 100644
index 0000000000000000000000000000000000000000..fcd408d74df43310a9a85c475f83d545f6d75911
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/Indiana-Starke differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/Michigan b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/Michigan
new file mode 100644
index 0000000000000000000000000000000000000000..e104faa46545ee873295cde34e1d46bccad8647c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/Michigan differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/Mountain b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/Mountain
new file mode 100644
index 0000000000000000000000000000000000000000..5fbe26b1d93d1acb2561c390c1e097d07f1a262e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/Mountain differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/Pacific b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/Pacific
new file mode 100644
index 0000000000000000000000000000000000000000..9dad4f4c75b373635ccbe634798f8d9e587e36c1
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/Pacific differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/Samoa b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/Samoa
new file mode 100644
index 0000000000000000000000000000000000000000..cb56709a77dedb471150f4907771bf38f1879ba4
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/US/Samoa differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/UTC b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/UTC
new file mode 100644
index 0000000000000000000000000000000000000000..91558be0c2bf903b2364215ba26d5227d6126508
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/UTC differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Universal b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Universal
new file mode 100644
index 0000000000000000000000000000000000000000..91558be0c2bf903b2364215ba26d5227d6126508
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Universal differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/W-SU b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/W-SU
new file mode 100644
index 0000000000000000000000000000000000000000..ddb3f4e99a1030f33b56fad986c8d9c16e59eb32
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/W-SU differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/WET b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/WET
new file mode 100644
index 0000000000000000000000000000000000000000..c27390b5b638399057d5f5c6d09ef8c81d5f01c1
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/WET differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Zulu b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Zulu
new file mode 100644
index 0000000000000000000000000000000000000000..91558be0c2bf903b2364215ba26d5227d6126508
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/Zulu differ
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/iso3166.tab b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/iso3166.tab
new file mode 100644
index 0000000000000000000000000000000000000000..a4ff61a4d3213a5cce54f00bad04f1dfb550d581
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/iso3166.tab
@@ -0,0 +1,274 @@
+# ISO 3166 alpha-2 country codes
+#
+# This file is in the public domain, so clarified as of
+# 2009-05-17 by Arthur David Olson.
+#
+# From Paul Eggert (2015-05-02):
+# This file contains a table of two-letter country codes. Columns are
+# separated by a single tab. Lines beginning with '#' are comments.
+# All text uses UTF-8 encoding. The columns of the table are as follows:
+#
+# 1. ISO 3166-1 alpha-2 country code, current as of
+# ISO 3166-1 N976 (2018-11-06). See: Updates on ISO 3166-1
+# https://isotc.iso.org/livelink/livelink/Open/16944257
+# 2. The usual English name for the coded region,
+# chosen so that alphabetic sorting of subsets produces helpful lists.
+# This is not the same as the English name in the ISO 3166 tables.
+#
+# The table is sorted by country code.
+#
+# This table is intended as an aid for users, to help them select time
+# zone data appropriate for their practical needs. It is not intended
+# to take or endorse any position on legal or territorial claims.
+#
+#country-
+#code name of country, territory, area, or subdivision
+AD Andorra
+AE United Arab Emirates
+AF Afghanistan
+AG Antigua & Barbuda
+AI Anguilla
+AL Albania
+AM Armenia
+AO Angola
+AQ Antarctica
+AR Argentina
+AS Samoa (American)
+AT Austria
+AU Australia
+AW Aruba
+AX Åland Islands
+AZ Azerbaijan
+BA Bosnia & Herzegovina
+BB Barbados
+BD Bangladesh
+BE Belgium
+BF Burkina Faso
+BG Bulgaria
+BH Bahrain
+BI Burundi
+BJ Benin
+BL St Barthelemy
+BM Bermuda
+BN Brunei
+BO Bolivia
+BQ Caribbean NL
+BR Brazil
+BS Bahamas
+BT Bhutan
+BV Bouvet Island
+BW Botswana
+BY Belarus
+BZ Belize
+CA Canada
+CC Cocos (Keeling) Islands
+CD Congo (Dem. Rep.)
+CF Central African Rep.
+CG Congo (Rep.)
+CH Switzerland
+CI Côte d'Ivoire
+CK Cook Islands
+CL Chile
+CM Cameroon
+CN China
+CO Colombia
+CR Costa Rica
+CU Cuba
+CV Cape Verde
+CW Curaçao
+CX Christmas Island
+CY Cyprus
+CZ Czech Republic
+DE Germany
+DJ Djibouti
+DK Denmark
+DM Dominica
+DO Dominican Republic
+DZ Algeria
+EC Ecuador
+EE Estonia
+EG Egypt
+EH Western Sahara
+ER Eritrea
+ES Spain
+ET Ethiopia
+FI Finland
+FJ Fiji
+FK Falkland Islands
+FM Micronesia
+FO Faroe Islands
+FR France
+GA Gabon
+GB Britain (UK)
+GD Grenada
+GE Georgia
+GF French Guiana
+GG Guernsey
+GH Ghana
+GI Gibraltar
+GL Greenland
+GM Gambia
+GN Guinea
+GP Guadeloupe
+GQ Equatorial Guinea
+GR Greece
+GS South Georgia & the South Sandwich Islands
+GT Guatemala
+GU Guam
+GW Guinea-Bissau
+GY Guyana
+HK Hong Kong
+HM Heard Island & McDonald Islands
+HN Honduras
+HR Croatia
+HT Haiti
+HU Hungary
+ID Indonesia
+IE Ireland
+IL Israel
+IM Isle of Man
+IN India
+IO British Indian Ocean Territory
+IQ Iraq
+IR Iran
+IS Iceland
+IT Italy
+JE Jersey
+JM Jamaica
+JO Jordan
+JP Japan
+KE Kenya
+KG Kyrgyzstan
+KH Cambodia
+KI Kiribati
+KM Comoros
+KN St Kitts & Nevis
+KP Korea (North)
+KR Korea (South)
+KW Kuwait
+KY Cayman Islands
+KZ Kazakhstan
+LA Laos
+LB Lebanon
+LC St Lucia
+LI Liechtenstein
+LK Sri Lanka
+LR Liberia
+LS Lesotho
+LT Lithuania
+LU Luxembourg
+LV Latvia
+LY Libya
+MA Morocco
+MC Monaco
+MD Moldova
+ME Montenegro
+MF St Martin (French)
+MG Madagascar
+MH Marshall Islands
+MK North Macedonia
+ML Mali
+MM Myanmar (Burma)
+MN Mongolia
+MO Macau
+MP Northern Mariana Islands
+MQ Martinique
+MR Mauritania
+MS Montserrat
+MT Malta
+MU Mauritius
+MV Maldives
+MW Malawi
+MX Mexico
+MY Malaysia
+MZ Mozambique
+NA Namibia
+NC New Caledonia
+NE Niger
+NF Norfolk Island
+NG Nigeria
+NI Nicaragua
+NL Netherlands
+NO Norway
+NP Nepal
+NR Nauru
+NU Niue
+NZ New Zealand
+OM Oman
+PA Panama
+PE Peru
+PF French Polynesia
+PG Papua New Guinea
+PH Philippines
+PK Pakistan
+PL Poland
+PM St Pierre & Miquelon
+PN Pitcairn
+PR Puerto Rico
+PS Palestine
+PT Portugal
+PW Palau
+PY Paraguay
+QA Qatar
+RE Réunion
+RO Romania
+RS Serbia
+RU Russia
+RW Rwanda
+SA Saudi Arabia
+SB Solomon Islands
+SC Seychelles
+SD Sudan
+SE Sweden
+SG Singapore
+SH St Helena
+SI Slovenia
+SJ Svalbard & Jan Mayen
+SK Slovakia
+SL Sierra Leone
+SM San Marino
+SN Senegal
+SO Somalia
+SR Suriname
+SS South Sudan
+ST Sao Tome & Principe
+SV El Salvador
+SX St Maarten (Dutch)
+SY Syria
+SZ Eswatini (Swaziland)
+TC Turks & Caicos Is
+TD Chad
+TF French Southern & Antarctic Lands
+TG Togo
+TH Thailand
+TJ Tajikistan
+TK Tokelau
+TL East Timor
+TM Turkmenistan
+TN Tunisia
+TO Tonga
+TR Turkey
+TT Trinidad & Tobago
+TV Tuvalu
+TW Taiwan
+TZ Tanzania
+UA Ukraine
+UG Uganda
+UM US minor outlying islands
+US United States
+UY Uruguay
+UZ Uzbekistan
+VA Vatican City
+VC St Vincent
+VE Venezuela
+VG Virgin Islands (UK)
+VI Virgin Islands (US)
+VN Vietnam
+VU Vanuatu
+WF Wallis & Futuna
+WS Samoa (western)
+YE Yemen
+YT Mayotte
+ZA South Africa
+ZM Zambia
+ZW Zimbabwe
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/leapseconds b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/leapseconds
new file mode 100644
index 0000000000000000000000000000000000000000..834b96ea882cfc9512b9695791c64d31f75f5f0d
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/leapseconds
@@ -0,0 +1,82 @@
+# Allowance for leap seconds added to each time zone file.
+
+# This file is in the public domain.
+
+# This file is generated automatically from the data in the public-domain
+# NIST format leap-seconds.list file, which can be copied from
+# <ftp://ftp.nist.gov/pub/time/leap-seconds.list>
+# or <ftp://ftp.boulder.nist.gov/pub/time/leap-seconds.list>.
+# The NIST file is used instead of its IERS upstream counterpart
+# <https://hpiers.obspm.fr/iers/bul/bulc/ntp/leap-seconds.list>
+# because under US law the NIST file is public domain
+# whereas the IERS file's copyright and license status is unclear.
+# For more about leap-seconds.list, please see
+# The NTP Timescale and Leap Seconds
+# <https://www.eecis.udel.edu/~mills/leap.html>.
+
+# The rules for leap seconds are specified in Annex 1 (Time scales) of:
+# Standard-frequency and time-signal emissions.
+# International Telecommunication Union - Radiocommunication Sector
+# (ITU-R) Recommendation TF.460-6 (02/2002)
+# <https://www.itu.int/rec/R-REC-TF.460-6-200202-I/>.
+# The International Earth Rotation and Reference Systems Service (IERS)
+# periodically uses leap seconds to keep UTC to within 0.9 s of UT1
+# (a proxy for Earth's angle in space as measured by astronomers)
+# and publishes leap second data in a copyrighted file
+# <https://hpiers.obspm.fr/iers/bul/bulc/Leap_Second.dat>.
+# See: Levine J. Coordinated Universal Time and the leap second.
+# URSI Radio Sci Bull. 2016;89(4):30-6. doi:10.23919/URSIRSB.2016.7909995
+# <https://ieeexplore.ieee.org/document/7909995>.
+
+# There were no leap seconds before 1972, as no official mechanism
+# accounted for the discrepancy between atomic time (TAI) and the earth's
+# rotation. The first ("1 Jan 1972") data line in leap-seconds.list
+# does not denote a leap second; it denotes the start of the current definition
+# of UTC.
+
+# All leap-seconds are Stationary (S) at the given UTC time.
+# The correction (+ or -) is made at the given time, so in the unlikely
+# event of a negative leap second, a line would look like this:
+# Leap YEAR MON DAY 23:59:59 - S
+# Typical lines look like this:
+# Leap YEAR MON DAY 23:59:60 + S
+Leap 1972 Jun 30 23:59:60 + S
+Leap 1972 Dec 31 23:59:60 + S
+Leap 1973 Dec 31 23:59:60 + S
+Leap 1974 Dec 31 23:59:60 + S
+Leap 1975 Dec 31 23:59:60 + S
+Leap 1976 Dec 31 23:59:60 + S
+Leap 1977 Dec 31 23:59:60 + S
+Leap 1978 Dec 31 23:59:60 + S
+Leap 1979 Dec 31 23:59:60 + S
+Leap 1981 Jun 30 23:59:60 + S
+Leap 1982 Jun 30 23:59:60 + S
+Leap 1983 Jun 30 23:59:60 + S
+Leap 1985 Jun 30 23:59:60 + S
+Leap 1987 Dec 31 23:59:60 + S
+Leap 1989 Dec 31 23:59:60 + S
+Leap 1990 Dec 31 23:59:60 + S
+Leap 1992 Jun 30 23:59:60 + S
+Leap 1993 Jun 30 23:59:60 + S
+Leap 1994 Jun 30 23:59:60 + S
+Leap 1995 Dec 31 23:59:60 + S
+Leap 1997 Jun 30 23:59:60 + S
+Leap 1998 Dec 31 23:59:60 + S
+Leap 2005 Dec 31 23:59:60 + S
+Leap 2008 Dec 31 23:59:60 + S
+Leap 2012 Jun 30 23:59:60 + S
+Leap 2015 Jun 30 23:59:60 + S
+Leap 2016 Dec 31 23:59:60 + S
+
+# UTC timestamp when this leap second list expires.
+# Any additional leap seconds will come after this.
+# This Expires line is commented out for now,
+# so that pre-2020a zic implementations do not reject this file.
+#Expires 2022 Jun 28 00:00:00
+
+# POSIX timestamps for the data in this file:
+#updated 1467936000 (2016-07-08 00:00:00 UTC)
+#expires 1656374400 (2022-06-28 00:00:00 UTC)
+
+# Updated through IERS Bulletin C62
+# File expires on: 28 June 2022
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/tzdata.zi b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/tzdata.zi
new file mode 100644
index 0000000000000000000000000000000000000000..e16ab09f9f6bd5acf923b7587c7b7af71f37281a
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/tzdata.zi
@@ -0,0 +1,4433 @@
+# version unknown-dirty
+# This zic input file is in the public domain.
+R d 1916 o - Jun 14 23s 1 S
+R d 1916 1919 - O Su>=1 23s 0 -
+R d 1917 o - Mar 24 23s 1 S
+R d 1918 o - Mar 9 23s 1 S
+R d 1919 o - Mar 1 23s 1 S
+R d 1920 o - F 14 23s 1 S
+R d 1920 o - O 23 23s 0 -
+R d 1921 o - Mar 14 23s 1 S
+R d 1921 o - Jun 21 23s 0 -
+R d 1939 o - S 11 23s 1 S
+R d 1939 o - N 19 1 0 -
+R d 1944 1945 - Ap M>=1 2 1 S
+R d 1944 o - O 8 2 0 -
+R d 1945 o - S 16 1 0 -
+R d 1971 o - Ap 25 23s 1 S
+R d 1971 o - S 26 23s 0 -
+R d 1977 o - May 6 0 1 S
+R d 1977 o - O 21 0 0 -
+R d 1978 o - Mar 24 1 1 S
+R d 1978 o - S 22 3 0 -
+R d 1980 o - Ap 25 0 1 S
+R d 1980 o - O 31 2 0 -
+Z Africa/Algiers 0:12:12 - LMT 1891 Mar 16
+0:9:21 - PMT 1911 Mar 11
+0 d WE%sT 1940 F 25 2
+1 d CE%sT 1946 O 7
+0 - WET 1956 Ja 29
+1 - CET 1963 Ap 14
+0 d WE%sT 1977 O 21
+1 d CE%sT 1979 O 26
+0 d WE%sT 1981 May
+1 - CET
+Z Atlantic/Cape_Verde -1:34:4 - LMT 1912 Ja 1 2u
+-2 - -02 1942 S
+-2 1 -01 1945 O 15
+-2 - -02 1975 N 25 2
+-1 - -01
+Z Africa/Ndjamena 1:0:12 - LMT 1912
+1 - WAT 1979 O 14
+1 1 WAST 1980 Mar 8
+1 - WAT
+Z Africa/Abidjan -0:16:8 - LMT 1912
+0 - GMT
+L Africa/Abidjan Africa/Accra
+L Africa/Abidjan Africa/Bamako
+L Africa/Abidjan Africa/Banjul
+L Africa/Abidjan Africa/Conakry
+L Africa/Abidjan Africa/Dakar
+L Africa/Abidjan Africa/Freetown
+L Africa/Abidjan Africa/Lome
+L Africa/Abidjan Africa/Nouakchott
+L Africa/Abidjan Africa/Ouagadougou
+L Africa/Abidjan Atlantic/St_Helena
+R K 1940 o - Jul 15 0 1 S
+R K 1940 o - O 1 0 0 -
+R K 1941 o - Ap 15 0 1 S
+R K 1941 o - S 16 0 0 -
+R K 1942 1944 - Ap 1 0 1 S
+R K 1942 o - O 27 0 0 -
+R K 1943 1945 - N 1 0 0 -
+R K 1945 o - Ap 16 0 1 S
+R K 1957 o - May 10 0 1 S
+R K 1957 1958 - O 1 0 0 -
+R K 1958 o - May 1 0 1 S
+R K 1959 1981 - May 1 1 1 S
+R K 1959 1965 - S 30 3 0 -
+R K 1966 1994 - O 1 3 0 -
+R K 1982 o - Jul 25 1 1 S
+R K 1983 o - Jul 12 1 1 S
+R K 1984 1988 - May 1 1 1 S
+R K 1989 o - May 6 1 1 S
+R K 1990 1994 - May 1 1 1 S
+R K 1995 2010 - Ap lastF 0s 1 S
+R K 1995 2005 - S lastTh 24 0 -
+R K 2006 o - S 21 24 0 -
+R K 2007 o - S Th>=1 24 0 -
+R K 2008 o - Au lastTh 24 0 -
+R K 2009 o - Au 20 24 0 -
+R K 2010 o - Au 10 24 0 -
+R K 2010 o - S 9 24 1 S
+R K 2010 o - S lastTh 24 0 -
+R K 2014 o - May 15 24 1 S
+R K 2014 o - Jun 26 24 0 -
+R K 2014 o - Jul 31 24 1 S
+R K 2014 o - S lastTh 24 0 -
+Z Africa/Cairo 2:5:9 - LMT 1900 O
+2 K EE%sT
+Z Africa/Bissau -1:2:20 - LMT 1912 Ja 1 1u
+-1 - -01 1975
+0 - GMT
+Z Africa/Nairobi 2:27:16 - LMT 1908 May
+2:30 - +0230 1928 Jun 30 24
+3 - EAT 1930 Ja 4 24
+2:30 - +0230 1936 D 31 24
+2:45 - +0245 1942 Jul 31 24
+3 - EAT
+L Africa/Nairobi Africa/Addis_Ababa
+L Africa/Nairobi Africa/Asmara
+L Africa/Nairobi Africa/Dar_es_Salaam
+L Africa/Nairobi Africa/Djibouti
+L Africa/Nairobi Africa/Kampala
+L Africa/Nairobi Africa/Mogadishu
+L Africa/Nairobi Indian/Antananarivo
+L Africa/Nairobi Indian/Comoro
+L Africa/Nairobi Indian/Mayotte
+Z Africa/Monrovia -0:43:8 - LMT 1882
+-0:43:8 - MMT 1919 Mar
+-0:44:30 - MMT 1972 Ja 7
+0 - GMT
+R L 1951 o - O 14 2 1 S
+R L 1952 o - Ja 1 0 0 -
+R L 1953 o - O 9 2 1 S
+R L 1954 o - Ja 1 0 0 -
+R L 1955 o - S 30 0 1 S
+R L 1956 o - Ja 1 0 0 -
+R L 1982 1984 - Ap 1 0 1 S
+R L 1982 1985 - O 1 0 0 -
+R L 1985 o - Ap 6 0 1 S
+R L 1986 o - Ap 4 0 1 S
+R L 1986 o - O 3 0 0 -
+R L 1987 1989 - Ap 1 0 1 S
+R L 1987 1989 - O 1 0 0 -
+R L 1997 o - Ap 4 0 1 S
+R L 1997 o - O 4 0 0 -
+R L 2013 o - Mar lastF 1 1 S
+R L 2013 o - O lastF 2 0 -
+Z Africa/Tripoli 0:52:44 - LMT 1920
+1 L CE%sT 1959
+2 - EET 1982
+1 L CE%sT 1990 May 4
+2 - EET 1996 S 30
+1 L CE%sT 1997 O 4
+2 - EET 2012 N 10 2
+1 L CE%sT 2013 O 25 2
+2 - EET
+R MU 1982 o - O 10 0 1 -
+R MU 1983 o - Mar 21 0 0 -
+R MU 2008 o - O lastSu 2 1 -
+R MU 2009 o - Mar lastSu 2 0 -
+Z Indian/Mauritius 3:50 - LMT 1907
+4 MU +04/+05
+R M 1939 o - S 12 0 1 -
+R M 1939 o - N 19 0 0 -
+R M 1940 o - F 25 0 1 -
+R M 1945 o - N 18 0 0 -
+R M 1950 o - Jun 11 0 1 -
+R M 1950 o - O 29 0 0 -
+R M 1967 o - Jun 3 12 1 -
+R M 1967 o - O 1 0 0 -
+R M 1974 o - Jun 24 0 1 -
+R M 1974 o - S 1 0 0 -
+R M 1976 1977 - May 1 0 1 -
+R M 1976 o - Au 1 0 0 -
+R M 1977 o - S 28 0 0 -
+R M 1978 o - Jun 1 0 1 -
+R M 1978 o - Au 4 0 0 -
+R M 2008 o - Jun 1 0 1 -
+R M 2008 o - S 1 0 0 -
+R M 2009 o - Jun 1 0 1 -
+R M 2009 o - Au 21 0 0 -
+R M 2010 o - May 2 0 1 -
+R M 2010 o - Au 8 0 0 -
+R M 2011 o - Ap 3 0 1 -
+R M 2011 o - Jul 31 0 0 -
+R M 2012 2013 - Ap lastSu 2 1 -
+R M 2012 o - Jul 20 3 0 -
+R M 2012 o - Au 20 2 1 -
+R M 2012 o - S 30 3 0 -
+R M 2013 o - Jul 7 3 0 -
+R M 2013 o - Au 10 2 1 -
+R M 2013 2018 - O lastSu 3 0 -
+R M 2014 2018 - Mar lastSu 2 1 -
+R M 2014 o - Jun 28 3 0 -
+R M 2014 o - Au 2 2 1 -
+R M 2015 o - Jun 14 3 0 -
+R M 2015 o - Jul 19 2 1 -
+R M 2016 o - Jun 5 3 0 -
+R M 2016 o - Jul 10 2 1 -
+R M 2017 o - May 21 3 0 -
+R M 2017 o - Jul 2 2 1 -
+R M 2018 o - May 13 3 0 -
+R M 2018 o - Jun 17 2 1 -
+R M 2019 o - May 5 3 -1 -
+R M 2019 o - Jun 9 2 0 -
+R M 2020 o - Ap 19 3 -1 -
+R M 2020 o - May 31 2 0 -
+R M 2021 o - Ap 11 3 -1 -
+R M 2021 o - May 16 2 0 -
+R M 2022 o - Mar 27 3 -1 -
+R M 2022 o - May 8 2 0 -
+R M 2023 o - Mar 19 3 -1 -
+R M 2023 o - Ap 30 2 0 -
+R M 2024 o - Mar 10 3 -1 -
+R M 2024 o - Ap 14 2 0 -
+R M 2025 o - F 23 3 -1 -
+R M 2025 o - Ap 6 2 0 -
+R M 2026 o - F 15 3 -1 -
+R M 2026 o - Mar 22 2 0 -
+R M 2027 o - F 7 3 -1 -
+R M 2027 o - Mar 14 2 0 -
+R M 2028 o - Ja 23 3 -1 -
+R M 2028 o - Mar 5 2 0 -
+R M 2029 o - Ja 14 3 -1 -
+R M 2029 o - F 18 2 0 -
+R M 2029 o - D 30 3 -1 -
+R M 2030 o - F 10 2 0 -
+R M 2030 o - D 22 3 -1 -
+R M 2031 o - F 2 2 0 -
+R M 2031 o - D 14 3 -1 -
+R M 2032 o - Ja 18 2 0 -
+R M 2032 o - N 28 3 -1 -
+R M 2033 o - Ja 9 2 0 -
+R M 2033 o - N 20 3 -1 -
+R M 2033 o - D 25 2 0 -
+R M 2034 o - N 5 3 -1 -
+R M 2034 o - D 17 2 0 -
+R M 2035 o - O 28 3 -1 -
+R M 2035 o - D 9 2 0 -
+R M 2036 o - O 19 3 -1 -
+R M 2036 o - N 23 2 0 -
+R M 2037 o - O 4 3 -1 -
+R M 2037 o - N 15 2 0 -
+R M 2038 o - S 26 3 -1 -
+R M 2038 o - N 7 2 0 -
+R M 2039 o - S 18 3 -1 -
+R M 2039 o - O 23 2 0 -
+R M 2040 o - S 2 3 -1 -
+R M 2040 o - O 14 2 0 -
+R M 2041 o - Au 25 3 -1 -
+R M 2041 o - S 29 2 0 -
+R M 2042 o - Au 10 3 -1 -
+R M 2042 o - S 21 2 0 -
+R M 2043 o - Au 2 3 -1 -
+R M 2043 o - S 13 2 0 -
+R M 2044 o - Jul 24 3 -1 -
+R M 2044 o - Au 28 2 0 -
+R M 2045 o - Jul 9 3 -1 -
+R M 2045 o - Au 20 2 0 -
+R M 2046 o - Jul 1 3 -1 -
+R M 2046 o - Au 12 2 0 -
+R M 2047 o - Jun 23 3 -1 -
+R M 2047 o - Jul 28 2 0 -
+R M 2048 o - Jun 7 3 -1 -
+R M 2048 o - Jul 19 2 0 -
+R M 2049 o - May 30 3 -1 -
+R M 2049 o - Jul 4 2 0 -
+R M 2050 o - May 15 3 -1 -
+R M 2050 o - Jun 26 2 0 -
+R M 2051 o - May 7 3 -1 -
+R M 2051 o - Jun 18 2 0 -
+R M 2052 o - Ap 28 3 -1 -
+R M 2052 o - Jun 2 2 0 -
+R M 2053 o - Ap 13 3 -1 -
+R M 2053 o - May 25 2 0 -
+R M 2054 o - Ap 5 3 -1 -
+R M 2054 o - May 17 2 0 -
+R M 2055 o - Mar 28 3 -1 -
+R M 2055 o - May 2 2 0 -
+R M 2056 o - Mar 12 3 -1 -
+R M 2056 o - Ap 23 2 0 -
+R M 2057 o - Mar 4 3 -1 -
+R M 2057 o - Ap 8 2 0 -
+R M 2058 o - F 17 3 -1 -
+R M 2058 o - Mar 31 2 0 -
+R M 2059 o - F 9 3 -1 -
+R M 2059 o - Mar 23 2 0 -
+R M 2060 o - F 1 3 -1 -
+R M 2060 o - Mar 7 2 0 -
+R M 2061 o - Ja 16 3 -1 -
+R M 2061 o - F 27 2 0 -
+R M 2062 o - Ja 8 3 -1 -
+R M 2062 o - F 19 2 0 -
+R M 2062 o - D 31 3 -1 -
+R M 2063 o - F 4 2 0 -
+R M 2063 o - D 16 3 -1 -
+R M 2064 o - Ja 27 2 0 -
+R M 2064 o - D 7 3 -1 -
+R M 2065 o - Ja 11 2 0 -
+R M 2065 o - N 22 3 -1 -
+R M 2066 o - Ja 3 2 0 -
+R M 2066 o - N 14 3 -1 -
+R M 2066 o - D 26 2 0 -
+R M 2067 o - N 6 3 -1 -
+R M 2067 o - D 11 2 0 -
+R M 2068 o - O 21 3 -1 -
+R M 2068 o - D 2 2 0 -
+R M 2069 o - O 13 3 -1 -
+R M 2069 o - N 24 2 0 -
+R M 2070 o - O 5 3 -1 -
+R M 2070 o - N 9 2 0 -
+R M 2071 o - S 20 3 -1 -
+R M 2071 o - N 1 2 0 -
+R M 2072 o - S 11 3 -1 -
+R M 2072 o - O 16 2 0 -
+R M 2073 o - Au 27 3 -1 -
+R M 2073 o - O 8 2 0 -
+R M 2074 o - Au 19 3 -1 -
+R M 2074 o - S 30 2 0 -
+R M 2075 o - Au 11 3 -1 -
+R M 2075 o - S 15 2 0 -
+R M 2076 o - Jul 26 3 -1 -
+R M 2076 o - S 6 2 0 -
+R M 2077 o - Jul 18 3 -1 -
+R M 2077 o - Au 29 2 0 -
+R M 2078 o - Jul 10 3 -1 -
+R M 2078 o - Au 14 2 0 -
+R M 2079 o - Jun 25 3 -1 -
+R M 2079 o - Au 6 2 0 -
+R M 2080 o - Jun 16 3 -1 -
+R M 2080 o - Jul 21 2 0 -
+R M 2081 o - Jun 1 3 -1 -
+R M 2081 o - Jul 13 2 0 -
+R M 2082 o - May 24 3 -1 -
+R M 2082 o - Jul 5 2 0 -
+R M 2083 o - May 16 3 -1 -
+R M 2083 o - Jun 20 2 0 -
+R M 2084 o - Ap 30 3 -1 -
+R M 2084 o - Jun 11 2 0 -
+R M 2085 o - Ap 22 3 -1 -
+R M 2085 o - Jun 3 2 0 -
+R M 2086 o - Ap 14 3 -1 -
+R M 2086 o - May 19 2 0 -
+R M 2087 o - Mar 30 3 -1 -
+R M 2087 o - May 11 2 0 -
+Z Africa/Casablanca -0:30:20 - LMT 1913 O 26
+0 M +00/+01 1984 Mar 16
+1 - +01 1986
+0 M +00/+01 2018 O 28 3
+1 M +01/+00
+Z Africa/El_Aaiun -0:52:48 - LMT 1934
+-1 - -01 1976 Ap 14
+0 M +00/+01 2018 O 28 3
+1 M +01/+00
+Z Africa/Maputo 2:10:20 - LMT 1903 Mar
+2 - CAT
+L Africa/Maputo Africa/Blantyre
+L Africa/Maputo Africa/Bujumbura
+L Africa/Maputo Africa/Gaborone
+L Africa/Maputo Africa/Harare
+L Africa/Maputo Africa/Kigali
+L Africa/Maputo Africa/Lubumbashi
+L Africa/Maputo Africa/Lusaka
+R NA 1994 o - Mar 21 0 -1 WAT
+R NA 1994 2017 - S Su>=1 2 0 CAT
+R NA 1995 2017 - Ap Su>=1 2 -1 WAT
+Z Africa/Windhoek 1:8:24 - LMT 1892 F 8
+1:30 - +0130 1903 Mar
+2 - SAST 1942 S 20 2
+2 1 SAST 1943 Mar 21 2
+2 - SAST 1990 Mar 21
+2 NA %s
+Z Africa/Lagos 0:13:35 - LMT 1905 Jul
+0 - GMT 1908 Jul
+0:13:35 - LMT 1914
+0:30 - +0030 1919 S
+1 - WAT
+L Africa/Lagos Africa/Bangui
+L Africa/Lagos Africa/Brazzaville
+L Africa/Lagos Africa/Douala
+L Africa/Lagos Africa/Kinshasa
+L Africa/Lagos Africa/Libreville
+L Africa/Lagos Africa/Luanda
+L Africa/Lagos Africa/Malabo
+L Africa/Lagos Africa/Niamey
+L Africa/Lagos Africa/Porto-Novo
+Z Indian/Reunion 3:41:52 - LMT 1911 Jun
+4 - +04
+Z Africa/Sao_Tome 0:26:56 - LMT 1884
+-0:36:45 - LMT 1912 Ja 1 0u
+0 - GMT 2018 Ja 1 1
+1 - WAT 2019 Ja 1 2
+0 - GMT
+Z Indian/Mahe 3:41:48 - LMT 1907
+4 - +04
+R SA 1942 1943 - S Su>=15 2 1 -
+R SA 1943 1944 - Mar Su>=15 2 0 -
+Z Africa/Johannesburg 1:52 - LMT 1892 F 8
+1:30 - SAST 1903 Mar
+2 SA SAST
+L Africa/Johannesburg Africa/Maseru
+L Africa/Johannesburg Africa/Mbabane
+R SD 1970 o - May 1 0 1 S
+R SD 1970 1985 - O 15 0 0 -
+R SD 1971 o - Ap 30 0 1 S
+R SD 1972 1985 - Ap lastSu 0 1 S
+Z Africa/Khartoum 2:10:8 - LMT 1931
+2 SD CA%sT 2000 Ja 15 12
+3 - EAT 2017 N
+2 - CAT
+Z Africa/Juba 2:6:28 - LMT 1931
+2 SD CA%sT 2000 Ja 15 12
+3 - EAT 2021 F
+2 - CAT
+R n 1939 o - Ap 15 23s 1 S
+R n 1939 o - N 18 23s 0 -
+R n 1940 o - F 25 23s 1 S
+R n 1941 o - O 6 0 0 -
+R n 1942 o - Mar 9 0 1 S
+R n 1942 o - N 2 3 0 -
+R n 1943 o - Mar 29 2 1 S
+R n 1943 o - Ap 17 2 0 -
+R n 1943 o - Ap 25 2 1 S
+R n 1943 o - O 4 2 0 -
+R n 1944 1945 - Ap M>=1 2 1 S
+R n 1944 o - O 8 0 0 -
+R n 1945 o - S 16 0 0 -
+R n 1977 o - Ap 30 0s 1 S
+R n 1977 o - S 24 0s 0 -
+R n 1978 o - May 1 0s 1 S
+R n 1978 o - O 1 0s 0 -
+R n 1988 o - Jun 1 0s 1 S
+R n 1988 1990 - S lastSu 0s 0 -
+R n 1989 o - Mar 26 0s 1 S
+R n 1990 o - May 1 0s 1 S
+R n 2005 o - May 1 0s 1 S
+R n 2005 o - S 30 1s 0 -
+R n 2006 2008 - Mar lastSu 2s 1 S
+R n 2006 2008 - O lastSu 2s 0 -
+Z Africa/Tunis 0:40:44 - LMT 1881 May 12
+0:9:21 - PMT 1911 Mar 11
+1 n CE%sT
+Z Antarctica/Casey 0 - -00 1969
+8 - +08 2009 O 18 2
+11 - +11 2010 Mar 5 2
+8 - +08 2011 O 28 2
+11 - +11 2012 F 21 17u
+8 - +08 2016 O 22
+11 - +11 2018 Mar 11 4
+8 - +08 2018 O 7 4
+11 - +11 2019 Mar 17 3
+8 - +08 2019 O 4 3
+11 - +11 2020 Mar 8 3
+8 - +08 2020 O 4 0:1
+11 - +11
+Z Antarctica/Davis 0 - -00 1957 Ja 13
+7 - +07 1964 N
+0 - -00 1969 F
+7 - +07 2009 O 18 2
+5 - +05 2010 Mar 10 20u
+7 - +07 2011 O 28 2
+5 - +05 2012 F 21 20u
+7 - +07
+Z Antarctica/Mawson 0 - -00 1954 F 13
+6 - +06 2009 O 18 2
+5 - +05
+Z Indian/Kerguelen 0 - -00 1950
+5 - +05
+R Tr 2005 ma - Mar lastSu 1u 2 +02
+R Tr 2004 ma - O lastSu 1u 0 +00
+Z Antarctica/Troll 0 - -00 2005 F 12
+0 Tr %s
+Z Antarctica/Vostok 0 - -00 1957 D 16
+6 - +06
+Z Antarctica/Rothera 0 - -00 1976 D
+-3 - -03
+Z Asia/Kabul 4:36:48 - LMT 1890
+4 - +04 1945
+4:30 - +0430
+R AM 2011 o - Mar lastSu 2s 1 -
+R AM 2011 o - O lastSu 2s 0 -
+Z Asia/Yerevan 2:58 - LMT 1924 May 2
+3 - +03 1957 Mar
+4 R +04/+05 1991 Mar 31 2s
+3 R +03/+04 1995 S 24 2s
+4 - +04 1997
+4 R +04/+05 2011
+4 AM +04/+05
+R AZ 1997 2015 - Mar lastSu 4 1 -
+R AZ 1997 2015 - O lastSu 5 0 -
+Z Asia/Baku 3:19:24 - LMT 1924 May 2
+3 - +03 1957 Mar
+4 R +04/+05 1991 Mar 31 2s
+3 R +03/+04 1992 S lastSu 2s
+4 - +04 1996
+4 E +04/+05 1997
+4 AZ +04/+05
+R BD 2009 o - Jun 19 23 1 -
+R BD 2009 o - D 31 24 0 -
+Z Asia/Dhaka 6:1:40 - LMT 1890
+5:53:20 - HMT 1941 O
+6:30 - +0630 1942 May 15
+5:30 - +0530 1942 S
+6:30 - +0630 1951 S 30
+6 - +06 2009
+6 BD +06/+07
+Z Asia/Thimphu 5:58:36 - LMT 1947 Au 15
+5:30 - +0530 1987 O
+6 - +06
+Z Indian/Chagos 4:49:40 - LMT 1907
+5 - +05 1996
+6 - +06
+Z Asia/Brunei 7:39:40 - LMT 1926 Mar
+7:30 - +0730 1933
+8 - +08
+Z Asia/Yangon 6:24:47 - LMT 1880
+6:24:47 - RMT 1920
+6:30 - +0630 1942 May
+9 - +09 1945 May 3
+6:30 - +0630
+R Sh 1919 o - Ap 12 24 1 D
+R Sh 1919 o - S 30 24 0 S
+R Sh 1940 o - Jun 1 0 1 D
+R Sh 1940 o - O 12 24 0 S
+R Sh 1941 o - Mar 15 0 1 D
+R Sh 1941 o - N 1 24 0 S
+R Sh 1942 o - Ja 31 0 1 D
+R Sh 1945 o - S 1 24 0 S
+R Sh 1946 o - May 15 0 1 D
+R Sh 1946 o - S 30 24 0 S
+R Sh 1947 o - Ap 15 0 1 D
+R Sh 1947 o - O 31 24 0 S
+R Sh 1948 1949 - May 1 0 1 D
+R Sh 1948 1949 - S 30 24 0 S
+R CN 1986 o - May 4 2 1 D
+R CN 1986 1991 - S Su>=11 2 0 S
+R CN 1987 1991 - Ap Su>=11 2 1 D
+Z Asia/Shanghai 8:5:43 - LMT 1901
+8 Sh C%sT 1949 May 28
+8 CN C%sT
+Z Asia/Urumqi 5:50:20 - LMT 1928
+6 - +06
+R HK 1946 o - Ap 21 0 1 S
+R HK 1946 o - D 1 3:30s 0 -
+R HK 1947 o - Ap 13 3:30s 1 S
+R HK 1947 o - N 30 3:30s 0 -
+R HK 1948 o - May 2 3:30s 1 S
+R HK 1948 1952 - O Su>=28 3:30s 0 -
+R HK 1949 1953 - Ap Su>=1 3:30 1 S
+R HK 1953 1964 - O Su>=31 3:30 0 -
+R HK 1954 1964 - Mar Su>=18 3:30 1 S
+R HK 1965 1976 - Ap Su>=16 3:30 1 S
+R HK 1965 1976 - O Su>=16 3:30 0 -
+R HK 1973 o - D 30 3:30 1 S
+R HK 1979 o - May 13 3:30 1 S
+R HK 1979 o - O 21 3:30 0 -
+Z Asia/Hong_Kong 7:36:42 - LMT 1904 O 30 0:36:42
+8 - HKT 1941 Jun 15 3
+8 1 HKST 1941 O 1 4
+8 0:30 HKWT 1941 D 25
+9 - JST 1945 N 18 2
+8 HK HK%sT
+R f 1946 o - May 15 0 1 D
+R f 1946 o - O 1 0 0 S
+R f 1947 o - Ap 15 0 1 D
+R f 1947 o - N 1 0 0 S
+R f 1948 1951 - May 1 0 1 D
+R f 1948 1951 - O 1 0 0 S
+R f 1952 o - Mar 1 0 1 D
+R f 1952 1954 - N 1 0 0 S
+R f 1953 1959 - Ap 1 0 1 D
+R f 1955 1961 - O 1 0 0 S
+R f 1960 1961 - Jun 1 0 1 D
+R f 1974 1975 - Ap 1 0 1 D
+R f 1974 1975 - O 1 0 0 S
+R f 1979 o - Jul 1 0 1 D
+R f 1979 o - O 1 0 0 S
+Z Asia/Taipei 8:6 - LMT 1896
+8 - CST 1937 O
+9 - JST 1945 S 21 1
+8 f C%sT
+R _ 1942 1943 - Ap 30 23 1 -
+R _ 1942 o - N 17 23 0 -
+R _ 1943 o - S 30 23 0 S
+R _ 1946 o - Ap 30 23s 1 D
+R _ 1946 o - S 30 23s 0 S
+R _ 1947 o - Ap 19 23s 1 D
+R _ 1947 o - N 30 23s 0 S
+R _ 1948 o - May 2 23s 1 D
+R _ 1948 o - O 31 23s 0 S
+R _ 1949 1950 - Ap Sa>=1 23s 1 D
+R _ 1949 1950 - O lastSa 23s 0 S
+R _ 1951 o - Mar 31 23s 1 D
+R _ 1951 o - O 28 23s 0 S
+R _ 1952 1953 - Ap Sa>=1 23s 1 D
+R _ 1952 o - N 1 23s 0 S
+R _ 1953 1954 - O lastSa 23s 0 S
+R _ 1954 1956 - Mar Sa>=17 23s 1 D
+R _ 1955 o - N 5 23s 0 S
+R _ 1956 1964 - N Su>=1 3:30 0 S
+R _ 1957 1964 - Mar Su>=18 3:30 1 D
+R _ 1965 1973 - Ap Su>=16 3:30 1 D
+R _ 1965 1966 - O Su>=16 2:30 0 S
+R _ 1967 1976 - O Su>=16 3:30 0 S
+R _ 1973 o - D 30 3:30 1 D
+R _ 1975 1976 - Ap Su>=16 3:30 1 D
+R _ 1979 o - May 13 3:30 1 D
+R _ 1979 o - O Su>=16 3:30 0 S
+Z Asia/Macau 7:34:10 - LMT 1904 O 30
+8 - CST 1941 D 21 23
+9 _ +09/+10 1945 S 30 24
+8 _ C%sT
+R CY 1975 o - Ap 13 0 1 S
+R CY 1975 o - O 12 0 0 -
+R CY 1976 o - May 15 0 1 S
+R CY 1976 o - O 11 0 0 -
+R CY 1977 1980 - Ap Su>=1 0 1 S
+R CY 1977 o - S 25 0 0 -
+R CY 1978 o - O 2 0 0 -
+R CY 1979 1997 - S lastSu 0 0 -
+R CY 1981 1998 - Mar lastSu 0 1 S
+Z Asia/Nicosia 2:13:28 - LMT 1921 N 14
+2 CY EE%sT 1998 S
+2 E EE%sT
+Z Asia/Famagusta 2:15:48 - LMT 1921 N 14
+2 CY EE%sT 1998 S
+2 E EE%sT 2016 S 8
+3 - +03 2017 O 29 1u
+2 E EE%sT
+L Asia/Nicosia Europe/Nicosia
+Z Asia/Tbilisi 2:59:11 - LMT 1880
+2:59:11 - TBMT 1924 May 2
+3 - +03 1957 Mar
+4 R +04/+05 1991 Mar 31 2s
+3 R +03/+04 1992
+3 e +03/+04 1994 S lastSu
+4 e +04/+05 1996 O lastSu
+4 1 +05 1997 Mar lastSu
+4 e +04/+05 2004 Jun 27
+3 R +03/+04 2005 Mar lastSu 2
+4 - +04
+Z Asia/Dili 8:22:20 - LMT 1912
+8 - +08 1942 F 21 23
+9 - +09 1976 May 3
+8 - +08 2000 S 17
+9 - +09
+Z Asia/Kolkata 5:53:28 - LMT 1854 Jun 28
+5:53:20 - HMT 1870
+5:21:10 - MMT 1906
+5:30 - IST 1941 O
+5:30 1 +0630 1942 May 15
+5:30 - IST 1942 S
+5:30 1 +0630 1945 O 15
+5:30 - IST
+Z Asia/Jakarta 7:7:12 - LMT 1867 Au 10
+7:7:12 - BMT 1923 D 31 23:47:12
+7:20 - +0720 1932 N
+7:30 - +0730 1942 Mar 23
+9 - +09 1945 S 23
+7:30 - +0730 1948 May
+8 - +08 1950 May
+7:30 - +0730 1964
+7 - WIB
+Z Asia/Pontianak 7:17:20 - LMT 1908 May
+7:17:20 - PMT 1932 N
+7:30 - +0730 1942 Ja 29
+9 - +09 1945 S 23
+7:30 - +0730 1948 May
+8 - +08 1950 May
+7:30 - +0730 1964
+8 - WITA 1988
+7 - WIB
+Z Asia/Makassar 7:57:36 - LMT 1920
+7:57:36 - MMT 1932 N
+8 - +08 1942 F 9
+9 - +09 1945 S 23
+8 - WITA
+Z Asia/Jayapura 9:22:48 - LMT 1932 N
+9 - +09 1944 S
+9:30 - +0930 1964
+9 - WIT
+R i 1978 1980 - Mar 20 24 1 -
+R i 1978 o - O 20 24 0 -
+R i 1979 o - S 18 24 0 -
+R i 1980 o - S 22 24 0 -
+R i 1991 o - May 2 24 1 -
+R i 1992 1995 - Mar 21 24 1 -
+R i 1991 1995 - S 21 24 0 -
+R i 1996 o - Mar 20 24 1 -
+R i 1996 o - S 20 24 0 -
+R i 1997 1999 - Mar 21 24 1 -
+R i 1997 1999 - S 21 24 0 -
+R i 2000 o - Mar 20 24 1 -
+R i 2000 o - S 20 24 0 -
+R i 2001 2003 - Mar 21 24 1 -
+R i 2001 2003 - S 21 24 0 -
+R i 2004 o - Mar 20 24 1 -
+R i 2004 o - S 20 24 0 -
+R i 2005 o - Mar 21 24 1 -
+R i 2005 o - S 21 24 0 -
+R i 2008 o - Mar 20 24 1 -
+R i 2008 o - S 20 24 0 -
+R i 2009 2011 - Mar 21 24 1 -
+R i 2009 2011 - S 21 24 0 -
+R i 2012 o - Mar 20 24 1 -
+R i 2012 o - S 20 24 0 -
+R i 2013 2015 - Mar 21 24 1 -
+R i 2013 2015 - S 21 24 0 -
+R i 2016 o - Mar 20 24 1 -
+R i 2016 o - S 20 24 0 -
+R i 2017 2019 - Mar 21 24 1 -
+R i 2017 2019 - S 21 24 0 -
+R i 2020 o - Mar 20 24 1 -
+R i 2020 o - S 20 24 0 -
+R i 2021 2023 - Mar 21 24 1 -
+R i 2021 2023 - S 21 24 0 -
+R i 2024 o - Mar 20 24 1 -
+R i 2024 o - S 20 24 0 -
+R i 2025 2027 - Mar 21 24 1 -
+R i 2025 2027 - S 21 24 0 -
+R i 2028 2029 - Mar 20 24 1 -
+R i 2028 2029 - S 20 24 0 -
+R i 2030 2031 - Mar 21 24 1 -
+R i 2030 2031 - S 21 24 0 -
+R i 2032 2033 - Mar 20 24 1 -
+R i 2032 2033 - S 20 24 0 -
+R i 2034 2035 - Mar 21 24 1 -
+R i 2034 2035 - S 21 24 0 -
+R i 2036 2037 - Mar 20 24 1 -
+R i 2036 2037 - S 20 24 0 -
+R i 2038 2039 - Mar 21 24 1 -
+R i 2038 2039 - S 21 24 0 -
+R i 2040 2041 - Mar 20 24 1 -
+R i 2040 2041 - S 20 24 0 -
+R i 2042 2043 - Mar 21 24 1 -
+R i 2042 2043 - S 21 24 0 -
+R i 2044 2045 - Mar 20 24 1 -
+R i 2044 2045 - S 20 24 0 -
+R i 2046 2047 - Mar 21 24 1 -
+R i 2046 2047 - S 21 24 0 -
+R i 2048 2049 - Mar 20 24 1 -
+R i 2048 2049 - S 20 24 0 -
+R i 2050 2051 - Mar 21 24 1 -
+R i 2050 2051 - S 21 24 0 -
+R i 2052 2053 - Mar 20 24 1 -
+R i 2052 2053 - S 20 24 0 -
+R i 2054 2055 - Mar 21 24 1 -
+R i 2054 2055 - S 21 24 0 -
+R i 2056 2057 - Mar 20 24 1 -
+R i 2056 2057 - S 20 24 0 -
+R i 2058 2059 - Mar 21 24 1 -
+R i 2058 2059 - S 21 24 0 -
+R i 2060 2062 - Mar 20 24 1 -
+R i 2060 2062 - S 20 24 0 -
+R i 2063 o - Mar 21 24 1 -
+R i 2063 o - S 21 24 0 -
+R i 2064 2066 - Mar 20 24 1 -
+R i 2064 2066 - S 20 24 0 -
+R i 2067 o - Mar 21 24 1 -
+R i 2067 o - S 21 24 0 -
+R i 2068 2070 - Mar 20 24 1 -
+R i 2068 2070 - S 20 24 0 -
+R i 2071 o - Mar 21 24 1 -
+R i 2071 o - S 21 24 0 -
+R i 2072 2074 - Mar 20 24 1 -
+R i 2072 2074 - S 20 24 0 -
+R i 2075 o - Mar 21 24 1 -
+R i 2075 o - S 21 24 0 -
+R i 2076 2078 - Mar 20 24 1 -
+R i 2076 2078 - S 20 24 0 -
+R i 2079 o - Mar 21 24 1 -
+R i 2079 o - S 21 24 0 -
+R i 2080 2082 - Mar 20 24 1 -
+R i 2080 2082 - S 20 24 0 -
+R i 2083 o - Mar 21 24 1 -
+R i 2083 o - S 21 24 0 -
+R i 2084 2086 - Mar 20 24 1 -
+R i 2084 2086 - S 20 24 0 -
+R i 2087 o - Mar 21 24 1 -
+R i 2087 o - S 21 24 0 -
+R i 2088 ma - Mar 20 24 1 -
+R i 2088 ma - S 20 24 0 -
+Z Asia/Tehran 3:25:44 - LMT 1916
+3:25:44 - TMT 1946
+3:30 - +0330 1977 N
+4 i +04/+05 1979
+3:30 i +0330/+0430
+R IQ 1982 o - May 1 0 1 -
+R IQ 1982 1984 - O 1 0 0 -
+R IQ 1983 o - Mar 31 0 1 -
+R IQ 1984 1985 - Ap 1 0 1 -
+R IQ 1985 1990 - S lastSu 1s 0 -
+R IQ 1986 1990 - Mar lastSu 1s 1 -
+R IQ 1991 2007 - Ap 1 3s 1 -
+R IQ 1991 2007 - O 1 3s 0 -
+Z Asia/Baghdad 2:57:40 - LMT 1890
+2:57:36 - BMT 1918
+3 - +03 1982 May
+3 IQ +03/+04
+R Z 1940 o - May 31 24u 1 D
+R Z 1940 o - S 30 24u 0 S
+R Z 1940 o - N 16 24u 1 D
+R Z 1942 1946 - O 31 24u 0 S
+R Z 1943 1944 - Mar 31 24u 1 D
+R Z 1945 1946 - Ap 15 24u 1 D
+R Z 1948 o - May 22 24u 2 DD
+R Z 1948 o - Au 31 24u 1 D
+R Z 1948 1949 - O 31 24u 0 S
+R Z 1949 o - Ap 30 24u 1 D
+R Z 1950 o - Ap 15 24u 1 D
+R Z 1950 o - S 14 24u 0 S
+R Z 1951 o - Mar 31 24u 1 D
+R Z 1951 o - N 10 24u 0 S
+R Z 1952 o - Ap 19 24u 1 D
+R Z 1952 o - O 18 24u 0 S
+R Z 1953 o - Ap 11 24u 1 D
+R Z 1953 o - S 12 24u 0 S
+R Z 1954 o - Jun 12 24u 1 D
+R Z 1954 o - S 11 24u 0 S
+R Z 1955 o - Jun 11 24u 1 D
+R Z 1955 o - S 10 24u 0 S
+R Z 1956 o - Jun 2 24u 1 D
+R Z 1956 o - S 29 24u 0 S
+R Z 1957 o - Ap 27 24u 1 D
+R Z 1957 o - S 21 24u 0 S
+R Z 1974 o - Jul 6 24 1 D
+R Z 1974 o - O 12 24 0 S
+R Z 1975 o - Ap 19 24 1 D
+R Z 1975 o - Au 30 24 0 S
+R Z 1980 o - Au 2 24s 1 D
+R Z 1980 o - S 13 24s 0 S
+R Z 1984 o - May 5 24s 1 D
+R Z 1984 o - Au 25 24s 0 S
+R Z 1985 o - Ap 13 24 1 D
+R Z 1985 o - Au 31 24 0 S
+R Z 1986 o - May 17 24 1 D
+R Z 1986 o - S 6 24 0 S
+R Z 1987 o - Ap 14 24 1 D
+R Z 1987 o - S 12 24 0 S
+R Z 1988 o - Ap 9 24 1 D
+R Z 1988 o - S 3 24 0 S
+R Z 1989 o - Ap 29 24 1 D
+R Z 1989 o - S 2 24 0 S
+R Z 1990 o - Mar 24 24 1 D
+R Z 1990 o - Au 25 24 0 S
+R Z 1991 o - Mar 23 24 1 D
+R Z 1991 o - Au 31 24 0 S
+R Z 1992 o - Mar 28 24 1 D
+R Z 1992 o - S 5 24 0 S
+R Z 1993 o - Ap 2 0 1 D
+R Z 1993 o - S 5 0 0 S
+R Z 1994 o - Ap 1 0 1 D
+R Z 1994 o - Au 28 0 0 S
+R Z 1995 o - Mar 31 0 1 D
+R Z 1995 o - S 3 0 0 S
+R Z 1996 o - Mar 14 24 1 D
+R Z 1996 o - S 15 24 0 S
+R Z 1997 o - Mar 20 24 1 D
+R Z 1997 o - S 13 24 0 S
+R Z 1998 o - Mar 20 0 1 D
+R Z 1998 o - S 6 0 0 S
+R Z 1999 o - Ap 2 2 1 D
+R Z 1999 o - S 3 2 0 S
+R Z 2000 o - Ap 14 2 1 D
+R Z 2000 o - O 6 1 0 S
+R Z 2001 o - Ap 9 1 1 D
+R Z 2001 o - S 24 1 0 S
+R Z 2002 o - Mar 29 1 1 D
+R Z 2002 o - O 7 1 0 S
+R Z 2003 o - Mar 28 1 1 D
+R Z 2003 o - O 3 1 0 S
+R Z 2004 o - Ap 7 1 1 D
+R Z 2004 o - S 22 1 0 S
+R Z 2005 2012 - Ap F<=1 2 1 D
+R Z 2005 o - O 9 2 0 S
+R Z 2006 o - O 1 2 0 S
+R Z 2007 o - S 16 2 0 S
+R Z 2008 o - O 5 2 0 S
+R Z 2009 o - S 27 2 0 S
+R Z 2010 o - S 12 2 0 S
+R Z 2011 o - O 2 2 0 S
+R Z 2012 o - S 23 2 0 S
+R Z 2013 ma - Mar F>=23 2 1 D
+R Z 2013 ma - O lastSu 2 0 S
+Z Asia/Jerusalem 2:20:54 - LMT 1880
+2:20:40 - JMT 1918
+2 Z I%sT
+R JP 1948 o - May Sa>=1 24 1 D
+R JP 1948 1951 - S Sa>=8 25 0 S
+R JP 1949 o - Ap Sa>=1 24 1 D
+R JP 1950 1951 - May Sa>=1 24 1 D
+Z Asia/Tokyo 9:18:59 - LMT 1887 D 31 15u
+9 JP J%sT
+R J 1973 o - Jun 6 0 1 S
+R J 1973 1975 - O 1 0 0 -
+R J 1974 1977 - May 1 0 1 S
+R J 1976 o - N 1 0 0 -
+R J 1977 o - O 1 0 0 -
+R J 1978 o - Ap 30 0 1 S
+R J 1978 o - S 30 0 0 -
+R J 1985 o - Ap 1 0 1 S
+R J 1985 o - O 1 0 0 -
+R J 1986 1988 - Ap F>=1 0 1 S
+R J 1986 1990 - O F>=1 0 0 -
+R J 1989 o - May 8 0 1 S
+R J 1990 o - Ap 27 0 1 S
+R J 1991 o - Ap 17 0 1 S
+R J 1991 o - S 27 0 0 -
+R J 1992 o - Ap 10 0 1 S
+R J 1992 1993 - O F>=1 0 0 -
+R J 1993 1998 - Ap F>=1 0 1 S
+R J 1994 o - S F>=15 0 0 -
+R J 1995 1998 - S F>=15 0s 0 -
+R J 1999 o - Jul 1 0s 1 S
+R J 1999 2002 - S lastF 0s 0 -
+R J 2000 2001 - Mar lastTh 0s 1 S
+R J 2002 2012 - Mar lastTh 24 1 S
+R J 2003 o - O 24 0s 0 -
+R J 2004 o - O 15 0s 0 -
+R J 2005 o - S lastF 0s 0 -
+R J 2006 2011 - O lastF 0s 0 -
+R J 2013 o - D 20 0 0 -
+R J 2014 2021 - Mar lastTh 24 1 S
+R J 2014 ma - O lastF 0s 0 -
+R J 2022 ma - F lastTh 24 1 S
+Z Asia/Amman 2:23:44 - LMT 1931
+2 J EE%sT
+Z Asia/Almaty 5:7:48 - LMT 1924 May 2
+5 - +05 1930 Jun 21
+6 R +06/+07 1991 Mar 31 2s
+5 R +05/+06 1992 Ja 19 2s
+6 R +06/+07 2004 O 31 2s
+6 - +06
+Z Asia/Qyzylorda 4:21:52 - LMT 1924 May 2
+4 - +04 1930 Jun 21
+5 - +05 1981 Ap
+5 1 +06 1981 O
+6 - +06 1982 Ap
+5 R +05/+06 1991 Mar 31 2s
+4 R +04/+05 1991 S 29 2s
+5 R +05/+06 1992 Ja 19 2s
+6 R +06/+07 1992 Mar 29 2s
+5 R +05/+06 2004 O 31 2s
+6 - +06 2018 D 21
+5 - +05
+Z Asia/Qostanay 4:14:28 - LMT 1924 May 2
+4 - +04 1930 Jun 21
+5 - +05 1981 Ap
+5 1 +06 1981 O
+6 - +06 1982 Ap
+5 R +05/+06 1991 Mar 31 2s
+4 R +04/+05 1992 Ja 19 2s
+5 R +05/+06 2004 O 31 2s
+6 - +06
+Z Asia/Aqtobe 3:48:40 - LMT 1924 May 2
+4 - +04 1930 Jun 21
+5 - +05 1981 Ap
+5 1 +06 1981 O
+6 - +06 1982 Ap
+5 R +05/+06 1991 Mar 31 2s
+4 R +04/+05 1992 Ja 19 2s
+5 R +05/+06 2004 O 31 2s
+5 - +05
+Z Asia/Aqtau 3:21:4 - LMT 1924 May 2
+4 - +04 1930 Jun 21
+5 - +05 1981 O
+6 - +06 1982 Ap
+5 R +05/+06 1991 Mar 31 2s
+4 R +04/+05 1992 Ja 19 2s
+5 R +05/+06 1994 S 25 2s
+4 R +04/+05 2004 O 31 2s
+5 - +05
+Z Asia/Atyrau 3:27:44 - LMT 1924 May 2
+3 - +03 1930 Jun 21
+5 - +05 1981 O
+6 - +06 1982 Ap
+5 R +05/+06 1991 Mar 31 2s
+4 R +04/+05 1992 Ja 19 2s
+5 R +05/+06 1999 Mar 28 2s
+4 R +04/+05 2004 O 31 2s
+5 - +05
+Z Asia/Oral 3:25:24 - LMT 1924 May 2
+3 - +03 1930 Jun 21
+5 - +05 1981 Ap
+5 1 +06 1981 O
+6 - +06 1982 Ap
+5 R +05/+06 1989 Mar 26 2s
+4 R +04/+05 1992 Ja 19 2s
+5 R +05/+06 1992 Mar 29 2s
+4 R +04/+05 2004 O 31 2s
+5 - +05
+R KG 1992 1996 - Ap Su>=7 0s 1 -
+R KG 1992 1996 - S lastSu 0 0 -
+R KG 1997 2005 - Mar lastSu 2:30 1 -
+R KG 1997 2004 - O lastSu 2:30 0 -
+Z Asia/Bishkek 4:58:24 - LMT 1924 May 2
+5 - +05 1930 Jun 21
+6 R +06/+07 1991 Mar 31 2s
+5 R +05/+06 1991 Au 31 2
+5 KG +05/+06 2005 Au 12
+6 - +06
+R KR 1948 o - Jun 1 0 1 D
+R KR 1948 o - S 12 24 0 S
+R KR 1949 o - Ap 3 0 1 D
+R KR 1949 1951 - S Sa>=7 24 0 S
+R KR 1950 o - Ap 1 0 1 D
+R KR 1951 o - May 6 0 1 D
+R KR 1955 o - May 5 0 1 D
+R KR 1955 o - S 8 24 0 S
+R KR 1956 o - May 20 0 1 D
+R KR 1956 o - S 29 24 0 S
+R KR 1957 1960 - May Su>=1 0 1 D
+R KR 1957 1960 - S Sa>=17 24 0 S
+R KR 1987 1988 - May Su>=8 2 1 D
+R KR 1987 1988 - O Su>=8 3 0 S
+Z Asia/Seoul 8:27:52 - LMT 1908 Ap
+8:30 - KST 1912
+9 - JST 1945 S 8
+9 KR K%sT 1954 Mar 21
+8:30 KR K%sT 1961 Au 10
+9 KR K%sT
+Z Asia/Pyongyang 8:23 - LMT 1908 Ap
+8:30 - KST 1912
+9 - JST 1945 Au 24
+9 - KST 2015 Au 15
+8:30 - KST 2018 May 4 23:30
+9 - KST
+R l 1920 o - Mar 28 0 1 S
+R l 1920 o - O 25 0 0 -
+R l 1921 o - Ap 3 0 1 S
+R l 1921 o - O 3 0 0 -
+R l 1922 o - Mar 26 0 1 S
+R l 1922 o - O 8 0 0 -
+R l 1923 o - Ap 22 0 1 S
+R l 1923 o - S 16 0 0 -
+R l 1957 1961 - May 1 0 1 S
+R l 1957 1961 - O 1 0 0 -
+R l 1972 o - Jun 22 0 1 S
+R l 1972 1977 - O 1 0 0 -
+R l 1973 1977 - May 1 0 1 S
+R l 1978 o - Ap 30 0 1 S
+R l 1978 o - S 30 0 0 -
+R l 1984 1987 - May 1 0 1 S
+R l 1984 1991 - O 16 0 0 -
+R l 1988 o - Jun 1 0 1 S
+R l 1989 o - May 10 0 1 S
+R l 1990 1992 - May 1 0 1 S
+R l 1992 o - O 4 0 0 -
+R l 1993 ma - Mar lastSu 0 1 S
+R l 1993 1998 - S lastSu 0 0 -
+R l 1999 ma - O lastSu 0 0 -
+Z Asia/Beirut 2:22 - LMT 1880
+2 l EE%sT
+R NB 1935 1941 - S 14 0 0:20 -
+R NB 1935 1941 - D 14 0 0 -
+Z Asia/Kuala_Lumpur 6:46:46 - LMT 1901
+6:55:25 - SMT 1905 Jun
+7 - +07 1933
+7 0:20 +0720 1936
+7:20 - +0720 1941 S
+7:30 - +0730 1942 F 16
+9 - +09 1945 S 12
+7:30 - +0730 1982
+8 - +08
+Z Asia/Kuching 7:21:20 - LMT 1926 Mar
+7:30 - +0730 1933
+8 NB +08/+0820 1942 F 16
+9 - +09 1945 S 12
+8 - +08
+Z Indian/Maldives 4:54 - LMT 1880
+4:54 - MMT 1960
+5 - +05
+R X 1983 1984 - Ap 1 0 1 -
+R X 1983 o - O 1 0 0 -
+R X 1985 1998 - Mar lastSu 0 1 -
+R X 1984 1998 - S lastSu 0 0 -
+R X 2001 o - Ap lastSa 2 1 -
+R X 2001 2006 - S lastSa 2 0 -
+R X 2002 2006 - Mar lastSa 2 1 -
+R X 2015 2016 - Mar lastSa 2 1 -
+R X 2015 2016 - S lastSa 0 0 -
+Z Asia/Hovd 6:6:36 - LMT 1905 Au
+6 - +06 1978
+7 X +07/+08
+Z Asia/Ulaanbaatar 7:7:32 - LMT 1905 Au
+7 - +07 1978
+8 X +08/+09
+Z Asia/Choibalsan 7:38 - LMT 1905 Au
+7 - +07 1978
+8 - +08 1983 Ap
+9 X +09/+10 2008 Mar 31
+8 X +08/+09
+Z Asia/Kathmandu 5:41:16 - LMT 1920
+5:30 - +0530 1986
+5:45 - +0545
+R PK 2002 o - Ap Su>=2 0 1 S
+R PK 2002 o - O Su>=2 0 0 -
+R PK 2008 o - Jun 1 0 1 S
+R PK 2008 2009 - N 1 0 0 -
+R PK 2009 o - Ap 15 0 1 S
+Z Asia/Karachi 4:28:12 - LMT 1907
+5:30 - +0530 1942 S
+5:30 1 +0630 1945 O 15
+5:30 - +0530 1951 S 30
+5 - +05 1971 Mar 26
+5 PK PK%sT
+R P 1999 2005 - Ap F>=15 0 1 S
+R P 1999 2003 - O F>=15 0 0 -
+R P 2004 o - O 1 1 0 -
+R P 2005 o - O 4 2 0 -
+R P 2006 2007 - Ap 1 0 1 S
+R P 2006 o - S 22 0 0 -
+R P 2007 o - S 13 2 0 -
+R P 2008 2009 - Mar lastF 0 1 S
+R P 2008 o - S 1 0 0 -
+R P 2009 o - S 4 1 0 -
+R P 2010 o - Mar 26 0 1 S
+R P 2010 o - Au 11 0 0 -
+R P 2011 o - Ap 1 0:1 1 S
+R P 2011 o - Au 1 0 0 -
+R P 2011 o - Au 30 0 1 S
+R P 2011 o - S 30 0 0 -
+R P 2012 2014 - Mar lastTh 24 1 S
+R P 2012 o - S 21 1 0 -
+R P 2013 o - S 27 0 0 -
+R P 2014 o - O 24 0 0 -
+R P 2015 o - Mar 28 0 1 S
+R P 2015 o - O 23 1 0 -
+R P 2016 2018 - Mar Sa>=24 1 1 S
+R P 2016 2018 - O Sa>=24 1 0 -
+R P 2019 o - Mar 29 0 1 S
+R P 2019 o - O Sa>=24 0 0 -
+R P 2020 ma - Mar Sa>=24 0 1 S
+R P 2020 ma - O Sa>=24 1 0 -
+Z Asia/Gaza 2:17:52 - LMT 1900 O
+2 Z EET/EEST 1948 May 15
+2 K EE%sT 1967 Jun 5
+2 Z I%sT 1996
+2 J EE%sT 1999
+2 P EE%sT 2008 Au 29
+2 - EET 2008 S
+2 P EE%sT 2010
+2 - EET 2010 Mar 27 0:1
+2 P EE%sT 2011 Au
+2 - EET 2012
+2 P EE%sT
+Z Asia/Hebron 2:20:23 - LMT 1900 O
+2 Z EET/EEST 1948 May 15
+2 K EE%sT 1967 Jun 5
+2 Z I%sT 1996
+2 J EE%sT 1999
+2 P EE%sT
+R PH 1936 o - N 1 0 1 D
+R PH 1937 o - F 1 0 0 S
+R PH 1954 o - Ap 12 0 1 D
+R PH 1954 o - Jul 1 0 0 S
+R PH 1978 o - Mar 22 0 1 D
+R PH 1978 o - S 21 0 0 S
+Z Asia/Manila -15:56 - LMT 1844 D 31
+8:4 - LMT 1899 May 11
+8 PH P%sT 1942 May
+9 - JST 1944 N
+8 PH P%sT
+Z Asia/Qatar 3:26:8 - LMT 1920
+4 - +04 1972 Jun
+3 - +03
+L Asia/Qatar Asia/Bahrain
+Z Asia/Riyadh 3:6:52 - LMT 1947 Mar 14
+3 - +03
+L Asia/Riyadh Antarctica/Syowa
+L Asia/Riyadh Asia/Aden
+L Asia/Riyadh Asia/Kuwait
+Z Asia/Singapore 6:55:25 - LMT 1901
+6:55:25 - SMT 1905 Jun
+7 - +07 1933
+7 0:20 +0720 1936
+7:20 - +0720 1941 S
+7:30 - +0730 1942 F 16
+9 - +09 1945 S 12
+7:30 - +0730 1982
+8 - +08
+Z Asia/Colombo 5:19:24 - LMT 1880
+5:19:32 - MMT 1906
+5:30 - +0530 1942 Ja 5
+5:30 0:30 +06 1942 S
+5:30 1 +0630 1945 O 16 2
+5:30 - +0530 1996 May 25
+6:30 - +0630 1996 O 26 0:30
+6 - +06 2006 Ap 15 0:30
+5:30 - +0530
+R S 1920 1923 - Ap Su>=15 2 1 S
+R S 1920 1923 - O Su>=1 2 0 -
+R S 1962 o - Ap 29 2 1 S
+R S 1962 o - O 1 2 0 -
+R S 1963 1965 - May 1 2 1 S
+R S 1963 o - S 30 2 0 -
+R S 1964 o - O 1 2 0 -
+R S 1965 o - S 30 2 0 -
+R S 1966 o - Ap 24 2 1 S
+R S 1966 1976 - O 1 2 0 -
+R S 1967 1978 - May 1 2 1 S
+R S 1977 1978 - S 1 2 0 -
+R S 1983 1984 - Ap 9 2 1 S
+R S 1983 1984 - O 1 2 0 -
+R S 1986 o - F 16 2 1 S
+R S 1986 o - O 9 2 0 -
+R S 1987 o - Mar 1 2 1 S
+R S 1987 1988 - O 31 2 0 -
+R S 1988 o - Mar 15 2 1 S
+R S 1989 o - Mar 31 2 1 S
+R S 1989 o - O 1 2 0 -
+R S 1990 o - Ap 1 2 1 S
+R S 1990 o - S 30 2 0 -
+R S 1991 o - Ap 1 0 1 S
+R S 1991 1992 - O 1 0 0 -
+R S 1992 o - Ap 8 0 1 S
+R S 1993 o - Mar 26 0 1 S
+R S 1993 o - S 25 0 0 -
+R S 1994 1996 - Ap 1 0 1 S
+R S 1994 2005 - O 1 0 0 -
+R S 1997 1998 - Mar lastM 0 1 S
+R S 1999 2006 - Ap 1 0 1 S
+R S 2006 o - S 22 0 0 -
+R S 2007 o - Mar lastF 0 1 S
+R S 2007 o - N F>=1 0 0 -
+R S 2008 o - Ap F>=1 0 1 S
+R S 2008 o - N 1 0 0 -
+R S 2009 o - Mar lastF 0 1 S
+R S 2010 2011 - Ap F>=1 0 1 S
+R S 2012 ma - Mar lastF 0 1 S
+R S 2009 ma - O lastF 0 0 -
+Z Asia/Damascus 2:25:12 - LMT 1920
+2 S EE%sT
+Z Asia/Dushanbe 4:35:12 - LMT 1924 May 2
+5 - +05 1930 Jun 21
+6 R +06/+07 1991 Mar 31 2s
+5 1 +05/+06 1991 S 9 2s
+5 - +05
+Z Asia/Bangkok 6:42:4 - LMT 1880
+6:42:4 - BMT 1920 Ap
+7 - +07
+L Asia/Bangkok Asia/Phnom_Penh
+L Asia/Bangkok Asia/Vientiane
+Z Asia/Ashgabat 3:53:32 - LMT 1924 May 2
+4 - +04 1930 Jun 21
+5 R +05/+06 1991 Mar 31 2
+4 R +04/+05 1992 Ja 19 2
+5 - +05
+Z Asia/Dubai 3:41:12 - LMT 1920
+4 - +04
+L Asia/Dubai Asia/Muscat
+Z Asia/Samarkand 4:27:53 - LMT 1924 May 2
+4 - +04 1930 Jun 21
+5 - +05 1981 Ap
+5 1 +06 1981 O
+6 - +06 1982 Ap
+5 R +05/+06 1992
+5 - +05
+Z Asia/Tashkent 4:37:11 - LMT 1924 May 2
+5 - +05 1930 Jun 21
+6 R +06/+07 1991 Mar 31 2
+5 R +05/+06 1992
+5 - +05
+Z Asia/Ho_Chi_Minh 7:6:40 - LMT 1906 Jul
+7:6:30 - PLMT 1911 May
+7 - +07 1942 D 31 23
+8 - +08 1945 Mar 14 23
+9 - +09 1945 S 2
+7 - +07 1947 Ap
+8 - +08 1955 Jul
+7 - +07 1959 D 31 23
+8 - +08 1975 Jun 13
+7 - +07
+R AU 1917 o - Ja 1 2s 1 D
+R AU 1917 o - Mar lastSu 2s 0 S
+R AU 1942 o - Ja 1 2s 1 D
+R AU 1942 o - Mar lastSu 2s 0 S
+R AU 1942 o - S 27 2s 1 D
+R AU 1943 1944 - Mar lastSu 2s 0 S
+R AU 1943 o - O 3 2s 1 D
+Z Australia/Darwin 8:43:20 - LMT 1895 F
+9 - ACST 1899 May
+9:30 AU AC%sT
+R AW 1974 o - O lastSu 2s 1 D
+R AW 1975 o - Mar Su>=1 2s 0 S
+R AW 1983 o - O lastSu 2s 1 D
+R AW 1984 o - Mar Su>=1 2s 0 S
+R AW 1991 o - N 17 2s 1 D
+R AW 1992 o - Mar Su>=1 2s 0 S
+R AW 2006 o - D 3 2s 1 D
+R AW 2007 2009 - Mar lastSu 2s 0 S
+R AW 2007 2008 - O lastSu 2s 1 D
+Z Australia/Perth 7:43:24 - LMT 1895 D
+8 AU AW%sT 1943 Jul
+8 AW AW%sT
+Z Australia/Eucla 8:35:28 - LMT 1895 D
+8:45 AU +0845/+0945 1943 Jul
+8:45 AW +0845/+0945
+R AQ 1971 o - O lastSu 2s 1 D
+R AQ 1972 o - F lastSu 2s 0 S
+R AQ 1989 1991 - O lastSu 2s 1 D
+R AQ 1990 1992 - Mar Su>=1 2s 0 S
+R Ho 1992 1993 - O lastSu 2s 1 D
+R Ho 1993 1994 - Mar Su>=1 2s 0 S
+Z Australia/Brisbane 10:12:8 - LMT 1895
+10 AU AE%sT 1971
+10 AQ AE%sT
+Z Australia/Lindeman 9:55:56 - LMT 1895
+10 AU AE%sT 1971
+10 AQ AE%sT 1992 Jul
+10 Ho AE%sT
+R AS 1971 1985 - O lastSu 2s 1 D
+R AS 1986 o - O 19 2s 1 D
+R AS 1987 2007 - O lastSu 2s 1 D
+R AS 1972 o - F 27 2s 0 S
+R AS 1973 1985 - Mar Su>=1 2s 0 S
+R AS 1986 1990 - Mar Su>=15 2s 0 S
+R AS 1991 o - Mar 3 2s 0 S
+R AS 1992 o - Mar 22 2s 0 S
+R AS 1993 o - Mar 7 2s 0 S
+R AS 1994 o - Mar 20 2s 0 S
+R AS 1995 2005 - Mar lastSu 2s 0 S
+R AS 2006 o - Ap 2 2s 0 S
+R AS 2007 o - Mar lastSu 2s 0 S
+R AS 2008 ma - Ap Su>=1 2s 0 S
+R AS 2008 ma - O Su>=1 2s 1 D
+Z Australia/Adelaide 9:14:20 - LMT 1895 F
+9 - ACST 1899 May
+9:30 AU AC%sT 1971
+9:30 AS AC%sT
+R AT 1916 o - O Su>=1 2s 1 D
+R AT 1917 o - Mar lastSu 2s 0 S
+R AT 1917 1918 - O Su>=22 2s 1 D
+R AT 1918 1919 - Mar Su>=1 2s 0 S
+R AT 1967 o - O Su>=1 2s 1 D
+R AT 1968 o - Mar Su>=29 2s 0 S
+R AT 1968 1985 - O lastSu 2s 1 D
+R AT 1969 1971 - Mar Su>=8 2s 0 S
+R AT 1972 o - F lastSu 2s 0 S
+R AT 1973 1981 - Mar Su>=1 2s 0 S
+R AT 1982 1983 - Mar lastSu 2s 0 S
+R AT 1984 1986 - Mar Su>=1 2s 0 S
+R AT 1986 o - O Su>=15 2s 1 D
+R AT 1987 1990 - Mar Su>=15 2s 0 S
+R AT 1987 o - O Su>=22 2s 1 D
+R AT 1988 1990 - O lastSu 2s 1 D
+R AT 1991 1999 - O Su>=1 2s 1 D
+R AT 1991 2005 - Mar lastSu 2s 0 S
+R AT 2000 o - Au lastSu 2s 1 D
+R AT 2001 ma - O Su>=1 2s 1 D
+R AT 2006 o - Ap Su>=1 2s 0 S
+R AT 2007 o - Mar lastSu 2s 0 S
+R AT 2008 ma - Ap Su>=1 2s 0 S
+Z Australia/Hobart 9:49:16 - LMT 1895 S
+10 AT AE%sT 1919 O 24
+10 AU AE%sT 1967
+10 AT AE%sT
+R AV 1971 1985 - O lastSu 2s 1 D
+R AV 1972 o - F lastSu 2s 0 S
+R AV 1973 1985 - Mar Su>=1 2s 0 S
+R AV 1986 1990 - Mar Su>=15 2s 0 S
+R AV 1986 1987 - O Su>=15 2s 1 D
+R AV 1988 1999 - O lastSu 2s 1 D
+R AV 1991 1994 - Mar Su>=1 2s 0 S
+R AV 1995 2005 - Mar lastSu 2s 0 S
+R AV 2000 o - Au lastSu 2s 1 D
+R AV 2001 2007 - O lastSu 2s 1 D
+R AV 2006 o - Ap Su>=1 2s 0 S
+R AV 2007 o - Mar lastSu 2s 0 S
+R AV 2008 ma - Ap Su>=1 2s 0 S
+R AV 2008 ma - O Su>=1 2s 1 D
+Z Australia/Melbourne 9:39:52 - LMT 1895 F
+10 AU AE%sT 1971
+10 AV AE%sT
+R AN 1971 1985 - O lastSu 2s 1 D
+R AN 1972 o - F 27 2s 0 S
+R AN 1973 1981 - Mar Su>=1 2s 0 S
+R AN 1982 o - Ap Su>=1 2s 0 S
+R AN 1983 1985 - Mar Su>=1 2s 0 S
+R AN 1986 1989 - Mar Su>=15 2s 0 S
+R AN 1986 o - O 19 2s 1 D
+R AN 1987 1999 - O lastSu 2s 1 D
+R AN 1990 1995 - Mar Su>=1 2s 0 S
+R AN 1996 2005 - Mar lastSu 2s 0 S
+R AN 2000 o - Au lastSu 2s 1 D
+R AN 2001 2007 - O lastSu 2s 1 D
+R AN 2006 o - Ap Su>=1 2s 0 S
+R AN 2007 o - Mar lastSu 2s 0 S
+R AN 2008 ma - Ap Su>=1 2s 0 S
+R AN 2008 ma - O Su>=1 2s 1 D
+Z Australia/Sydney 10:4:52 - LMT 1895 F
+10 AU AE%sT 1971
+10 AN AE%sT
+Z Australia/Broken_Hill 9:25:48 - LMT 1895 F
+10 - AEST 1896 Au 23
+9 - ACST 1899 May
+9:30 AU AC%sT 1971
+9:30 AN AC%sT 2000
+9:30 AS AC%sT
+R LH 1981 1984 - O lastSu 2 1 -
+R LH 1982 1985 - Mar Su>=1 2 0 -
+R LH 1985 o - O lastSu 2 0:30 -
+R LH 1986 1989 - Mar Su>=15 2 0 -
+R LH 1986 o - O 19 2 0:30 -
+R LH 1987 1999 - O lastSu 2 0:30 -
+R LH 1990 1995 - Mar Su>=1 2 0 -
+R LH 1996 2005 - Mar lastSu 2 0 -
+R LH 2000 o - Au lastSu 2 0:30 -
+R LH 2001 2007 - O lastSu 2 0:30 -
+R LH 2006 o - Ap Su>=1 2 0 -
+R LH 2007 o - Mar lastSu 2 0 -
+R LH 2008 ma - Ap Su>=1 2 0 -
+R LH 2008 ma - O Su>=1 2 0:30 -
+Z Australia/Lord_Howe 10:36:20 - LMT 1895 F
+10 - AEST 1981 Mar
+10:30 LH +1030/+1130 1985 Jul
+10:30 LH +1030/+11
+Z Antarctica/Macquarie 0 - -00 1899 N
+10 - AEST 1916 O 1 2
+10 1 AEDT 1917 F
+10 AU AE%sT 1919 Ap 1 0s
+0 - -00 1948 Mar 25
+10 AU AE%sT 1967
+10 AT AE%sT 2010
+10 1 AEDT 2011
+10 AT AE%sT
+Z Indian/Christmas 7:2:52 - LMT 1895 F
+7 - +07
+Z Indian/Cocos 6:27:40 - LMT 1900
+6:30 - +0630
+R FJ 1998 1999 - N Su>=1 2 1 -
+R FJ 1999 2000 - F lastSu 3 0 -
+R FJ 2009 o - N 29 2 1 -
+R FJ 2010 o - Mar lastSu 3 0 -
+R FJ 2010 2013 - O Su>=21 2 1 -
+R FJ 2011 o - Mar Su>=1 3 0 -
+R FJ 2012 2013 - Ja Su>=18 3 0 -
+R FJ 2014 o - Ja Su>=18 2 0 -
+R FJ 2014 2018 - N Su>=1 2 1 -
+R FJ 2015 ma - Ja Su>=12 3 0 -
+R FJ 2019 o - N Su>=8 2 1 -
+R FJ 2020 o - D 20 2 1 -
+R FJ 2021 ma - N Su>=8 2 1 -
+Z Pacific/Fiji 11:55:44 - LMT 1915 O 26
+12 FJ +12/+13
+Z Pacific/Gambier -8:59:48 - LMT 1912 O
+-9 - -09
+Z Pacific/Marquesas -9:18 - LMT 1912 O
+-9:30 - -0930
+Z Pacific/Tahiti -9:58:16 - LMT 1912 O
+-10 - -10
+R Gu 1959 o - Jun 27 2 1 D
+R Gu 1961 o - Ja 29 2 0 S
+R Gu 1967 o - S 1 2 1 D
+R Gu 1969 o - Ja 26 0:1 0 S
+R Gu 1969 o - Jun 22 2 1 D
+R Gu 1969 o - Au 31 2 0 S
+R Gu 1970 1971 - Ap lastSu 2 1 D
+R Gu 1970 1971 - S Su>=1 2 0 S
+R Gu 1973 o - D 16 2 1 D
+R Gu 1974 o - F 24 2 0 S
+R Gu 1976 o - May 26 2 1 D
+R Gu 1976 o - Au 22 2:1 0 S
+R Gu 1977 o - Ap 24 2 1 D
+R Gu 1977 o - Au 28 2 0 S
+Z Pacific/Guam -14:21 - LMT 1844 D 31
+9:39 - LMT 1901
+10 - GST 1941 D 10
+9 - +09 1944 Jul 31
+10 Gu G%sT 2000 D 23
+10 - ChST
+L Pacific/Guam Pacific/Saipan
+Z Pacific/Tarawa 11:32:4 - LMT 1901
+12 - +12
+Z Pacific/Kanton 0 - -00 1937 Au 31
+-12 - -12 1979 O
+-11 - -11 1994 D 31
+13 - +13
+Z Pacific/Kiritimati -10:29:20 - LMT 1901
+-10:40 - -1040 1979 O
+-10 - -10 1994 D 31
+14 - +14
+Z Pacific/Majuro 11:24:48 - LMT 1901
+11 - +11 1914 O
+9 - +09 1919 F
+11 - +11 1937
+10 - +10 1941 Ap
+9 - +09 1944 Ja 30
+11 - +11 1969 O
+12 - +12
+Z Pacific/Kwajalein 11:9:20 - LMT 1901
+11 - +11 1937
+10 - +10 1941 Ap
+9 - +09 1944 F 6
+11 - +11 1969 O
+-12 - -12 1993 Au 20 24
+12 - +12
+Z Pacific/Chuuk -13:52:52 - LMT 1844 D 31
+10:7:8 - LMT 1901
+10 - +10 1914 O
+9 - +09 1919 F
+10 - +10 1941 Ap
+9 - +09 1945 Au
+10 - +10
+Z Pacific/Pohnpei -13:27:8 - LMT 1844 D 31
+10:32:52 - LMT 1901
+11 - +11 1914 O
+9 - +09 1919 F
+11 - +11 1937
+10 - +10 1941 Ap
+9 - +09 1945 Au
+11 - +11
+Z Pacific/Kosrae -13:8:4 - LMT 1844 D 31
+10:51:56 - LMT 1901
+11 - +11 1914 O
+9 - +09 1919 F
+11 - +11 1937
+10 - +10 1941 Ap
+9 - +09 1945 Au
+11 - +11 1969 O
+12 - +12 1999
+11 - +11
+Z Pacific/Nauru 11:7:40 - LMT 1921 Ja 15
+11:30 - +1130 1942 Au 29
+9 - +09 1945 S 8
+11:30 - +1130 1979 F 10 2
+12 - +12
+R NC 1977 1978 - D Su>=1 0 1 -
+R NC 1978 1979 - F 27 0 0 -
+R NC 1996 o - D 1 2s 1 -
+R NC 1997 o - Mar 2 2s 0 -
+Z Pacific/Noumea 11:5:48 - LMT 1912 Ja 13
+11 NC +11/+12
+R NZ 1927 o - N 6 2 1 S
+R NZ 1928 o - Mar 4 2 0 M
+R NZ 1928 1933 - O Su>=8 2 0:30 S
+R NZ 1929 1933 - Mar Su>=15 2 0 M
+R NZ 1934 1940 - Ap lastSu 2 0 M
+R NZ 1934 1940 - S lastSu 2 0:30 S
+R NZ 1946 o - Ja 1 0 0 S
+R NZ 1974 o - N Su>=1 2s 1 D
+R k 1974 o - N Su>=1 2:45s 1 -
+R NZ 1975 o - F lastSu 2s 0 S
+R k 1975 o - F lastSu 2:45s 0 -
+R NZ 1975 1988 - O lastSu 2s 1 D
+R k 1975 1988 - O lastSu 2:45s 1 -
+R NZ 1976 1989 - Mar Su>=1 2s 0 S
+R k 1976 1989 - Mar Su>=1 2:45s 0 -
+R NZ 1989 o - O Su>=8 2s 1 D
+R k 1989 o - O Su>=8 2:45s 1 -
+R NZ 1990 2006 - O Su>=1 2s 1 D
+R k 1990 2006 - O Su>=1 2:45s 1 -
+R NZ 1990 2007 - Mar Su>=15 2s 0 S
+R k 1990 2007 - Mar Su>=15 2:45s 0 -
+R NZ 2007 ma - S lastSu 2s 1 D
+R k 2007 ma - S lastSu 2:45s 1 -
+R NZ 2008 ma - Ap Su>=1 2s 0 S
+R k 2008 ma - Ap Su>=1 2:45s 0 -
+Z Pacific/Auckland 11:39:4 - LMT 1868 N 2
+11:30 NZ NZ%sT 1946
+12 NZ NZ%sT
+Z Pacific/Chatham 12:13:48 - LMT 1868 N 2
+12:15 - +1215 1946
+12:45 k +1245/+1345
+L Pacific/Auckland Antarctica/McMurdo
+R CK 1978 o - N 12 0 0:30 -
+R CK 1979 1991 - Mar Su>=1 0 0 -
+R CK 1979 1990 - O lastSu 0 0:30 -
+Z Pacific/Rarotonga 13:20:56 - LMT 1899 D 26
+-10:39:4 - LMT 1952 O 16
+-10:30 - -1030 1978 N 12
+-10 CK -10/-0930
+Z Pacific/Niue -11:19:40 - LMT 1952 O 16
+-11:20 - -1120 1964 Jul
+-11 - -11
+Z Pacific/Norfolk 11:11:52 - LMT 1901
+11:12 - +1112 1951
+11:30 - +1130 1974 O 27 2s
+11:30 1 +1230 1975 Mar 2 2s
+11:30 - +1130 2015 O 4 2s
+11 - +11 2019 Jul
+11 AN +11/+12
+Z Pacific/Palau -15:2:4 - LMT 1844 D 31
+8:57:56 - LMT 1901
+9 - +09
+Z Pacific/Port_Moresby 9:48:40 - LMT 1880
+9:48:32 - PMMT 1895
+10 - +10
+L Pacific/Port_Moresby Antarctica/DumontDUrville
+Z Pacific/Bougainville 10:22:16 - LMT 1880
+9:48:32 - PMMT 1895
+10 - +10 1942 Jul
+9 - +09 1945 Au 21
+10 - +10 2014 D 28 2
+11 - +11
+Z Pacific/Pitcairn -8:40:20 - LMT 1901
+-8:30 - -0830 1998 Ap 27
+-8 - -08
+Z Pacific/Pago_Pago 12:37:12 - LMT 1892 Jul 5
+-11:22:48 - LMT 1911
+-11 - SST
+L Pacific/Pago_Pago Pacific/Midway
+R WS 2010 o - S lastSu 0 1 -
+R WS 2011 o - Ap Sa>=1 4 0 -
+R WS 2011 o - S lastSa 3 1 -
+R WS 2012 2021 - Ap Su>=1 4 0 -
+R WS 2012 2020 - S lastSu 3 1 -
+Z Pacific/Apia 12:33:4 - LMT 1892 Jul 5
+-11:26:56 - LMT 1911
+-11:30 - -1130 1950
+-11 WS -11/-10 2011 D 29 24
+13 WS +13/+14
+Z Pacific/Guadalcanal 10:39:48 - LMT 1912 O
+11 - +11
+Z Pacific/Fakaofo -11:24:56 - LMT 1901
+-11 - -11 2011 D 30
+13 - +13
+R TO 1999 o - O 7 2s 1 -
+R TO 2000 o - Mar 19 2s 0 -
+R TO 2000 2001 - N Su>=1 2 1 -
+R TO 2001 2002 - Ja lastSu 2 0 -
+R TO 2016 o - N Su>=1 2 1 -
+R TO 2017 o - Ja Su>=15 3 0 -
+Z Pacific/Tongatapu 12:19:12 - LMT 1945 S 10
+12:20 - +1220 1961
+13 - +13 1999
+13 TO +13/+14
+Z Pacific/Funafuti 11:56:52 - LMT 1901
+12 - +12
+Z Pacific/Wake 11:6:28 - LMT 1901
+12 - +12
+R VU 1973 o - D 22 12u 1 -
+R VU 1974 o - Mar 30 12u 0 -
+R VU 1983 1991 - S Sa>=22 24 1 -
+R VU 1984 1991 - Mar Sa>=22 24 0 -
+R VU 1992 1993 - Ja Sa>=22 24 0 -
+R VU 1992 o - O Sa>=22 24 1 -
+Z Pacific/Efate 11:13:16 - LMT 1912 Ja 13
+11 VU +11/+12
+Z Pacific/Wallis 12:15:20 - LMT 1901
+12 - +12
+R G 1916 o - May 21 2s 1 BST
+R G 1916 o - O 1 2s 0 GMT
+R G 1917 o - Ap 8 2s 1 BST
+R G 1917 o - S 17 2s 0 GMT
+R G 1918 o - Mar 24 2s 1 BST
+R G 1918 o - S 30 2s 0 GMT
+R G 1919 o - Mar 30 2s 1 BST
+R G 1919 o - S 29 2s 0 GMT
+R G 1920 o - Mar 28 2s 1 BST
+R G 1920 o - O 25 2s 0 GMT
+R G 1921 o - Ap 3 2s 1 BST
+R G 1921 o - O 3 2s 0 GMT
+R G 1922 o - Mar 26 2s 1 BST
+R G 1922 o - O 8 2s 0 GMT
+R G 1923 o - Ap Su>=16 2s 1 BST
+R G 1923 1924 - S Su>=16 2s 0 GMT
+R G 1924 o - Ap Su>=9 2s 1 BST
+R G 1925 1926 - Ap Su>=16 2s 1 BST
+R G 1925 1938 - O Su>=2 2s 0 GMT
+R G 1927 o - Ap Su>=9 2s 1 BST
+R G 1928 1929 - Ap Su>=16 2s 1 BST
+R G 1930 o - Ap Su>=9 2s 1 BST
+R G 1931 1932 - Ap Su>=16 2s 1 BST
+R G 1933 o - Ap Su>=9 2s 1 BST
+R G 1934 o - Ap Su>=16 2s 1 BST
+R G 1935 o - Ap Su>=9 2s 1 BST
+R G 1936 1937 - Ap Su>=16 2s 1 BST
+R G 1938 o - Ap Su>=9 2s 1 BST
+R G 1939 o - Ap Su>=16 2s 1 BST
+R G 1939 o - N Su>=16 2s 0 GMT
+R G 1940 o - F Su>=23 2s 1 BST
+R G 1941 o - May Su>=2 1s 2 BDST
+R G 1941 1943 - Au Su>=9 1s 1 BST
+R G 1942 1944 - Ap Su>=2 1s 2 BDST
+R G 1944 o - S Su>=16 1s 1 BST
+R G 1945 o - Ap M>=2 1s 2 BDST
+R G 1945 o - Jul Su>=9 1s 1 BST
+R G 1945 1946 - O Su>=2 2s 0 GMT
+R G 1946 o - Ap Su>=9 2s 1 BST
+R G 1947 o - Mar 16 2s 1 BST
+R G 1947 o - Ap 13 1s 2 BDST
+R G 1947 o - Au 10 1s 1 BST
+R G 1947 o - N 2 2s 0 GMT
+R G 1948 o - Mar 14 2s 1 BST
+R G 1948 o - O 31 2s 0 GMT
+R G 1949 o - Ap 3 2s 1 BST
+R G 1949 o - O 30 2s 0 GMT
+R G 1950 1952 - Ap Su>=14 2s 1 BST
+R G 1950 1952 - O Su>=21 2s 0 GMT
+R G 1953 o - Ap Su>=16 2s 1 BST
+R G 1953 1960 - O Su>=2 2s 0 GMT
+R G 1954 o - Ap Su>=9 2s 1 BST
+R G 1955 1956 - Ap Su>=16 2s 1 BST
+R G 1957 o - Ap Su>=9 2s 1 BST
+R G 1958 1959 - Ap Su>=16 2s 1 BST
+R G 1960 o - Ap Su>=9 2s 1 BST
+R G 1961 1963 - Mar lastSu 2s 1 BST
+R G 1961 1968 - O Su>=23 2s 0 GMT
+R G 1964 1967 - Mar Su>=19 2s 1 BST
+R G 1968 o - F 18 2s 1 BST
+R G 1972 1980 - Mar Su>=16 2s 1 BST
+R G 1972 1980 - O Su>=23 2s 0 GMT
+R G 1981 1995 - Mar lastSu 1u 1 BST
+R G 1981 1989 - O Su>=23 1u 0 GMT
+R G 1990 1995 - O Su>=22 1u 0 GMT
+Z Europe/London -0:1:15 - LMT 1847 D 1 0s
+0 G %s 1968 O 27
+1 - BST 1971 O 31 2u
+0 G %s 1996
+0 E GMT/BST
+L Europe/London Europe/Jersey
+L Europe/London Europe/Guernsey
+L Europe/London Europe/Isle_of_Man
+R IE 1971 o - O 31 2u -1 -
+R IE 1972 1980 - Mar Su>=16 2u 0 -
+R IE 1972 1980 - O Su>=23 2u -1 -
+R IE 1981 ma - Mar lastSu 1u 0 -
+R IE 1981 1989 - O Su>=23 1u -1 -
+R IE 1990 1995 - O Su>=22 1u -1 -
+R IE 1996 ma - O lastSu 1u -1 -
+Z Europe/Dublin -0:25 - LMT 1880 Au 2
+-0:25:21 - DMT 1916 May 21 2s
+-0:25:21 1 IST 1916 O 1 2s
+0 G %s 1921 D 6
+0 G GMT/IST 1940 F 25 2s
+0 1 IST 1946 O 6 2s
+0 - GMT 1947 Mar 16 2s
+0 1 IST 1947 N 2 2s
+0 - GMT 1948 Ap 18 2s
+0 G GMT/IST 1968 O 27
+1 IE IST/GMT
+R E 1977 1980 - Ap Su>=1 1u 1 S
+R E 1977 o - S lastSu 1u 0 -
+R E 1978 o - O 1 1u 0 -
+R E 1979 1995 - S lastSu 1u 0 -
+R E 1981 ma - Mar lastSu 1u 1 S
+R E 1996 ma - O lastSu 1u 0 -
+R W- 1977 1980 - Ap Su>=1 1s 1 S
+R W- 1977 o - S lastSu 1s 0 -
+R W- 1978 o - O 1 1s 0 -
+R W- 1979 1995 - S lastSu 1s 0 -
+R W- 1981 ma - Mar lastSu 1s 1 S
+R W- 1996 ma - O lastSu 1s 0 -
+R c 1916 o - Ap 30 23 1 S
+R c 1916 o - O 1 1 0 -
+R c 1917 1918 - Ap M>=15 2s 1 S
+R c 1917 1918 - S M>=15 2s 0 -
+R c 1940 o - Ap 1 2s 1 S
+R c 1942 o - N 2 2s 0 -
+R c 1943 o - Mar 29 2s 1 S
+R c 1943 o - O 4 2s 0 -
+R c 1944 1945 - Ap M>=1 2s 1 S
+R c 1944 o - O 2 2s 0 -
+R c 1945 o - S 16 2s 0 -
+R c 1977 1980 - Ap Su>=1 2s 1 S
+R c 1977 o - S lastSu 2s 0 -
+R c 1978 o - O 1 2s 0 -
+R c 1979 1995 - S lastSu 2s 0 -
+R c 1981 ma - Mar lastSu 2s 1 S
+R c 1996 ma - O lastSu 2s 0 -
+R e 1977 1980 - Ap Su>=1 0 1 S
+R e 1977 o - S lastSu 0 0 -
+R e 1978 o - O 1 0 0 -
+R e 1979 1995 - S lastSu 0 0 -
+R e 1981 ma - Mar lastSu 0 1 S
+R e 1996 ma - O lastSu 0 0 -
+R R 1917 o - Jul 1 23 1 MST
+R R 1917 o - D 28 0 0 MMT
+R R 1918 o - May 31 22 2 MDST
+R R 1918 o - S 16 1 1 MST
+R R 1919 o - May 31 23 2 MDST
+R R 1919 o - Jul 1 0u 1 MSD
+R R 1919 o - Au 16 0 0 MSK
+R R 1921 o - F 14 23 1 MSD
+R R 1921 o - Mar 20 23 2 +05
+R R 1921 o - S 1 0 1 MSD
+R R 1921 o - O 1 0 0 -
+R R 1981 1984 - Ap 1 0 1 S
+R R 1981 1983 - O 1 0 0 -
+R R 1984 1995 - S lastSu 2s 0 -
+R R 1985 2010 - Mar lastSu 2s 1 S
+R R 1996 2010 - O lastSu 2s 0 -
+Z WET 0 E WE%sT
+Z CET 1 c CE%sT
+Z MET 1 c ME%sT
+Z EET 2 E EE%sT
+R q 1940 o - Jun 16 0 1 S
+R q 1942 o - N 2 3 0 -
+R q 1943 o - Mar 29 2 1 S
+R q 1943 o - Ap 10 3 0 -
+R q 1974 o - May 4 0 1 S
+R q 1974 o - O 2 0 0 -
+R q 1975 o - May 1 0 1 S
+R q 1975 o - O 2 0 0 -
+R q 1976 o - May 2 0 1 S
+R q 1976 o - O 3 0 0 -
+R q 1977 o - May 8 0 1 S
+R q 1977 o - O 2 0 0 -
+R q 1978 o - May 6 0 1 S
+R q 1978 o - O 1 0 0 -
+R q 1979 o - May 5 0 1 S
+R q 1979 o - S 30 0 0 -
+R q 1980 o - May 3 0 1 S
+R q 1980 o - O 4 0 0 -
+R q 1981 o - Ap 26 0 1 S
+R q 1981 o - S 27 0 0 -
+R q 1982 o - May 2 0 1 S
+R q 1982 o - O 3 0 0 -
+R q 1983 o - Ap 18 0 1 S
+R q 1983 o - O 1 0 0 -
+R q 1984 o - Ap 1 0 1 S
+Z Europe/Tirane 1:19:20 - LMT 1914
+1 - CET 1940 Jun 16
+1 q CE%sT 1984 Jul
+1 E CE%sT
+Z Europe/Andorra 0:6:4 - LMT 1901
+0 - WET 1946 S 30
+1 - CET 1985 Mar 31 2
+1 E CE%sT
+R a 1920 o - Ap 5 2s 1 S
+R a 1920 o - S 13 2s 0 -
+R a 1946 o - Ap 14 2s 1 S
+R a 1946 o - O 7 2s 0 -
+R a 1947 1948 - O Su>=1 2s 0 -
+R a 1947 o - Ap 6 2s 1 S
+R a 1948 o - Ap 18 2s 1 S
+R a 1980 o - Ap 6 0 1 S
+R a 1980 o - S 28 0 0 -
+Z Europe/Vienna 1:5:21 - LMT 1893 Ap
+1 c CE%sT 1920
+1 a CE%sT 1940 Ap 1 2s
+1 c CE%sT 1945 Ap 2 2s
+1 1 CEST 1945 Ap 12 2s
+1 - CET 1946
+1 a CE%sT 1981
+1 E CE%sT
+Z Europe/Minsk 1:50:16 - LMT 1880
+1:50 - MMT 1924 May 2
+2 - EET 1930 Jun 21
+3 - MSK 1941 Jun 28
+1 c CE%sT 1944 Jul 3
+3 R MSK/MSD 1990
+3 - MSK 1991 Mar 31 2s
+2 R EE%sT 2011 Mar 27 2s
+3 - +03
+R b 1918 o - Mar 9 0s 1 S
+R b 1918 1919 - O Sa>=1 23s 0 -
+R b 1919 o - Mar 1 23s 1 S
+R b 1920 o - F 14 23s 1 S
+R b 1920 o - O 23 23s 0 -
+R b 1921 o - Mar 14 23s 1 S
+R b 1921 o - O 25 23s 0 -
+R b 1922 o - Mar 25 23s 1 S
+R b 1922 1927 - O Sa>=1 23s 0 -
+R b 1923 o - Ap 21 23s 1 S
+R b 1924 o - Mar 29 23s 1 S
+R b 1925 o - Ap 4 23s 1 S
+R b 1926 o - Ap 17 23s 1 S
+R b 1927 o - Ap 9 23s 1 S
+R b 1928 o - Ap 14 23s 1 S
+R b 1928 1938 - O Su>=2 2s 0 -
+R b 1929 o - Ap 21 2s 1 S
+R b 1930 o - Ap 13 2s 1 S
+R b 1931 o - Ap 19 2s 1 S
+R b 1932 o - Ap 3 2s 1 S
+R b 1933 o - Mar 26 2s 1 S
+R b 1934 o - Ap 8 2s 1 S
+R b 1935 o - Mar 31 2s 1 S
+R b 1936 o - Ap 19 2s 1 S
+R b 1937 o - Ap 4 2s 1 S
+R b 1938 o - Mar 27 2s 1 S
+R b 1939 o - Ap 16 2s 1 S
+R b 1939 o - N 19 2s 0 -
+R b 1940 o - F 25 2s 1 S
+R b 1944 o - S 17 2s 0 -
+R b 1945 o - Ap 2 2s 1 S
+R b 1945 o - S 16 2s 0 -
+R b 1946 o - May 19 2s 1 S
+R b 1946 o - O 7 2s 0 -
+Z Europe/Brussels 0:17:30 - LMT 1880
+0:17:30 - BMT 1892 May 1 0:17:30
+0 - WET 1914 N 8
+1 - CET 1916 May
+1 c CE%sT 1918 N 11 11u
+0 b WE%sT 1940 May 20 2s
+1 c CE%sT 1944 S 3
+1 b CE%sT 1977
+1 E CE%sT
+R BG 1979 o - Mar 31 23 1 S
+R BG 1979 o - O 1 1 0 -
+R BG 1980 1982 - Ap Sa>=1 23 1 S
+R BG 1980 o - S 29 1 0 -
+R BG 1981 o - S 27 2 0 -
+Z Europe/Sofia 1:33:16 - LMT 1880
+1:56:56 - IMT 1894 N 30
+2 - EET 1942 N 2 3
+1 c CE%sT 1945
+1 - CET 1945 Ap 2 3
+2 - EET 1979 Mar 31 23
+2 BG EE%sT 1982 S 26 3
+2 c EE%sT 1991
+2 e EE%sT 1997
+2 E EE%sT
+R CZ 1945 o - Ap M>=1 2s 1 S
+R CZ 1945 o - O 1 2s 0 -
+R CZ 1946 o - May 6 2s 1 S
+R CZ 1946 1949 - O Su>=1 2s 0 -
+R CZ 1947 1948 - Ap Su>=15 2s 1 S
+R CZ 1949 o - Ap 9 2s 1 S
+Z Europe/Prague 0:57:44 - LMT 1850
+0:57:44 - PMT 1891 O
+1 c CE%sT 1945 May 9
+1 CZ CE%sT 1946 D 1 3
+1 -1 GMT 1947 F 23 2
+1 CZ CE%sT 1979
+1 E CE%sT
+R D 1916 o - May 14 23 1 S
+R D 1916 o - S 30 23 0 -
+R D 1940 o - May 15 0 1 S
+R D 1945 o - Ap 2 2s 1 S
+R D 1945 o - Au 15 2s 0 -
+R D 1946 o - May 1 2s 1 S
+R D 1946 o - S 1 2s 0 -
+R D 1947 o - May 4 2s 1 S
+R D 1947 o - Au 10 2s 0 -
+R D 1948 o - May 9 2s 1 S
+R D 1948 o - Au 8 2s 0 -
+Z Europe/Copenhagen 0:50:20 - LMT 1890
+0:50:20 - CMT 1894
+1 D CE%sT 1942 N 2 2s
+1 c CE%sT 1945 Ap 2 2
+1 D CE%sT 1980
+1 E CE%sT
+Z Atlantic/Faroe -0:27:4 - LMT 1908 Ja 11
+0 - WET 1981
+0 E WE%sT
+R Th 1991 1992 - Mar lastSu 2 1 D
+R Th 1991 1992 - S lastSu 2 0 S
+R Th 1993 2006 - Ap Su>=1 2 1 D
+R Th 1993 2006 - O lastSu 2 0 S
+R Th 2007 ma - Mar Su>=8 2 1 D
+R Th 2007 ma - N Su>=1 2 0 S
+Z America/Danmarkshavn -1:14:40 - LMT 1916 Jul 28
+-3 - -03 1980 Ap 6 2
+-3 E -03/-02 1996
+0 - GMT
+Z America/Scoresbysund -1:27:52 - LMT 1916 Jul 28
+-2 - -02 1980 Ap 6 2
+-2 c -02/-01 1981 Mar 29
+-1 E -01/+00
+Z America/Nuuk -3:26:56 - LMT 1916 Jul 28
+-3 - -03 1980 Ap 6 2
+-3 E -03/-02
+Z America/Thule -4:35:8 - LMT 1916 Jul 28
+-4 Th A%sT
+Z Europe/Tallinn 1:39 - LMT 1880
+1:39 - TMT 1918 F
+1 c CE%sT 1919 Jul
+1:39 - TMT 1921 May
+2 - EET 1940 Au 6
+3 - MSK 1941 S 15
+1 c CE%sT 1944 S 22
+3 R MSK/MSD 1989 Mar 26 2s
+2 1 EEST 1989 S 24 2s
+2 c EE%sT 1998 S 22
+2 E EE%sT 1999 O 31 4
+2 - EET 2002 F 21
+2 E EE%sT
+R FI 1942 o - Ap 2 24 1 S
+R FI 1942 o - O 4 1 0 -
+R FI 1981 1982 - Mar lastSu 2 1 S
+R FI 1981 1982 - S lastSu 3 0 -
+Z Europe/Helsinki 1:39:49 - LMT 1878 May 31
+1:39:49 - HMT 1921 May
+2 FI EE%sT 1983
+2 E EE%sT
+L Europe/Helsinki Europe/Mariehamn
+R F 1916 o - Jun 14 23s 1 S
+R F 1916 1919 - O Su>=1 23s 0 -
+R F 1917 o - Mar 24 23s 1 S
+R F 1918 o - Mar 9 23s 1 S
+R F 1919 o - Mar 1 23s 1 S
+R F 1920 o - F 14 23s 1 S
+R F 1920 o - O 23 23s 0 -
+R F 1921 o - Mar 14 23s 1 S
+R F 1921 o - O 25 23s 0 -
+R F 1922 o - Mar 25 23s 1 S
+R F 1922 1938 - O Sa>=1 23s 0 -
+R F 1923 o - May 26 23s 1 S
+R F 1924 o - Mar 29 23s 1 S
+R F 1925 o - Ap 4 23s 1 S
+R F 1926 o - Ap 17 23s 1 S
+R F 1927 o - Ap 9 23s 1 S
+R F 1928 o - Ap 14 23s 1 S
+R F 1929 o - Ap 20 23s 1 S
+R F 1930 o - Ap 12 23s 1 S
+R F 1931 o - Ap 18 23s 1 S
+R F 1932 o - Ap 2 23s 1 S
+R F 1933 o - Mar 25 23s 1 S
+R F 1934 o - Ap 7 23s 1 S
+R F 1935 o - Mar 30 23s 1 S
+R F 1936 o - Ap 18 23s 1 S
+R F 1937 o - Ap 3 23s 1 S
+R F 1938 o - Mar 26 23s 1 S
+R F 1939 o - Ap 15 23s 1 S
+R F 1939 o - N 18 23s 0 -
+R F 1940 o - F 25 2 1 S
+R F 1941 o - May 5 0 2 M
+R F 1941 o - O 6 0 1 S
+R F 1942 o - Mar 9 0 2 M
+R F 1942 o - N 2 3 1 S
+R F 1943 o - Mar 29 2 2 M
+R F 1943 o - O 4 3 1 S
+R F 1944 o - Ap 3 2 2 M
+R F 1944 o - O 8 1 1 S
+R F 1945 o - Ap 2 2 2 M
+R F 1945 o - S 16 3 0 -
+R F 1976 o - Mar 28 1 1 S
+R F 1976 o - S 26 1 0 -
+Z Europe/Paris 0:9:21 - LMT 1891 Mar 16
+0:9:21 - PMT 1911 Mar 11
+0 F WE%sT 1940 Jun 14 23
+1 c CE%sT 1944 Au 25
+0 F WE%sT 1945 S 16 3
+1 F CE%sT 1977
+1 E CE%sT
+R DE 1946 o - Ap 14 2s 1 S
+R DE 1946 o - O 7 2s 0 -
+R DE 1947 1949 - O Su>=1 2s 0 -
+R DE 1947 o - Ap 6 3s 1 S
+R DE 1947 o - May 11 2s 2 M
+R DE 1947 o - Jun 29 3 1 S
+R DE 1948 o - Ap 18 2s 1 S
+R DE 1949 o - Ap 10 2s 1 S
+R So 1945 o - May 24 2 2 M
+R So 1945 o - S 24 3 1 S
+R So 1945 o - N 18 2s 0 -
+Z Europe/Berlin 0:53:28 - LMT 1893 Ap
+1 c CE%sT 1945 May 24 2
+1 So CE%sT 1946
+1 DE CE%sT 1980
+1 E CE%sT
+L Europe/Zurich Europe/Busingen
+Z Europe/Gibraltar -0:21:24 - LMT 1880 Au 2 0s
+0 G %s 1957 Ap 14 2
+1 - CET 1982
+1 E CE%sT
+R g 1932 o - Jul 7 0 1 S
+R g 1932 o - S 1 0 0 -
+R g 1941 o - Ap 7 0 1 S
+R g 1942 o - N 2 3 0 -
+R g 1943 o - Mar 30 0 1 S
+R g 1943 o - O 4 0 0 -
+R g 1952 o - Jul 1 0 1 S
+R g 1952 o - N 2 0 0 -
+R g 1975 o - Ap 12 0s 1 S
+R g 1975 o - N 26 0s 0 -
+R g 1976 o - Ap 11 2s 1 S
+R g 1976 o - O 10 2s 0 -
+R g 1977 1978 - Ap Su>=1 2s 1 S
+R g 1977 o - S 26 2s 0 -
+R g 1978 o - S 24 4 0 -
+R g 1979 o - Ap 1 9 1 S
+R g 1979 o - S 29 2 0 -
+R g 1980 o - Ap 1 0 1 S
+R g 1980 o - S 28 0 0 -
+Z Europe/Athens 1:34:52 - LMT 1895 S 14
+1:34:52 - AMT 1916 Jul 28 0:1
+2 g EE%sT 1941 Ap 30
+1 g CE%sT 1944 Ap 4
+2 g EE%sT 1981
+2 E EE%sT
+R h 1918 1919 - Ap 15 2 1 S
+R h 1918 1920 - S M>=15 3 0 -
+R h 1920 o - Ap 5 2 1 S
+R h 1945 o - May 1 23 1 S
+R h 1945 o - N 1 1 0 -
+R h 1946 o - Mar 31 2s 1 S
+R h 1946 o - O 7 2 0 -
+R h 1947 1949 - Ap Su>=4 2s 1 S
+R h 1947 1949 - O Su>=1 2s 0 -
+R h 1954 o - May 23 0 1 S
+R h 1954 o - O 3 0 0 -
+R h 1955 o - May 22 2 1 S
+R h 1955 o - O 2 3 0 -
+R h 1956 1957 - Jun Su>=1 2 1 S
+R h 1956 1957 - S lastSu 3 0 -
+R h 1980 o - Ap 6 0 1 S
+R h 1980 o - S 28 1 0 -
+R h 1981 1983 - Mar lastSu 0 1 S
+R h 1981 1983 - S lastSu 1 0 -
+Z Europe/Budapest 1:16:20 - LMT 1890 N
+1 c CE%sT 1918
+1 h CE%sT 1941 Ap 7 23
+1 c CE%sT 1945
+1 h CE%sT 1984
+1 E CE%sT
+R w 1917 1919 - F 19 23 1 -
+R w 1917 o - O 21 1 0 -
+R w 1918 1919 - N 16 1 0 -
+R w 1921 o - Mar 19 23 1 -
+R w 1921 o - Jun 23 1 0 -
+R w 1939 o - Ap 29 23 1 -
+R w 1939 o - O 29 2 0 -
+R w 1940 o - F 25 2 1 -
+R w 1940 1941 - N Su>=2 1s 0 -
+R w 1941 1942 - Mar Su>=2 1s 1 -
+R w 1943 1946 - Mar Su>=1 1s 1 -
+R w 1942 1948 - O Su>=22 1s 0 -
+R w 1947 1967 - Ap Su>=1 1s 1 -
+R w 1949 o - O 30 1s 0 -
+R w 1950 1966 - O Su>=22 1s 0 -
+R w 1967 o - O 29 1s 0 -
+Z Atlantic/Reykjavik -1:28 - LMT 1908
+-1 w -01/+00 1968 Ap 7 1s
+0 - GMT
+R I 1916 o - Jun 3 24 1 S
+R I 1916 1917 - S 30 24 0 -
+R I 1917 o - Mar 31 24 1 S
+R I 1918 o - Mar 9 24 1 S
+R I 1918 o - O 6 24 0 -
+R I 1919 o - Mar 1 24 1 S
+R I 1919 o - O 4 24 0 -
+R I 1920 o - Mar 20 24 1 S
+R I 1920 o - S 18 24 0 -
+R I 1940 o - Jun 14 24 1 S
+R I 1942 o - N 2 2s 0 -
+R I 1943 o - Mar 29 2s 1 S
+R I 1943 o - O 4 2s 0 -
+R I 1944 o - Ap 2 2s 1 S
+R I 1944 o - S 17 2s 0 -
+R I 1945 o - Ap 2 2 1 S
+R I 1945 o - S 15 1 0 -
+R I 1946 o - Mar 17 2s 1 S
+R I 1946 o - O 6 2s 0 -
+R I 1947 o - Mar 16 0s 1 S
+R I 1947 o - O 5 0s 0 -
+R I 1948 o - F 29 2s 1 S
+R I 1948 o - O 3 2s 0 -
+R I 1966 1968 - May Su>=22 0s 1 S
+R I 1966 o - S 24 24 0 -
+R I 1967 1969 - S Su>=22 0s 0 -
+R I 1969 o - Jun 1 0s 1 S
+R I 1970 o - May 31 0s 1 S
+R I 1970 o - S lastSu 0s 0 -
+R I 1971 1972 - May Su>=22 0s 1 S
+R I 1971 o - S lastSu 0s 0 -
+R I 1972 o - O 1 0s 0 -
+R I 1973 o - Jun 3 0s 1 S
+R I 1973 1974 - S lastSu 0s 0 -
+R I 1974 o - May 26 0s 1 S
+R I 1975 o - Jun 1 0s 1 S
+R I 1975 1977 - S lastSu 0s 0 -
+R I 1976 o - May 30 0s 1 S
+R I 1977 1979 - May Su>=22 0s 1 S
+R I 1978 o - O 1 0s 0 -
+R I 1979 o - S 30 0s 0 -
+Z Europe/Rome 0:49:56 - LMT 1866 D 12
+0:49:56 - RMT 1893 O 31 23:49:56
+1 I CE%sT 1943 S 10
+1 c CE%sT 1944 Jun 4
+1 I CE%sT 1980
+1 E CE%sT
+L Europe/Rome Europe/Vatican
+L Europe/Rome Europe/San_Marino
+R LV 1989 1996 - Mar lastSu 2s 1 S
+R LV 1989 1996 - S lastSu 2s 0 -
+Z Europe/Riga 1:36:34 - LMT 1880
+1:36:34 - RMT 1918 Ap 15 2
+1:36:34 1 LST 1918 S 16 3
+1:36:34 - RMT 1919 Ap 1 2
+1:36:34 1 LST 1919 May 22 3
+1:36:34 - RMT 1926 May 11
+2 - EET 1940 Au 5
+3 - MSK 1941 Jul
+1 c CE%sT 1944 O 13
+3 R MSK/MSD 1989 Mar lastSu 2s
+2 1 EEST 1989 S lastSu 2s
+2 LV EE%sT 1997 Ja 21
+2 E EE%sT 2000 F 29
+2 - EET 2001 Ja 2
+2 E EE%sT
+L Europe/Zurich Europe/Vaduz
+Z Europe/Vilnius 1:41:16 - LMT 1880
+1:24 - WMT 1917
+1:35:36 - KMT 1919 O 10
+1 - CET 1920 Jul 12
+2 - EET 1920 O 9
+1 - CET 1940 Au 3
+3 - MSK 1941 Jun 24
+1 c CE%sT 1944 Au
+3 R MSK/MSD 1989 Mar 26 2s
+2 R EE%sT 1991 S 29 2s
+2 c EE%sT 1998
+2 - EET 1998 Mar 29 1u
+1 E CE%sT 1999 O 31 1u
+2 - EET 2003
+2 E EE%sT
+R LX 1916 o - May 14 23 1 S
+R LX 1916 o - O 1 1 0 -
+R LX 1917 o - Ap 28 23 1 S
+R LX 1917 o - S 17 1 0 -
+R LX 1918 o - Ap M>=15 2s 1 S
+R LX 1918 o - S M>=15 2s 0 -
+R LX 1919 o - Mar 1 23 1 S
+R LX 1919 o - O 5 3 0 -
+R LX 1920 o - F 14 23 1 S
+R LX 1920 o - O 24 2 0 -
+R LX 1921 o - Mar 14 23 1 S
+R LX 1921 o - O 26 2 0 -
+R LX 1922 o - Mar 25 23 1 S
+R LX 1922 o - O Su>=2 1 0 -
+R LX 1923 o - Ap 21 23 1 S
+R LX 1923 o - O Su>=2 2 0 -
+R LX 1924 o - Mar 29 23 1 S
+R LX 1924 1928 - O Su>=2 1 0 -
+R LX 1925 o - Ap 5 23 1 S
+R LX 1926 o - Ap 17 23 1 S
+R LX 1927 o - Ap 9 23 1 S
+R LX 1928 o - Ap 14 23 1 S
+R LX 1929 o - Ap 20 23 1 S
+Z Europe/Luxembourg 0:24:36 - LMT 1904 Jun
+1 LX CE%sT 1918 N 25
+0 LX WE%sT 1929 O 6 2s
+0 b WE%sT 1940 May 14 3
+1 c WE%sT 1944 S 18 3
+1 b CE%sT 1977
+1 E CE%sT
+R MT 1973 o - Mar 31 0s 1 S
+R MT 1973 o - S 29 0s 0 -
+R MT 1974 o - Ap 21 0s 1 S
+R MT 1974 o - S 16 0s 0 -
+R MT 1975 1979 - Ap Su>=15 2 1 S
+R MT 1975 1980 - S Su>=15 2 0 -
+R MT 1980 o - Mar 31 2 1 S
+Z Europe/Malta 0:58:4 - LMT 1893 N 2 0s
+1 I CE%sT 1973 Mar 31
+1 MT CE%sT 1981
+1 E CE%sT
+R MD 1997 ma - Mar lastSu 2 1 S
+R MD 1997 ma - O lastSu 3 0 -
+Z Europe/Chisinau 1:55:20 - LMT 1880
+1:55 - CMT 1918 F 15
+1:44:24 - BMT 1931 Jul 24
+2 z EE%sT 1940 Au 15
+2 1 EEST 1941 Jul 17
+1 c CE%sT 1944 Au 24
+3 R MSK/MSD 1990 May 6 2
+2 R EE%sT 1992
+2 e EE%sT 1997
+2 MD EE%sT
+Z Europe/Monaco 0:29:32 - LMT 1892 Jun
+0:9:21 - PMT 1911 Mar 29
+0 F WE%sT 1945 S 16 3
+1 F CE%sT 1977
+1 E CE%sT
+R N 1916 o - May 1 0 1 NST
+R N 1916 o - O 1 0 0 AMT
+R N 1917 o - Ap 16 2s 1 NST
+R N 1917 o - S 17 2s 0 AMT
+R N 1918 1921 - Ap M>=1 2s 1 NST
+R N 1918 1921 - S lastM 2s 0 AMT
+R N 1922 o - Mar lastSu 2s 1 NST
+R N 1922 1936 - O Su>=2 2s 0 AMT
+R N 1923 o - Jun F>=1 2s 1 NST
+R N 1924 o - Mar lastSu 2s 1 NST
+R N 1925 o - Jun F>=1 2s 1 NST
+R N 1926 1931 - May 15 2s 1 NST
+R N 1932 o - May 22 2s 1 NST
+R N 1933 1936 - May 15 2s 1 NST
+R N 1937 o - May 22 2s 1 NST
+R N 1937 o - Jul 1 0 1 S
+R N 1937 1939 - O Su>=2 2s 0 -
+R N 1938 1939 - May 15 2s 1 S
+R N 1945 o - Ap 2 2s 1 S
+R N 1945 o - S 16 2s 0 -
+Z Europe/Amsterdam 0:19:32 - LMT 1835
+0:19:32 N %s 1937 Jul
+0:20 N +0020/+0120 1940 May 16
+1 c CE%sT 1945 Ap 2 2
+1 N CE%sT 1977
+1 E CE%sT
+R NO 1916 o - May 22 1 1 S
+R NO 1916 o - S 30 0 0 -
+R NO 1945 o - Ap 2 2s 1 S
+R NO 1945 o - O 1 2s 0 -
+R NO 1959 1964 - Mar Su>=15 2s 1 S
+R NO 1959 1965 - S Su>=15 2s 0 -
+R NO 1965 o - Ap 25 2s 1 S
+Z Europe/Oslo 0:43 - LMT 1895
+1 NO CE%sT 1940 Au 10 23
+1 c CE%sT 1945 Ap 2 2
+1 NO CE%sT 1980
+1 E CE%sT
+L Europe/Oslo Arctic/Longyearbyen
+R O 1918 1919 - S 16 2s 0 -
+R O 1919 o - Ap 15 2s 1 S
+R O 1944 o - Ap 3 2s 1 S
+R O 1944 o - O 4 2 0 -
+R O 1945 o - Ap 29 0 1 S
+R O 1945 o - N 1 0 0 -
+R O 1946 o - Ap 14 0s 1 S
+R O 1946 o - O 7 2s 0 -
+R O 1947 o - May 4 2s 1 S
+R O 1947 1949 - O Su>=1 2s 0 -
+R O 1948 o - Ap 18 2s 1 S
+R O 1949 o - Ap 10 2s 1 S
+R O 1957 o - Jun 2 1s 1 S
+R O 1957 1958 - S lastSu 1s 0 -
+R O 1958 o - Mar 30 1s 1 S
+R O 1959 o - May 31 1s 1 S
+R O 1959 1961 - O Su>=1 1s 0 -
+R O 1960 o - Ap 3 1s 1 S
+R O 1961 1964 - May lastSu 1s 1 S
+R O 1962 1964 - S lastSu 1s 0 -
+Z Europe/Warsaw 1:24 - LMT 1880
+1:24 - WMT 1915 Au 5
+1 c CE%sT 1918 S 16 3
+2 O EE%sT 1922 Jun
+1 O CE%sT 1940 Jun 23 2
+1 c CE%sT 1944 O
+1 O CE%sT 1977
+1 W- CE%sT 1988
+1 E CE%sT
+R p 1916 o - Jun 17 23 1 S
+R p 1916 o - N 1 1 0 -
+R p 1917 o - F 28 23s 1 S
+R p 1917 1921 - O 14 23s 0 -
+R p 1918 o - Mar 1 23s 1 S
+R p 1919 o - F 28 23s 1 S
+R p 1920 o - F 29 23s 1 S
+R p 1921 o - F 28 23s 1 S
+R p 1924 o - Ap 16 23s 1 S
+R p 1924 o - O 14 23s 0 -
+R p 1926 o - Ap 17 23s 1 S
+R p 1926 1929 - O Sa>=1 23s 0 -
+R p 1927 o - Ap 9 23s 1 S
+R p 1928 o - Ap 14 23s 1 S
+R p 1929 o - Ap 20 23s 1 S
+R p 1931 o - Ap 18 23s 1 S
+R p 1931 1932 - O Sa>=1 23s 0 -
+R p 1932 o - Ap 2 23s 1 S
+R p 1934 o - Ap 7 23s 1 S
+R p 1934 1938 - O Sa>=1 23s 0 -
+R p 1935 o - Mar 30 23s 1 S
+R p 1936 o - Ap 18 23s 1 S
+R p 1937 o - Ap 3 23s 1 S
+R p 1938 o - Mar 26 23s 1 S
+R p 1939 o - Ap 15 23s 1 S
+R p 1939 o - N 18 23s 0 -
+R p 1940 o - F 24 23s 1 S
+R p 1940 1941 - O 5 23s 0 -
+R p 1941 o - Ap 5 23s 1 S
+R p 1942 1945 - Mar Sa>=8 23s 1 S
+R p 1942 o - Ap 25 22s 2 M
+R p 1942 o - Au 15 22s 1 S
+R p 1942 1945 - O Sa>=24 23s 0 -
+R p 1943 o - Ap 17 22s 2 M
+R p 1943 1945 - Au Sa>=25 22s 1 S
+R p 1944 1945 - Ap Sa>=21 22s 2 M
+R p 1946 o - Ap Sa>=1 23s 1 S
+R p 1946 o - O Sa>=1 23s 0 -
+R p 1947 1965 - Ap Su>=1 2s 1 S
+R p 1947 1965 - O Su>=1 2s 0 -
+R p 1977 o - Mar 27 0s 1 S
+R p 1977 o - S 25 0s 0 -
+R p 1978 1979 - Ap Su>=1 0s 1 S
+R p 1978 o - O 1 0s 0 -
+R p 1979 1982 - S lastSu 1s 0 -
+R p 1980 o - Mar lastSu 0s 1 S
+R p 1981 1982 - Mar lastSu 1s 1 S
+R p 1983 o - Mar lastSu 2s 1 S
+Z Europe/Lisbon -0:36:45 - LMT 1884
+-0:36:45 - LMT 1912 Ja 1 0u
+0 p WE%sT 1966 Ap 3 2
+1 - CET 1976 S 26 1
+0 p WE%sT 1983 S 25 1s
+0 W- WE%sT 1992 S 27 1s
+1 E CE%sT 1996 Mar 31 1u
+0 E WE%sT
+Z Atlantic/Azores -1:42:40 - LMT 1884
+-1:54:32 - HMT 1912 Ja 1 2u
+-2 p -02/-01 1942 Ap 25 22s
+-2 p +00 1942 Au 15 22s
+-2 p -02/-01 1943 Ap 17 22s
+-2 p +00 1943 Au 28 22s
+-2 p -02/-01 1944 Ap 22 22s
+-2 p +00 1944 Au 26 22s
+-2 p -02/-01 1945 Ap 21 22s
+-2 p +00 1945 Au 25 22s
+-2 p -02/-01 1966 Ap 3 2
+-1 p -01/+00 1983 S 25 1s
+-1 W- -01/+00 1992 S 27 1s
+0 E WE%sT 1993 Mar 28 1u
+-1 E -01/+00
+Z Atlantic/Madeira -1:7:36 - LMT 1884
+-1:7:36 - FMT 1912 Ja 1 1u
+-1 p -01/+00 1942 Ap 25 22s
+-1 p +01 1942 Au 15 22s
+-1 p -01/+00 1943 Ap 17 22s
+-1 p +01 1943 Au 28 22s
+-1 p -01/+00 1944 Ap 22 22s
+-1 p +01 1944 Au 26 22s
+-1 p -01/+00 1945 Ap 21 22s
+-1 p +01 1945 Au 25 22s
+-1 p -01/+00 1966 Ap 3 2
+0 p WE%sT 1983 S 25 1s
+0 E WE%sT
+R z 1932 o - May 21 0s 1 S
+R z 1932 1939 - O Su>=1 0s 0 -
+R z 1933 1939 - Ap Su>=2 0s 1 S
+R z 1979 o - May 27 0 1 S
+R z 1979 o - S lastSu 0 0 -
+R z 1980 o - Ap 5 23 1 S
+R z 1980 o - S lastSu 1 0 -
+R z 1991 1993 - Mar lastSu 0s 1 S
+R z 1991 1993 - S lastSu 0s 0 -
+Z Europe/Bucharest 1:44:24 - LMT 1891 O
+1:44:24 - BMT 1931 Jul 24
+2 z EE%sT 1981 Mar 29 2s
+2 c EE%sT 1991
+2 z EE%sT 1994
+2 e EE%sT 1997
+2 E EE%sT
+Z Europe/Kaliningrad 1:22 - LMT 1893 Ap
+1 c CE%sT 1945 Ap 10
+2 O EE%sT 1946 Ap 7
+3 R MSK/MSD 1989 Mar 26 2s
+2 R EE%sT 2011 Mar 27 2s
+3 - +03 2014 O 26 2s
+2 - EET
+Z Europe/Moscow 2:30:17 - LMT 1880
+2:30:17 - MMT 1916 Jul 3
+2:31:19 R %s 1919 Jul 1 0u
+3 R %s 1921 O
+3 R MSK/MSD 1922 O
+2 - EET 1930 Jun 21
+3 R MSK/MSD 1991 Mar 31 2s
+2 R EE%sT 1992 Ja 19 2s
+3 R MSK/MSD 2011 Mar 27 2s
+4 - MSK 2014 O 26 2s
+3 - MSK
+Z Europe/Simferopol 2:16:24 - LMT 1880
+2:16 - SMT 1924 May 2
+2 - EET 1930 Jun 21
+3 - MSK 1941 N
+1 c CE%sT 1944 Ap 13
+3 R MSK/MSD 1990
+3 - MSK 1990 Jul 1 2
+2 - EET 1992
+2 e EE%sT 1994 May
+3 e MSK/MSD 1996 Mar 31 0s
+3 1 MSD 1996 O 27 3s
+3 R MSK/MSD 1997
+3 - MSK 1997 Mar lastSu 1u
+2 E EE%sT 2014 Mar 30 2
+4 - MSK 2014 O 26 2s
+3 - MSK
+Z Europe/Astrakhan 3:12:12 - LMT 1924 May
+3 - +03 1930 Jun 21
+4 R +04/+05 1989 Mar 26 2s
+3 R +03/+04 1991 Mar 31 2s
+4 - +04 1992 Mar 29 2s
+3 R +03/+04 2011 Mar 27 2s
+4 - +04 2014 O 26 2s
+3 - +03 2016 Mar 27 2s
+4 - +04
+Z Europe/Volgograd 2:57:40 - LMT 1920 Ja 3
+3 - +03 1930 Jun 21
+4 - +04 1961 N 11
+4 R +04/+05 1988 Mar 27 2s
+3 R +03/+04 1991 Mar 31 2s
+4 - +04 1992 Mar 29 2s
+3 R +03/+04 2011 Mar 27 2s
+4 - +04 2014 O 26 2s
+3 - +03 2018 O 28 2s
+4 - +04 2020 D 27 2s
+3 - +03
+Z Europe/Saratov 3:4:18 - LMT 1919 Jul 1 0u
+3 - +03 1930 Jun 21
+4 R +04/+05 1988 Mar 27 2s
+3 R +03/+04 1991 Mar 31 2s
+4 - +04 1992 Mar 29 2s
+3 R +03/+04 2011 Mar 27 2s
+4 - +04 2014 O 26 2s
+3 - +03 2016 D 4 2s
+4 - +04
+Z Europe/Kirov 3:18:48 - LMT 1919 Jul 1 0u
+3 - +03 1930 Jun 21
+4 R +04/+05 1989 Mar 26 2s
+3 R +03/+04 1991 Mar 31 2s
+4 - +04 1992 Mar 29 2s
+3 R +03/+04 2011 Mar 27 2s
+4 - +04 2014 O 26 2s
+3 - +03
+Z Europe/Samara 3:20:20 - LMT 1919 Jul 1 0u
+3 - +03 1930 Jun 21
+4 - +04 1935 Ja 27
+4 R +04/+05 1989 Mar 26 2s
+3 R +03/+04 1991 Mar 31 2s
+2 R +02/+03 1991 S 29 2s
+3 - +03 1991 O 20 3
+4 R +04/+05 2010 Mar 28 2s
+3 R +03/+04 2011 Mar 27 2s
+4 - +04
+Z Europe/Ulyanovsk 3:13:36 - LMT 1919 Jul 1 0u
+3 - +03 1930 Jun 21
+4 R +04/+05 1989 Mar 26 2s
+3 R +03/+04 1991 Mar 31 2s
+2 R +02/+03 1992 Ja 19 2s
+3 R +03/+04 2011 Mar 27 2s
+4 - +04 2014 O 26 2s
+3 - +03 2016 Mar 27 2s
+4 - +04
+Z Asia/Yekaterinburg 4:2:33 - LMT 1916 Jul 3
+3:45:5 - PMT 1919 Jul 15 4
+4 - +04 1930 Jun 21
+5 R +05/+06 1991 Mar 31 2s
+4 R +04/+05 1992 Ja 19 2s
+5 R +05/+06 2011 Mar 27 2s
+6 - +06 2014 O 26 2s
+5 - +05
+Z Asia/Omsk 4:53:30 - LMT 1919 N 14
+5 - +05 1930 Jun 21
+6 R +06/+07 1991 Mar 31 2s
+5 R +05/+06 1992 Ja 19 2s
+6 R +06/+07 2011 Mar 27 2s
+7 - +07 2014 O 26 2s
+6 - +06
+Z Asia/Barnaul 5:35 - LMT 1919 D 10
+6 - +06 1930 Jun 21
+7 R +07/+08 1991 Mar 31 2s
+6 R +06/+07 1992 Ja 19 2s
+7 R +07/+08 1995 May 28
+6 R +06/+07 2011 Mar 27 2s
+7 - +07 2014 O 26 2s
+6 - +06 2016 Mar 27 2s
+7 - +07
+Z Asia/Novosibirsk 5:31:40 - LMT 1919 D 14 6
+6 - +06 1930 Jun 21
+7 R +07/+08 1991 Mar 31 2s
+6 R +06/+07 1992 Ja 19 2s
+7 R +07/+08 1993 May 23
+6 R +06/+07 2011 Mar 27 2s
+7 - +07 2014 O 26 2s
+6 - +06 2016 Jul 24 2s
+7 - +07
+Z Asia/Tomsk 5:39:51 - LMT 1919 D 22
+6 - +06 1930 Jun 21
+7 R +07/+08 1991 Mar 31 2s
+6 R +06/+07 1992 Ja 19 2s
+7 R +07/+08 2002 May 1 3
+6 R +06/+07 2011 Mar 27 2s
+7 - +07 2014 O 26 2s
+6 - +06 2016 May 29 2s
+7 - +07
+Z Asia/Novokuznetsk 5:48:48 - LMT 1924 May
+6 - +06 1930 Jun 21
+7 R +07/+08 1991 Mar 31 2s
+6 R +06/+07 1992 Ja 19 2s
+7 R +07/+08 2010 Mar 28 2s
+6 R +06/+07 2011 Mar 27 2s
+7 - +07
+Z Asia/Krasnoyarsk 6:11:26 - LMT 1920 Ja 6
+6 - +06 1930 Jun 21
+7 R +07/+08 1991 Mar 31 2s
+6 R +06/+07 1992 Ja 19 2s
+7 R +07/+08 2011 Mar 27 2s
+8 - +08 2014 O 26 2s
+7 - +07
+Z Asia/Irkutsk 6:57:5 - LMT 1880
+6:57:5 - IMT 1920 Ja 25
+7 - +07 1930 Jun 21
+8 R +08/+09 1991 Mar 31 2s
+7 R +07/+08 1992 Ja 19 2s
+8 R +08/+09 2011 Mar 27 2s
+9 - +09 2014 O 26 2s
+8 - +08
+Z Asia/Chita 7:33:52 - LMT 1919 D 15
+8 - +08 1930 Jun 21
+9 R +09/+10 1991 Mar 31 2s
+8 R +08/+09 1992 Ja 19 2s
+9 R +09/+10 2011 Mar 27 2s
+10 - +10 2014 O 26 2s
+8 - +08 2016 Mar 27 2
+9 - +09
+Z Asia/Yakutsk 8:38:58 - LMT 1919 D 15
+8 - +08 1930 Jun 21
+9 R +09/+10 1991 Mar 31 2s
+8 R +08/+09 1992 Ja 19 2s
+9 R +09/+10 2011 Mar 27 2s
+10 - +10 2014 O 26 2s
+9 - +09
+Z Asia/Vladivostok 8:47:31 - LMT 1922 N 15
+9 - +09 1930 Jun 21
+10 R +10/+11 1991 Mar 31 2s
+9 R +09/+10 1992 Ja 19 2s
+10 R +10/+11 2011 Mar 27 2s
+11 - +11 2014 O 26 2s
+10 - +10
+Z Asia/Khandyga 9:2:13 - LMT 1919 D 15
+8 - +08 1930 Jun 21
+9 R +09/+10 1991 Mar 31 2s
+8 R +08/+09 1992 Ja 19 2s
+9 R +09/+10 2004
+10 R +10/+11 2011 Mar 27 2s
+11 - +11 2011 S 13 0s
+10 - +10 2014 O 26 2s
+9 - +09
+Z Asia/Sakhalin 9:30:48 - LMT 1905 Au 23
+9 - +09 1945 Au 25
+11 R +11/+12 1991 Mar 31 2s
+10 R +10/+11 1992 Ja 19 2s
+11 R +11/+12 1997 Mar lastSu 2s
+10 R +10/+11 2011 Mar 27 2s
+11 - +11 2014 O 26 2s
+10 - +10 2016 Mar 27 2s
+11 - +11
+Z Asia/Magadan 10:3:12 - LMT 1924 May 2
+10 - +10 1930 Jun 21
+11 R +11/+12 1991 Mar 31 2s
+10 R +10/+11 1992 Ja 19 2s
+11 R +11/+12 2011 Mar 27 2s
+12 - +12 2014 O 26 2s
+10 - +10 2016 Ap 24 2s
+11 - +11
+Z Asia/Srednekolymsk 10:14:52 - LMT 1924 May 2
+10 - +10 1930 Jun 21
+11 R +11/+12 1991 Mar 31 2s
+10 R +10/+11 1992 Ja 19 2s
+11 R +11/+12 2011 Mar 27 2s
+12 - +12 2014 O 26 2s
+11 - +11
+Z Asia/Ust-Nera 9:32:54 - LMT 1919 D 15
+8 - +08 1930 Jun 21
+9 R +09/+10 1981 Ap
+11 R +11/+12 1991 Mar 31 2s
+10 R +10/+11 1992 Ja 19 2s
+11 R +11/+12 2011 Mar 27 2s
+12 - +12 2011 S 13 0s
+11 - +11 2014 O 26 2s
+10 - +10
+Z Asia/Kamchatka 10:34:36 - LMT 1922 N 10
+11 - +11 1930 Jun 21
+12 R +12/+13 1991 Mar 31 2s
+11 R +11/+12 1992 Ja 19 2s
+12 R +12/+13 2010 Mar 28 2s
+11 R +11/+12 2011 Mar 27 2s
+12 - +12
+Z Asia/Anadyr 11:49:56 - LMT 1924 May 2
+12 - +12 1930 Jun 21
+13 R +13/+14 1982 Ap 1 0s
+12 R +12/+13 1991 Mar 31 2s
+11 R +11/+12 1992 Ja 19 2s
+12 R +12/+13 2010 Mar 28 2s
+11 R +11/+12 2011 Mar 27 2s
+12 - +12
+Z Europe/Belgrade 1:22 - LMT 1884
+1 - CET 1941 Ap 18 23
+1 c CE%sT 1945
+1 - CET 1945 May 8 2s
+1 1 CEST 1945 S 16 2s
+1 - CET 1982 N 27
+1 E CE%sT
+L Europe/Belgrade Europe/Ljubljana
+L Europe/Belgrade Europe/Podgorica
+L Europe/Belgrade Europe/Sarajevo
+L Europe/Belgrade Europe/Skopje
+L Europe/Belgrade Europe/Zagreb
+L Europe/Prague Europe/Bratislava
+R s 1918 o - Ap 15 23 1 S
+R s 1918 1919 - O 6 24s 0 -
+R s 1919 o - Ap 6 23 1 S
+R s 1924 o - Ap 16 23 1 S
+R s 1924 o - O 4 24s 0 -
+R s 1926 o - Ap 17 23 1 S
+R s 1926 1929 - O Sa>=1 24s 0 -
+R s 1927 o - Ap 9 23 1 S
+R s 1928 o - Ap 15 0 1 S
+R s 1929 o - Ap 20 23 1 S
+R s 1937 o - Jun 16 23 1 S
+R s 1937 o - O 2 24s 0 -
+R s 1938 o - Ap 2 23 1 S
+R s 1938 o - Ap 30 23 2 M
+R s 1938 o - O 2 24 1 S
+R s 1939 o - O 7 24s 0 -
+R s 1942 o - May 2 23 1 S
+R s 1942 o - S 1 1 0 -
+R s 1943 1946 - Ap Sa>=13 23 1 S
+R s 1943 1944 - O Su>=1 1 0 -
+R s 1945 1946 - S lastSu 1 0 -
+R s 1949 o - Ap 30 23 1 S
+R s 1949 o - O 2 1 0 -
+R s 1974 1975 - Ap Sa>=12 23 1 S
+R s 1974 1975 - O Su>=1 1 0 -
+R s 1976 o - Mar 27 23 1 S
+R s 1976 1977 - S lastSu 1 0 -
+R s 1977 o - Ap 2 23 1 S
+R s 1978 o - Ap 2 2s 1 S
+R s 1978 o - O 1 2s 0 -
+R Sp 1967 o - Jun 3 12 1 S
+R Sp 1967 o - O 1 0 0 -
+R Sp 1974 o - Jun 24 0 1 S
+R Sp 1974 o - S 1 0 0 -
+R Sp 1976 1977 - May 1 0 1 S
+R Sp 1976 o - Au 1 0 0 -
+R Sp 1977 o - S 28 0 0 -
+R Sp 1978 o - Jun 1 0 1 S
+R Sp 1978 o - Au 4 0 0 -
+Z Europe/Madrid -0:14:44 - LMT 1900 D 31 23:45:16
+0 s WE%sT 1940 Mar 16 23
+1 s CE%sT 1979
+1 E CE%sT
+Z Africa/Ceuta -0:21:16 - LMT 1900 D 31 23:38:44
+0 - WET 1918 May 6 23
+0 1 WEST 1918 O 7 23
+0 - WET 1924
+0 s WE%sT 1929
+0 - WET 1967
+0 Sp WE%sT 1984 Mar 16
+1 - CET 1986
+1 E CE%sT
+Z Atlantic/Canary -1:1:36 - LMT 1922 Mar
+-1 - -01 1946 S 30 1
+0 - WET 1980 Ap 6 0s
+0 1 WEST 1980 S 28 1u
+0 E WE%sT
+Z Europe/Stockholm 1:12:12 - LMT 1879
+1:0:14 - SET 1900
+1 - CET 1916 May 14 23
+1 1 CEST 1916 O 1 1
+1 - CET 1980
+1 E CE%sT
+R CH 1941 1942 - May M>=1 1 1 S
+R CH 1941 1942 - O M>=1 2 0 -
+Z Europe/Zurich 0:34:8 - LMT 1853 Jul 16
+0:29:46 - BMT 1894 Jun
+1 CH CE%sT 1981
+1 E CE%sT
+R T 1916 o - May 1 0 1 S
+R T 1916 o - O 1 0 0 -
+R T 1920 o - Mar 28 0 1 S
+R T 1920 o - O 25 0 0 -
+R T 1921 o - Ap 3 0 1 S
+R T 1921 o - O 3 0 0 -
+R T 1922 o - Mar 26 0 1 S
+R T 1922 o - O 8 0 0 -
+R T 1924 o - May 13 0 1 S
+R T 1924 1925 - O 1 0 0 -
+R T 1925 o - May 1 0 1 S
+R T 1940 o - Jul 1 0 1 S
+R T 1940 o - O 6 0 0 -
+R T 1940 o - D 1 0 1 S
+R T 1941 o - S 21 0 0 -
+R T 1942 o - Ap 1 0 1 S
+R T 1945 o - O 8 0 0 -
+R T 1946 o - Jun 1 0 1 S
+R T 1946 o - O 1 0 0 -
+R T 1947 1948 - Ap Su>=16 0 1 S
+R T 1947 1951 - O Su>=2 0 0 -
+R T 1949 o - Ap 10 0 1 S
+R T 1950 o - Ap 16 0 1 S
+R T 1951 o - Ap 22 0 1 S
+R T 1962 o - Jul 15 0 1 S
+R T 1963 o - O 30 0 0 -
+R T 1964 o - May 15 0 1 S
+R T 1964 o - O 1 0 0 -
+R T 1973 o - Jun 3 1 1 S
+R T 1973 1976 - O Su>=31 2 0 -
+R T 1974 o - Mar 31 2 1 S
+R T 1975 o - Mar 22 2 1 S
+R T 1976 o - Mar 21 2 1 S
+R T 1977 1978 - Ap Su>=1 2 1 S
+R T 1977 1978 - O Su>=15 2 0 -
+R T 1978 o - Jun 29 0 0 -
+R T 1983 o - Jul 31 2 1 S
+R T 1983 o - O 2 2 0 -
+R T 1985 o - Ap 20 1s 1 S
+R T 1985 o - S 28 1s 0 -
+R T 1986 1993 - Mar lastSu 1s 1 S
+R T 1986 1995 - S lastSu 1s 0 -
+R T 1994 o - Mar 20 1s 1 S
+R T 1995 2006 - Mar lastSu 1s 1 S
+R T 1996 2006 - O lastSu 1s 0 -
+Z Europe/Istanbul 1:55:52 - LMT 1880
+1:56:56 - IMT 1910 O
+2 T EE%sT 1978 Jun 29
+3 T +03/+04 1984 N 1 2
+2 T EE%sT 2007
+2 E EE%sT 2011 Mar 27 1u
+2 - EET 2011 Mar 28 1u
+2 E EE%sT 2014 Mar 30 1u
+2 - EET 2014 Mar 31 1u
+2 E EE%sT 2015 O 25 1u
+2 1 EEST 2015 N 8 1u
+2 E EE%sT 2016 S 7
+3 - +03
+L Europe/Istanbul Asia/Istanbul
+Z Europe/Kiev 2:2:4 - LMT 1880
+2:2:4 - KMT 1924 May 2
+2 - EET 1930 Jun 21
+3 - MSK 1941 S 20
+1 c CE%sT 1943 N 6
+3 R MSK/MSD 1990 Jul 1 2
+2 1 EEST 1991 S 29 3
+2 e EE%sT 1995
+2 E EE%sT
+Z Europe/Uzhgorod 1:29:12 - LMT 1890 O
+1 - CET 1940
+1 c CE%sT 1944 O
+1 1 CEST 1944 O 26
+1 - CET 1945 Jun 29
+3 R MSK/MSD 1990
+3 - MSK 1990 Jul 1 2
+1 - CET 1991 Mar 31 3
+2 - EET 1992
+2 e EE%sT 1995
+2 E EE%sT
+Z Europe/Zaporozhye 2:20:40 - LMT 1880
+2:20 - +0220 1924 May 2
+2 - EET 1930 Jun 21
+3 - MSK 1941 Au 25
+1 c CE%sT 1943 O 25
+3 R MSK/MSD 1991 Mar 31 2
+2 e EE%sT 1995
+2 E EE%sT
+R u 1918 1919 - Mar lastSu 2 1 D
+R u 1918 1919 - O lastSu 2 0 S
+R u 1942 o - F 9 2 1 W
+R u 1945 o - Au 14 23u 1 P
+R u 1945 o - S 30 2 0 S
+R u 1967 2006 - O lastSu 2 0 S
+R u 1967 1973 - Ap lastSu 2 1 D
+R u 1974 o - Ja 6 2 1 D
+R u 1975 o - F lastSu 2 1 D
+R u 1976 1986 - Ap lastSu 2 1 D
+R u 1987 2006 - Ap Su>=1 2 1 D
+R u 2007 ma - Mar Su>=8 2 1 D
+R u 2007 ma - N Su>=1 2 0 S
+Z EST -5 - EST
+Z MST -7 - MST
+Z HST -10 - HST
+Z EST5EDT -5 u E%sT
+Z CST6CDT -6 u C%sT
+Z MST7MDT -7 u M%sT
+Z PST8PDT -8 u P%sT
+R NY 1920 o - Mar lastSu 2 1 D
+R NY 1920 o - O lastSu 2 0 S
+R NY 1921 1966 - Ap lastSu 2 1 D
+R NY 1921 1954 - S lastSu 2 0 S
+R NY 1955 1966 - O lastSu 2 0 S
+Z America/New_York -4:56:2 - LMT 1883 N 18 12:3:58
+-5 u E%sT 1920
+-5 NY E%sT 1942
+-5 u E%sT 1946
+-5 NY E%sT 1967
+-5 u E%sT
+R Ch 1920 o - Jun 13 2 1 D
+R Ch 1920 1921 - O lastSu 2 0 S
+R Ch 1921 o - Mar lastSu 2 1 D
+R Ch 1922 1966 - Ap lastSu 2 1 D
+R Ch 1922 1954 - S lastSu 2 0 S
+R Ch 1955 1966 - O lastSu 2 0 S
+Z America/Chicago -5:50:36 - LMT 1883 N 18 12:9:24
+-6 u C%sT 1920
+-6 Ch C%sT 1936 Mar 1 2
+-5 - EST 1936 N 15 2
+-6 Ch C%sT 1942
+-6 u C%sT 1946
+-6 Ch C%sT 1967
+-6 u C%sT
+Z America/North_Dakota/Center -6:45:12 - LMT 1883 N 18 12:14:48
+-7 u M%sT 1992 O 25 2
+-6 u C%sT
+Z America/North_Dakota/New_Salem -6:45:39 - LMT 1883 N 18 12:14:21
+-7 u M%sT 2003 O 26 2
+-6 u C%sT
+Z America/North_Dakota/Beulah -6:47:7 - LMT 1883 N 18 12:12:53
+-7 u M%sT 2010 N 7 2
+-6 u C%sT
+R De 1920 1921 - Mar lastSu 2 1 D
+R De 1920 o - O lastSu 2 0 S
+R De 1921 o - May 22 2 0 S
+R De 1965 1966 - Ap lastSu 2 1 D
+R De 1965 1966 - O lastSu 2 0 S
+Z America/Denver -6:59:56 - LMT 1883 N 18 12:0:4
+-7 u M%sT 1920
+-7 De M%sT 1942
+-7 u M%sT 1946
+-7 De M%sT 1967
+-7 u M%sT
+R CA 1948 o - Mar 14 2:1 1 D
+R CA 1949 o - Ja 1 2 0 S
+R CA 1950 1966 - Ap lastSu 1 1 D
+R CA 1950 1961 - S lastSu 2 0 S
+R CA 1962 1966 - O lastSu 2 0 S
+Z America/Los_Angeles -7:52:58 - LMT 1883 N 18 12:7:2
+-8 u P%sT 1946
+-8 CA P%sT 1967
+-8 u P%sT
+Z America/Juneau 15:2:19 - LMT 1867 O 19 15:33:32
+-8:57:41 - LMT 1900 Au 20 12
+-8 - PST 1942
+-8 u P%sT 1946
+-8 - PST 1969
+-8 u P%sT 1980 Ap 27 2
+-9 u Y%sT 1980 O 26 2
+-8 u P%sT 1983 O 30 2
+-9 u Y%sT 1983 N 30
+-9 u AK%sT
+Z America/Sitka 14:58:47 - LMT 1867 O 19 15:30
+-9:1:13 - LMT 1900 Au 20 12
+-8 - PST 1942
+-8 u P%sT 1946
+-8 - PST 1969
+-8 u P%sT 1983 O 30 2
+-9 u Y%sT 1983 N 30
+-9 u AK%sT
+Z America/Metlakatla 15:13:42 - LMT 1867 O 19 15:44:55
+-8:46:18 - LMT 1900 Au 20 12
+-8 - PST 1942
+-8 u P%sT 1946
+-8 - PST 1969
+-8 u P%sT 1983 O 30 2
+-8 - PST 2015 N 1 2
+-9 u AK%sT 2018 N 4 2
+-8 - PST 2019 Ja 20 2
+-9 u AK%sT
+Z America/Yakutat 14:41:5 - LMT 1867 O 19 15:12:18
+-9:18:55 - LMT 1900 Au 20 12
+-9 - YST 1942
+-9 u Y%sT 1946
+-9 - YST 1969
+-9 u Y%sT 1983 N 30
+-9 u AK%sT
+Z America/Anchorage 14:0:24 - LMT 1867 O 19 14:31:37
+-9:59:36 - LMT 1900 Au 20 12
+-10 - AST 1942
+-10 u A%sT 1967 Ap
+-10 - AHST 1969
+-10 u AH%sT 1983 O 30 2
+-9 u Y%sT 1983 N 30
+-9 u AK%sT
+Z America/Nome 12:58:22 - LMT 1867 O 19 13:29:35
+-11:1:38 - LMT 1900 Au 20 12
+-11 - NST 1942
+-11 u N%sT 1946
+-11 - NST 1967 Ap
+-11 - BST 1969
+-11 u B%sT 1983 O 30 2
+-9 u Y%sT 1983 N 30
+-9 u AK%sT
+Z America/Adak 12:13:22 - LMT 1867 O 19 12:44:35
+-11:46:38 - LMT 1900 Au 20 12
+-11 - NST 1942
+-11 u N%sT 1946
+-11 - NST 1967 Ap
+-11 - BST 1969
+-11 u B%sT 1983 O 30 2
+-10 u AH%sT 1983 N 30
+-10 u H%sT
+Z Pacific/Honolulu -10:31:26 - LMT 1896 Ja 13 12
+-10:30 - HST 1933 Ap 30 2
+-10:30 1 HDT 1933 May 21 12
+-10:30 u H%sT 1947 Jun 8 2
+-10 - HST
+Z America/Phoenix -7:28:18 - LMT 1883 N 18 11:31:42
+-7 u M%sT 1944 Ja 1 0:1
+-7 - MST 1944 Ap 1 0:1
+-7 u M%sT 1944 O 1 0:1
+-7 - MST 1967
+-7 u M%sT 1968 Mar 21
+-7 - MST
+L America/Phoenix America/Creston
+Z America/Boise -7:44:49 - LMT 1883 N 18 12:15:11
+-8 u P%sT 1923 May 13 2
+-7 u M%sT 1974
+-7 - MST 1974 F 3 2
+-7 u M%sT
+R In 1941 o - Jun 22 2 1 D
+R In 1941 1954 - S lastSu 2 0 S
+R In 1946 1954 - Ap lastSu 2 1 D
+Z America/Indiana/Indianapolis -5:44:38 - LMT 1883 N 18 12:15:22
+-6 u C%sT 1920
+-6 In C%sT 1942
+-6 u C%sT 1946
+-6 In C%sT 1955 Ap 24 2
+-5 - EST 1957 S 29 2
+-6 - CST 1958 Ap 27 2
+-5 - EST 1969
+-5 u E%sT 1971
+-5 - EST 2006
+-5 u E%sT
+R Ma 1951 o - Ap lastSu 2 1 D
+R Ma 1951 o - S lastSu 2 0 S
+R Ma 1954 1960 - Ap lastSu 2 1 D
+R Ma 1954 1960 - S lastSu 2 0 S
+Z America/Indiana/Marengo -5:45:23 - LMT 1883 N 18 12:14:37
+-6 u C%sT 1951
+-6 Ma C%sT 1961 Ap 30 2
+-5 - EST 1969
+-5 u E%sT 1974 Ja 6 2
+-6 1 CDT 1974 O 27 2
+-5 u E%sT 1976
+-5 - EST 2006
+-5 u E%sT
+R V 1946 o - Ap lastSu 2 1 D
+R V 1946 o - S lastSu 2 0 S
+R V 1953 1954 - Ap lastSu 2 1 D
+R V 1953 1959 - S lastSu 2 0 S
+R V 1955 o - May 1 0 1 D
+R V 1956 1963 - Ap lastSu 2 1 D
+R V 1960 o - O lastSu 2 0 S
+R V 1961 o - S lastSu 2 0 S
+R V 1962 1963 - O lastSu 2 0 S
+Z America/Indiana/Vincennes -5:50:7 - LMT 1883 N 18 12:9:53
+-6 u C%sT 1946
+-6 V C%sT 1964 Ap 26 2
+-5 - EST 1969
+-5 u E%sT 1971
+-5 - EST 2006 Ap 2 2
+-6 u C%sT 2007 N 4 2
+-5 u E%sT
+R Pe 1955 o - May 1 0 1 D
+R Pe 1955 1960 - S lastSu 2 0 S
+R Pe 1956 1963 - Ap lastSu 2 1 D
+R Pe 1961 1963 - O lastSu 2 0 S
+Z America/Indiana/Tell_City -5:47:3 - LMT 1883 N 18 12:12:57
+-6 u C%sT 1946
+-6 Pe C%sT 1964 Ap 26 2
+-5 - EST 1967 O 29 2
+-6 u C%sT 1969 Ap 27 2
+-5 u E%sT 1971
+-5 - EST 2006 Ap 2 2
+-6 u C%sT
+R Pi 1955 o - May 1 0 1 D
+R Pi 1955 1960 - S lastSu 2 0 S
+R Pi 1956 1964 - Ap lastSu 2 1 D
+R Pi 1961 1964 - O lastSu 2 0 S
+Z America/Indiana/Petersburg -5:49:7 - LMT 1883 N 18 12:10:53
+-6 u C%sT 1955
+-6 Pi C%sT 1965 Ap 25 2
+-5 - EST 1966 O 30 2
+-6 u C%sT 1977 O 30 2
+-5 - EST 2006 Ap 2 2
+-6 u C%sT 2007 N 4 2
+-5 u E%sT
+R St 1947 1961 - Ap lastSu 2 1 D
+R St 1947 1954 - S lastSu 2 0 S
+R St 1955 1956 - O lastSu 2 0 S
+R St 1957 1958 - S lastSu 2 0 S
+R St 1959 1961 - O lastSu 2 0 S
+Z America/Indiana/Knox -5:46:30 - LMT 1883 N 18 12:13:30
+-6 u C%sT 1947
+-6 St C%sT 1962 Ap 29 2
+-5 - EST 1963 O 27 2
+-6 u C%sT 1991 O 27 2
+-5 - EST 2006 Ap 2 2
+-6 u C%sT
+R Pu 1946 1960 - Ap lastSu 2 1 D
+R Pu 1946 1954 - S lastSu 2 0 S
+R Pu 1955 1956 - O lastSu 2 0 S
+R Pu 1957 1960 - S lastSu 2 0 S
+Z America/Indiana/Winamac -5:46:25 - LMT 1883 N 18 12:13:35
+-6 u C%sT 1946
+-6 Pu C%sT 1961 Ap 30 2
+-5 - EST 1969
+-5 u E%sT 1971
+-5 - EST 2006 Ap 2 2
+-6 u C%sT 2007 Mar 11 2
+-5 u E%sT
+Z America/Indiana/Vevay -5:40:16 - LMT 1883 N 18 12:19:44
+-6 u C%sT 1954 Ap 25 2
+-5 - EST 1969
+-5 u E%sT 1973
+-5 - EST 2006
+-5 u E%sT
+R v 1921 o - May 1 2 1 D
+R v 1921 o - S 1 2 0 S
+R v 1941 o - Ap lastSu 2 1 D
+R v 1941 o - S lastSu 2 0 S
+R v 1946 o - Ap lastSu 0:1 1 D
+R v 1946 o - Jun 2 2 0 S
+R v 1950 1961 - Ap lastSu 2 1 D
+R v 1950 1955 - S lastSu 2 0 S
+R v 1956 1961 - O lastSu 2 0 S
+Z America/Kentucky/Louisville -5:43:2 - LMT 1883 N 18 12:16:58
+-6 u C%sT 1921
+-6 v C%sT 1942
+-6 u C%sT 1946
+-6 v C%sT 1961 Jul 23 2
+-5 - EST 1968
+-5 u E%sT 1974 Ja 6 2
+-6 1 CDT 1974 O 27 2
+-5 u E%sT
+Z America/Kentucky/Monticello -5:39:24 - LMT 1883 N 18 12:20:36
+-6 u C%sT 1946
+-6 - CST 1968
+-6 u C%sT 2000 O 29 2
+-5 u E%sT
+R Dt 1948 o - Ap lastSu 2 1 D
+R Dt 1948 o - S lastSu 2 0 S
+Z America/Detroit -5:32:11 - LMT 1905
+-6 - CST 1915 May 15 2
+-5 - EST 1942
+-5 u E%sT 1946
+-5 Dt E%sT 1967 Jun 14 0:1
+-5 u E%sT 1969
+-5 - EST 1973
+-5 u E%sT 1975
+-5 - EST 1975 Ap 27 2
+-5 u E%sT
+R Me 1946 o - Ap lastSu 2 1 D
+R Me 1946 o - S lastSu 2 0 S
+R Me 1966 o - Ap lastSu 2 1 D
+R Me 1966 o - O lastSu 2 0 S
+Z America/Menominee -5:50:27 - LMT 1885 S 18 12
+-6 u C%sT 1946
+-6 Me C%sT 1969 Ap 27 2
+-5 - EST 1973 Ap 29 2
+-6 u C%sT
+R C 1918 o - Ap 14 2 1 D
+R C 1918 o - O 27 2 0 S
+R C 1942 o - F 9 2 1 W
+R C 1945 o - Au 14 23u 1 P
+R C 1945 o - S 30 2 0 S
+R C 1974 1986 - Ap lastSu 2 1 D
+R C 1974 2006 - O lastSu 2 0 S
+R C 1987 2006 - Ap Su>=1 2 1 D
+R C 2007 ma - Mar Su>=8 2 1 D
+R C 2007 ma - N Su>=1 2 0 S
+R j 1917 o - Ap 8 2 1 D
+R j 1917 o - S 17 2 0 S
+R j 1919 o - May 5 23 1 D
+R j 1919 o - Au 12 23 0 S
+R j 1920 1935 - May Su>=1 23 1 D
+R j 1920 1935 - O lastSu 23 0 S
+R j 1936 1941 - May M>=9 0 1 D
+R j 1936 1941 - O M>=2 0 0 S
+R j 1946 1950 - May Su>=8 2 1 D
+R j 1946 1950 - O Su>=2 2 0 S
+R j 1951 1986 - Ap lastSu 2 1 D
+R j 1951 1959 - S lastSu 2 0 S
+R j 1960 1986 - O lastSu 2 0 S
+R j 1987 o - Ap Su>=1 0:1 1 D
+R j 1987 2006 - O lastSu 0:1 0 S
+R j 1988 o - Ap Su>=1 0:1 2 DD
+R j 1989 2006 - Ap Su>=1 0:1 1 D
+R j 2007 2011 - Mar Su>=8 0:1 1 D
+R j 2007 2010 - N Su>=1 0:1 0 S
+Z America/St_Johns -3:30:52 - LMT 1884
+-3:30:52 j N%sT 1918
+-3:30:52 C N%sT 1919
+-3:30:52 j N%sT 1935 Mar 30
+-3:30 j N%sT 1942 May 11
+-3:30 C N%sT 1946
+-3:30 j N%sT 2011 N
+-3:30 C N%sT
+Z America/Goose_Bay -4:1:40 - LMT 1884
+-3:30:52 - NST 1918
+-3:30:52 C N%sT 1919
+-3:30:52 - NST 1935 Mar 30
+-3:30 - NST 1936
+-3:30 j N%sT 1942 May 11
+-3:30 C N%sT 1946
+-3:30 j N%sT 1966 Mar 15 2
+-4 j A%sT 2011 N
+-4 C A%sT
+R H 1916 o - Ap 1 0 1 D
+R H 1916 o - O 1 0 0 S
+R H 1920 o - May 9 0 1 D
+R H 1920 o - Au 29 0 0 S
+R H 1921 o - May 6 0 1 D
+R H 1921 1922 - S 5 0 0 S
+R H 1922 o - Ap 30 0 1 D
+R H 1923 1925 - May Su>=1 0 1 D
+R H 1923 o - S 4 0 0 S
+R H 1924 o - S 15 0 0 S
+R H 1925 o - S 28 0 0 S
+R H 1926 o - May 16 0 1 D
+R H 1926 o - S 13 0 0 S
+R H 1927 o - May 1 0 1 D
+R H 1927 o - S 26 0 0 S
+R H 1928 1931 - May Su>=8 0 1 D
+R H 1928 o - S 9 0 0 S
+R H 1929 o - S 3 0 0 S
+R H 1930 o - S 15 0 0 S
+R H 1931 1932 - S M>=24 0 0 S
+R H 1932 o - May 1 0 1 D
+R H 1933 o - Ap 30 0 1 D
+R H 1933 o - O 2 0 0 S
+R H 1934 o - May 20 0 1 D
+R H 1934 o - S 16 0 0 S
+R H 1935 o - Jun 2 0 1 D
+R H 1935 o - S 30 0 0 S
+R H 1936 o - Jun 1 0 1 D
+R H 1936 o - S 14 0 0 S
+R H 1937 1938 - May Su>=1 0 1 D
+R H 1937 1941 - S M>=24 0 0 S
+R H 1939 o - May 28 0 1 D
+R H 1940 1941 - May Su>=1 0 1 D
+R H 1946 1949 - Ap lastSu 2 1 D
+R H 1946 1949 - S lastSu 2 0 S
+R H 1951 1954 - Ap lastSu 2 1 D
+R H 1951 1954 - S lastSu 2 0 S
+R H 1956 1959 - Ap lastSu 2 1 D
+R H 1956 1959 - S lastSu 2 0 S
+R H 1962 1973 - Ap lastSu 2 1 D
+R H 1962 1973 - O lastSu 2 0 S
+Z America/Halifax -4:14:24 - LMT 1902 Jun 15
+-4 H A%sT 1918
+-4 C A%sT 1919
+-4 H A%sT 1942 F 9 2s
+-4 C A%sT 1946
+-4 H A%sT 1974
+-4 C A%sT
+Z America/Glace_Bay -3:59:48 - LMT 1902 Jun 15
+-4 C A%sT 1953
+-4 H A%sT 1954
+-4 - AST 1972
+-4 H A%sT 1974
+-4 C A%sT
+R o 1933 1935 - Jun Su>=8 1 1 D
+R o 1933 1935 - S Su>=8 1 0 S
+R o 1936 1938 - Jun Su>=1 1 1 D
+R o 1936 1938 - S Su>=1 1 0 S
+R o 1939 o - May 27 1 1 D
+R o 1939 1941 - S Sa>=21 1 0 S
+R o 1940 o - May 19 1 1 D
+R o 1941 o - May 4 1 1 D
+R o 1946 1972 - Ap lastSu 2 1 D
+R o 1946 1956 - S lastSu 2 0 S
+R o 1957 1972 - O lastSu 2 0 S
+R o 1993 2006 - Ap Su>=1 0:1 1 D
+R o 1993 2006 - O lastSu 0:1 0 S
+Z America/Moncton -4:19:8 - LMT 1883 D 9
+-5 - EST 1902 Jun 15
+-4 C A%sT 1933
+-4 o A%sT 1942
+-4 C A%sT 1946
+-4 o A%sT 1973
+-4 C A%sT 1993
+-4 o A%sT 2007
+-4 C A%sT
+R t 1919 o - Mar 30 23:30 1 D
+R t 1919 o - O 26 0 0 S
+R t 1920 o - May 2 2 1 D
+R t 1920 o - S 26 0 0 S
+R t 1921 o - May 15 2 1 D
+R t 1921 o - S 15 2 0 S
+R t 1922 1923 - May Su>=8 2 1 D
+R t 1922 1926 - S Su>=15 2 0 S
+R t 1924 1927 - May Su>=1 2 1 D
+R t 1927 1937 - S Su>=25 2 0 S
+R t 1928 1937 - Ap Su>=25 2 1 D
+R t 1938 1940 - Ap lastSu 2 1 D
+R t 1938 1939 - S lastSu 2 0 S
+R t 1945 1946 - S lastSu 2 0 S
+R t 1946 o - Ap lastSu 2 1 D
+R t 1947 1949 - Ap lastSu 0 1 D
+R t 1947 1948 - S lastSu 0 0 S
+R t 1949 o - N lastSu 0 0 S
+R t 1950 1973 - Ap lastSu 2 1 D
+R t 1950 o - N lastSu 2 0 S
+R t 1951 1956 - S lastSu 2 0 S
+R t 1957 1973 - O lastSu 2 0 S
+Z America/Toronto -5:17:32 - LMT 1895
+-5 C E%sT 1919
+-5 t E%sT 1942 F 9 2s
+-5 C E%sT 1946
+-5 t E%sT 1974
+-5 C E%sT
+L America/Toronto America/Nassau
+Z America/Thunder_Bay -5:57 - LMT 1895
+-6 - CST 1910
+-5 - EST 1942
+-5 C E%sT 1970
+-5 t E%sT 1973
+-5 - EST 1974
+-5 C E%sT
+Z America/Nipigon -5:53:4 - LMT 1895
+-5 C E%sT 1940 S 29
+-5 1 EDT 1942 F 9 2s
+-5 C E%sT
+Z America/Rainy_River -6:18:16 - LMT 1895
+-6 C C%sT 1940 S 29
+-6 1 CDT 1942 F 9 2s
+-6 C C%sT
+R W 1916 o - Ap 23 0 1 D
+R W 1916 o - S 17 0 0 S
+R W 1918 o - Ap 14 2 1 D
+R W 1918 o - O 27 2 0 S
+R W 1937 o - May 16 2 1 D
+R W 1937 o - S 26 2 0 S
+R W 1942 o - F 9 2 1 W
+R W 1945 o - Au 14 23u 1 P
+R W 1945 o - S lastSu 2 0 S
+R W 1946 o - May 12 2 1 D
+R W 1946 o - O 13 2 0 S
+R W 1947 1949 - Ap lastSu 2 1 D
+R W 1947 1949 - S lastSu 2 0 S
+R W 1950 o - May 1 2 1 D
+R W 1950 o - S 30 2 0 S
+R W 1951 1960 - Ap lastSu 2 1 D
+R W 1951 1958 - S lastSu 2 0 S
+R W 1959 o - O lastSu 2 0 S
+R W 1960 o - S lastSu 2 0 S
+R W 1963 o - Ap lastSu 2 1 D
+R W 1963 o - S 22 2 0 S
+R W 1966 1986 - Ap lastSu 2s 1 D
+R W 1966 2005 - O lastSu 2s 0 S
+R W 1987 2005 - Ap Su>=1 2s 1 D
+Z America/Winnipeg -6:28:36 - LMT 1887 Jul 16
+-6 W C%sT 2006
+-6 C C%sT
+R r 1918 o - Ap 14 2 1 D
+R r 1918 o - O 27 2 0 S
+R r 1930 1934 - May Su>=1 0 1 D
+R r 1930 1934 - O Su>=1 0 0 S
+R r 1937 1941 - Ap Su>=8 0 1 D
+R r 1937 o - O Su>=8 0 0 S
+R r 1938 o - O Su>=1 0 0 S
+R r 1939 1941 - O Su>=8 0 0 S
+R r 1942 o - F 9 2 1 W
+R r 1945 o - Au 14 23u 1 P
+R r 1945 o - S lastSu 2 0 S
+R r 1946 o - Ap Su>=8 2 1 D
+R r 1946 o - O Su>=8 2 0 S
+R r 1947 1957 - Ap lastSu 2 1 D
+R r 1947 1957 - S lastSu 2 0 S
+R r 1959 o - Ap lastSu 2 1 D
+R r 1959 o - O lastSu 2 0 S
+R Sw 1957 o - Ap lastSu 2 1 D
+R Sw 1957 o - O lastSu 2 0 S
+R Sw 1959 1961 - Ap lastSu 2 1 D
+R Sw 1959 o - O lastSu 2 0 S
+R Sw 1960 1961 - S lastSu 2 0 S
+Z America/Regina -6:58:36 - LMT 1905 S
+-7 r M%sT 1960 Ap lastSu 2
+-6 - CST
+Z America/Swift_Current -7:11:20 - LMT 1905 S
+-7 C M%sT 1946 Ap lastSu 2
+-7 r M%sT 1950
+-7 Sw M%sT 1972 Ap lastSu 2
+-6 - CST
+R Ed 1918 1919 - Ap Su>=8 2 1 D
+R Ed 1918 o - O 27 2 0 S
+R Ed 1919 o - May 27 2 0 S
+R Ed 1920 1923 - Ap lastSu 2 1 D
+R Ed 1920 o - O lastSu 2 0 S
+R Ed 1921 1923 - S lastSu 2 0 S
+R Ed 1942 o - F 9 2 1 W
+R Ed 1945 o - Au 14 23u 1 P
+R Ed 1945 o - S lastSu 2 0 S
+R Ed 1947 o - Ap lastSu 2 1 D
+R Ed 1947 o - S lastSu 2 0 S
+R Ed 1972 1986 - Ap lastSu 2 1 D
+R Ed 1972 2006 - O lastSu 2 0 S
+Z America/Edmonton -7:33:52 - LMT 1906 S
+-7 Ed M%sT 1987
+-7 C M%sT
+R Va 1918 o - Ap 14 2 1 D
+R Va 1918 o - O 27 2 0 S
+R Va 1942 o - F 9 2 1 W
+R Va 1945 o - Au 14 23u 1 P
+R Va 1945 o - S 30 2 0 S
+R Va 1946 1986 - Ap lastSu 2 1 D
+R Va 1946 o - S 29 2 0 S
+R Va 1947 1961 - S lastSu 2 0 S
+R Va 1962 2006 - O lastSu 2 0 S
+Z America/Vancouver -8:12:28 - LMT 1884
+-8 Va P%sT 1987
+-8 C P%sT
+Z America/Dawson_Creek -8:0:56 - LMT 1884
+-8 C P%sT 1947
+-8 Va P%sT 1972 Au 30 2
+-7 - MST
+Z America/Fort_Nelson -8:10:47 - LMT 1884
+-8 Va P%sT 1946
+-8 - PST 1947
+-8 Va P%sT 1987
+-8 C P%sT 2015 Mar 8 2
+-7 - MST
+R Y 1918 o - Ap 14 2 1 D
+R Y 1918 o - O 27 2 0 S
+R Y 1919 o - May 25 2 1 D
+R Y 1919 o - N 1 0 0 S
+R Y 1942 o - F 9 2 1 W
+R Y 1945 o - Au 14 23u 1 P
+R Y 1945 o - S 30 2 0 S
+R Y 1965 o - Ap lastSu 0 2 DD
+R Y 1965 o - O lastSu 2 0 S
+R Y 1980 1986 - Ap lastSu 2 1 D
+R Y 1980 2006 - O lastSu 2 0 S
+R Y 1987 2006 - Ap Su>=1 2 1 D
+Z America/Pangnirtung 0 - -00 1921
+-4 Y A%sT 1995 Ap Su>=1 2
+-5 C E%sT 1999 O 31 2
+-6 C C%sT 2000 O 29 2
+-5 C E%sT
+Z America/Iqaluit 0 - -00 1942 Au
+-5 Y E%sT 1999 O 31 2
+-6 C C%sT 2000 O 29 2
+-5 C E%sT
+Z America/Resolute 0 - -00 1947 Au 31
+-6 Y C%sT 2000 O 29 2
+-5 - EST 2001 Ap 1 3
+-6 C C%sT 2006 O 29 2
+-5 - EST 2007 Mar 11 3
+-6 C C%sT
+Z America/Rankin_Inlet 0 - -00 1957
+-6 Y C%sT 2000 O 29 2
+-5 - EST 2001 Ap 1 3
+-6 C C%sT
+Z America/Cambridge_Bay 0 - -00 1920
+-7 Y M%sT 1999 O 31 2
+-6 C C%sT 2000 O 29 2
+-5 - EST 2000 N 5
+-6 - CST 2001 Ap 1 3
+-7 C M%sT
+Z America/Yellowknife 0 - -00 1935
+-7 Y M%sT 1980
+-7 C M%sT
+Z America/Inuvik 0 - -00 1953
+-8 Y P%sT 1979 Ap lastSu 2
+-7 Y M%sT 1980
+-7 C M%sT
+Z America/Whitehorse -9:0:12 - LMT 1900 Au 20
+-9 Y Y%sT 1967 May 28
+-8 Y P%sT 1980
+-8 C P%sT 2020 N
+-7 - MST
+Z America/Dawson -9:17:40 - LMT 1900 Au 20
+-9 Y Y%sT 1973 O 28
+-8 Y P%sT 1980
+-8 C P%sT 2020 N
+-7 - MST
+R m 1939 o - F 5 0 1 D
+R m 1939 o - Jun 25 0 0 S
+R m 1940 o - D 9 0 1 D
+R m 1941 o - Ap 1 0 0 S
+R m 1943 o - D 16 0 1 W
+R m 1944 o - May 1 0 0 S
+R m 1950 o - F 12 0 1 D
+R m 1950 o - Jul 30 0 0 S
+R m 1996 2000 - Ap Su>=1 2 1 D
+R m 1996 2000 - O lastSu 2 0 S
+R m 2001 o - May Su>=1 2 1 D
+R m 2001 o - S lastSu 2 0 S
+R m 2002 ma - Ap Su>=1 2 1 D
+R m 2002 ma - O lastSu 2 0 S
+Z America/Cancun -5:47:4 - LMT 1922 Ja 1 0:12:56
+-6 - CST 1981 D 23
+-5 m E%sT 1998 Au 2 2
+-6 m C%sT 2015 F 1 2
+-5 - EST
+Z America/Merida -5:58:28 - LMT 1922 Ja 1 0:1:32
+-6 - CST 1981 D 23
+-5 - EST 1982 D 2
+-6 m C%sT
+Z America/Matamoros -6:40 - LMT 1921 D 31 23:20
+-6 - CST 1988
+-6 u C%sT 1989
+-6 m C%sT 2010
+-6 u C%sT
+Z America/Monterrey -6:41:16 - LMT 1921 D 31 23:18:44
+-6 - CST 1988
+-6 u C%sT 1989
+-6 m C%sT
+Z America/Mexico_City -6:36:36 - LMT 1922 Ja 1 0:23:24
+-7 - MST 1927 Jun 10 23
+-6 - CST 1930 N 15
+-7 - MST 1931 May 1 23
+-6 - CST 1931 O
+-7 - MST 1932 Ap
+-6 m C%sT 2001 S 30 2
+-6 - CST 2002 F 20
+-6 m C%sT
+Z America/Ojinaga -6:57:40 - LMT 1922 Ja 1 0:2:20
+-7 - MST 1927 Jun 10 23
+-6 - CST 1930 N 15
+-7 - MST 1931 May 1 23
+-6 - CST 1931 O
+-7 - MST 1932 Ap
+-6 - CST 1996
+-6 m C%sT 1998
+-6 - CST 1998 Ap Su>=1 3
+-7 m M%sT 2010
+-7 u M%sT
+Z America/Chihuahua -7:4:20 - LMT 1921 D 31 23:55:40
+-7 - MST 1927 Jun 10 23
+-6 - CST 1930 N 15
+-7 - MST 1931 May 1 23
+-6 - CST 1931 O
+-7 - MST 1932 Ap
+-6 - CST 1996
+-6 m C%sT 1998
+-6 - CST 1998 Ap Su>=1 3
+-7 m M%sT
+Z America/Hermosillo -7:23:52 - LMT 1921 D 31 23:36:8
+-7 - MST 1927 Jun 10 23
+-6 - CST 1930 N 15
+-7 - MST 1931 May 1 23
+-6 - CST 1931 O
+-7 - MST 1932 Ap
+-6 - CST 1942 Ap 24
+-7 - MST 1949 Ja 14
+-8 - PST 1970
+-7 m M%sT 1999
+-7 - MST
+Z America/Mazatlan -7:5:40 - LMT 1921 D 31 23:54:20
+-7 - MST 1927 Jun 10 23
+-6 - CST 1930 N 15
+-7 - MST 1931 May 1 23
+-6 - CST 1931 O
+-7 - MST 1932 Ap
+-6 - CST 1942 Ap 24
+-7 - MST 1949 Ja 14
+-8 - PST 1970
+-7 m M%sT
+Z America/Bahia_Banderas -7:1 - LMT 1921 D 31 23:59
+-7 - MST 1927 Jun 10 23
+-6 - CST 1930 N 15
+-7 - MST 1931 May 1 23
+-6 - CST 1931 O
+-7 - MST 1932 Ap
+-6 - CST 1942 Ap 24
+-7 - MST 1949 Ja 14
+-8 - PST 1970
+-7 m M%sT 2010 Ap 4 2
+-6 m C%sT
+Z America/Tijuana -7:48:4 - LMT 1922 Ja 1 0:11:56
+-7 - MST 1924
+-8 - PST 1927 Jun 10 23
+-7 - MST 1930 N 15
+-8 - PST 1931 Ap
+-8 1 PDT 1931 S 30
+-8 - PST 1942 Ap 24
+-8 1 PWT 1945 Au 14 23u
+-8 1 PPT 1945 N 12
+-8 - PST 1948 Ap 5
+-8 1 PDT 1949 Ja 14
+-8 - PST 1954
+-8 CA P%sT 1961
+-8 - PST 1976
+-8 u P%sT 1996
+-8 m P%sT 2001
+-8 u P%sT 2002 F 20
+-8 m P%sT 2010
+-8 u P%sT
+R BB 1942 o - Ap 19 5u 1 D
+R BB 1942 o - Au 31 6u 0 S
+R BB 1943 o - May 2 5u 1 D
+R BB 1943 o - S 5 6u 0 S
+R BB 1944 o - Ap 10 5u 0:30 -
+R BB 1944 o - S 10 6u 0 S
+R BB 1977 o - Jun 12 2 1 D
+R BB 1977 1978 - O Su>=1 2 0 S
+R BB 1978 1980 - Ap Su>=15 2 1 D
+R BB 1979 o - S 30 2 0 S
+R BB 1980 o - S 25 2 0 S
+Z America/Barbados -3:58:29 - LMT 1911 Au 28
+-4 BB A%sT 1944
+-4 BB AST/-0330 1945
+-4 BB A%sT
+R BZ 1918 1941 - O Sa>=1 24 0:30 -0530
+R BZ 1919 1942 - F Sa>=8 24 0 CST
+R BZ 1942 o - Jun 27 24 1 CWT
+R BZ 1945 o - Au 14 23u 1 CPT
+R BZ 1945 o - D 15 24 0 CST
+R BZ 1947 1967 - O Sa>=1 24 0:30 -0530
+R BZ 1948 1968 - F Sa>=8 24 0 CST
+R BZ 1973 o - D 5 0 1 CDT
+R BZ 1974 o - F 9 0 0 CST
+R BZ 1982 o - D 18 0 1 CDT
+R BZ 1983 o - F 12 0 0 CST
+Z America/Belize -5:52:48 - LMT 1912 Ap
+-6 BZ %s
+R Be 1917 o - Ap 5 24 1 -
+R Be 1917 o - S 30 24 0 -
+R Be 1918 o - Ap 13 24 1 -
+R Be 1918 o - S 15 24 0 S
+R Be 1942 o - Ja 11 2 1 D
+R Be 1942 o - O 18 2 0 S
+R Be 1943 o - Mar 21 2 1 D
+R Be 1943 o - O 31 2 0 S
+R Be 1944 1945 - Mar Su>=8 2 1 D
+R Be 1944 1945 - N Su>=1 2 0 S
+R Be 1947 o - May Su>=15 2 1 D
+R Be 1947 o - S Su>=8 2 0 S
+R Be 1948 1952 - May Su>=22 2 1 D
+R Be 1948 1952 - S Su>=1 2 0 S
+R Be 1956 o - May Su>=22 2 1 D
+R Be 1956 o - O lastSu 2 0 S
+Z Atlantic/Bermuda -4:19:18 - LMT 1890
+-4:19:18 Be BMT/BST 1930 Ja 1 2
+-4 Be A%sT 1974 Ap 28 2
+-4 C A%sT 1976
+-4 u A%sT
+R CR 1979 1980 - F lastSu 0 1 D
+R CR 1979 1980 - Jun Su>=1 0 0 S
+R CR 1991 1992 - Ja Sa>=15 0 1 D
+R CR 1991 o - Jul 1 0 0 S
+R CR 1992 o - Mar 15 0 0 S
+Z America/Costa_Rica -5:36:13 - LMT 1890
+-5:36:13 - SJMT 1921 Ja 15
+-6 CR C%sT
+R Q 1928 o - Jun 10 0 1 D
+R Q 1928 o - O 10 0 0 S
+R Q 1940 1942 - Jun Su>=1 0 1 D
+R Q 1940 1942 - S Su>=1 0 0 S
+R Q 1945 1946 - Jun Su>=1 0 1 D
+R Q 1945 1946 - S Su>=1 0 0 S
+R Q 1965 o - Jun 1 0 1 D
+R Q 1965 o - S 30 0 0 S
+R Q 1966 o - May 29 0 1 D
+R Q 1966 o - O 2 0 0 S
+R Q 1967 o - Ap 8 0 1 D
+R Q 1967 1968 - S Su>=8 0 0 S
+R Q 1968 o - Ap 14 0 1 D
+R Q 1969 1977 - Ap lastSu 0 1 D
+R Q 1969 1971 - O lastSu 0 0 S
+R Q 1972 1974 - O 8 0 0 S
+R Q 1975 1977 - O lastSu 0 0 S
+R Q 1978 o - May 7 0 1 D
+R Q 1978 1990 - O Su>=8 0 0 S
+R Q 1979 1980 - Mar Su>=15 0 1 D
+R Q 1981 1985 - May Su>=5 0 1 D
+R Q 1986 1989 - Mar Su>=14 0 1 D
+R Q 1990 1997 - Ap Su>=1 0 1 D
+R Q 1991 1995 - O Su>=8 0s 0 S
+R Q 1996 o - O 6 0s 0 S
+R Q 1997 o - O 12 0s 0 S
+R Q 1998 1999 - Mar lastSu 0s 1 D
+R Q 1998 2003 - O lastSu 0s 0 S
+R Q 2000 2003 - Ap Su>=1 0s 1 D
+R Q 2004 o - Mar lastSu 0s 1 D
+R Q 2006 2010 - O lastSu 0s 0 S
+R Q 2007 o - Mar Su>=8 0s 1 D
+R Q 2008 o - Mar Su>=15 0s 1 D
+R Q 2009 2010 - Mar Su>=8 0s 1 D
+R Q 2011 o - Mar Su>=15 0s 1 D
+R Q 2011 o - N 13 0s 0 S
+R Q 2012 o - Ap 1 0s 1 D
+R Q 2012 ma - N Su>=1 0s 0 S
+R Q 2013 ma - Mar Su>=8 0s 1 D
+Z America/Havana -5:29:28 - LMT 1890
+-5:29:36 - HMT 1925 Jul 19 12
+-5 Q C%sT
+R DO 1966 o - O 30 0 1 EDT
+R DO 1967 o - F 28 0 0 EST
+R DO 1969 1973 - O lastSu 0 0:30 -0430
+R DO 1970 o - F 21 0 0 EST
+R DO 1971 o - Ja 20 0 0 EST
+R DO 1972 1974 - Ja 21 0 0 EST
+Z America/Santo_Domingo -4:39:36 - LMT 1890
+-4:40 - SDMT 1933 Ap 1 12
+-5 DO %s 1974 O 27
+-4 - AST 2000 O 29 2
+-5 u E%sT 2000 D 3 1
+-4 - AST
+R SV 1987 1988 - May Su>=1 0 1 D
+R SV 1987 1988 - S lastSu 0 0 S
+Z America/El_Salvador -5:56:48 - LMT 1921
+-6 SV C%sT
+R GT 1973 o - N 25 0 1 D
+R GT 1974 o - F 24 0 0 S
+R GT 1983 o - May 21 0 1 D
+R GT 1983 o - S 22 0 0 S
+R GT 1991 o - Mar 23 0 1 D
+R GT 1991 o - S 7 0 0 S
+R GT 2006 o - Ap 30 0 1 D
+R GT 2006 o - O 1 0 0 S
+Z America/Guatemala -6:2:4 - LMT 1918 O 5
+-6 GT C%sT
+R HT 1983 o - May 8 0 1 D
+R HT 1984 1987 - Ap lastSu 0 1 D
+R HT 1983 1987 - O lastSu 0 0 S
+R HT 1988 1997 - Ap Su>=1 1s 1 D
+R HT 1988 1997 - O lastSu 1s 0 S
+R HT 2005 2006 - Ap Su>=1 0 1 D
+R HT 2005 2006 - O lastSu 0 0 S
+R HT 2012 2015 - Mar Su>=8 2 1 D
+R HT 2012 2015 - N Su>=1 2 0 S
+R HT 2017 ma - Mar Su>=8 2 1 D
+R HT 2017 ma - N Su>=1 2 0 S
+Z America/Port-au-Prince -4:49:20 - LMT 1890
+-4:49 - PPMT 1917 Ja 24 12
+-5 HT E%sT
+R HN 1987 1988 - May Su>=1 0 1 D
+R HN 1987 1988 - S lastSu 0 0 S
+R HN 2006 o - May Su>=1 0 1 D
+R HN 2006 o - Au M>=1 0 0 S
+Z America/Tegucigalpa -5:48:52 - LMT 1921 Ap
+-6 HN C%sT
+Z America/Jamaica -5:7:10 - LMT 1890
+-5:7:10 - KMT 1912 F
+-5 - EST 1974
+-5 u E%sT 1984
+-5 - EST
+Z America/Martinique -4:4:20 - LMT 1890
+-4:4:20 - FFMT 1911 May
+-4 - AST 1980 Ap 6
+-4 1 ADT 1980 S 28
+-4 - AST
+R NI 1979 1980 - Mar Su>=16 0 1 D
+R NI 1979 1980 - Jun M>=23 0 0 S
+R NI 2005 o - Ap 10 0 1 D
+R NI 2005 o - O Su>=1 0 0 S
+R NI 2006 o - Ap 30 2 1 D
+R NI 2006 o - O Su>=1 1 0 S
+Z America/Managua -5:45:8 - LMT 1890
+-5:45:12 - MMT 1934 Jun 23
+-6 - CST 1973 May
+-5 - EST 1975 F 16
+-6 NI C%sT 1992 Ja 1 4
+-5 - EST 1992 S 24
+-6 - CST 1993
+-5 - EST 1997
+-6 NI C%sT
+Z America/Panama -5:18:8 - LMT 1890
+-5:19:36 - CMT 1908 Ap 22
+-5 - EST
+L America/Panama America/Atikokan
+L America/Panama America/Cayman
+Z America/Puerto_Rico -4:24:25 - LMT 1899 Mar 28 12
+-4 - AST 1942 May 3
+-4 u A%sT 1946
+-4 - AST
+L America/Puerto_Rico America/Anguilla
+L America/Puerto_Rico America/Antigua
+L America/Puerto_Rico America/Aruba
+L America/Puerto_Rico America/Curacao
+L America/Puerto_Rico America/Blanc-Sablon
+L America/Puerto_Rico America/Dominica
+L America/Puerto_Rico America/Grenada
+L America/Puerto_Rico America/Guadeloupe
+L America/Puerto_Rico America/Kralendijk
+L America/Puerto_Rico America/Lower_Princes
+L America/Puerto_Rico America/Marigot
+L America/Puerto_Rico America/Montserrat
+L America/Puerto_Rico America/Port_of_Spain
+L America/Puerto_Rico America/St_Barthelemy
+L America/Puerto_Rico America/St_Kitts
+L America/Puerto_Rico America/St_Lucia
+L America/Puerto_Rico America/St_Thomas
+L America/Puerto_Rico America/St_Vincent
+L America/Puerto_Rico America/Tortola
+Z America/Miquelon -3:44:40 - LMT 1911 May 15
+-4 - AST 1980 May
+-3 - -03 1987
+-3 C -03/-02
+Z America/Grand_Turk -4:44:32 - LMT 1890
+-5:7:10 - KMT 1912 F
+-5 - EST 1979
+-5 u E%sT 2015 Mar 8 2
+-4 - AST 2018 Mar 11 3
+-5 u E%sT
+R A 1930 o - D 1 0 1 -
+R A 1931 o - Ap 1 0 0 -
+R A 1931 o - O 15 0 1 -
+R A 1932 1940 - Mar 1 0 0 -
+R A 1932 1939 - N 1 0 1 -
+R A 1940 o - Jul 1 0 1 -
+R A 1941 o - Jun 15 0 0 -
+R A 1941 o - O 15 0 1 -
+R A 1943 o - Au 1 0 0 -
+R A 1943 o - O 15 0 1 -
+R A 1946 o - Mar 1 0 0 -
+R A 1946 o - O 1 0 1 -
+R A 1963 o - O 1 0 0 -
+R A 1963 o - D 15 0 1 -
+R A 1964 1966 - Mar 1 0 0 -
+R A 1964 1966 - O 15 0 1 -
+R A 1967 o - Ap 2 0 0 -
+R A 1967 1968 - O Su>=1 0 1 -
+R A 1968 1969 - Ap Su>=1 0 0 -
+R A 1974 o - Ja 23 0 1 -
+R A 1974 o - May 1 0 0 -
+R A 1988 o - D 1 0 1 -
+R A 1989 1993 - Mar Su>=1 0 0 -
+R A 1989 1992 - O Su>=15 0 1 -
+R A 1999 o - O Su>=1 0 1 -
+R A 2000 o - Mar 3 0 0 -
+R A 2007 o - D 30 0 1 -
+R A 2008 2009 - Mar Su>=15 0 0 -
+R A 2008 o - O Su>=15 0 1 -
+Z America/Argentina/Buenos_Aires -3:53:48 - LMT 1894 O 31
+-4:16:48 - CMT 1920 May
+-4 - -04 1930 D
+-4 A -04/-03 1969 O 5
+-3 A -03/-02 1999 O 3
+-4 A -04/-03 2000 Mar 3
+-3 A -03/-02
+Z America/Argentina/Cordoba -4:16:48 - LMT 1894 O 31
+-4:16:48 - CMT 1920 May
+-4 - -04 1930 D
+-4 A -04/-03 1969 O 5
+-3 A -03/-02 1991 Mar 3
+-4 - -04 1991 O 20
+-3 A -03/-02 1999 O 3
+-4 A -04/-03 2000 Mar 3
+-3 A -03/-02
+Z America/Argentina/Salta -4:21:40 - LMT 1894 O 31
+-4:16:48 - CMT 1920 May
+-4 - -04 1930 D
+-4 A -04/-03 1969 O 5
+-3 A -03/-02 1991 Mar 3
+-4 - -04 1991 O 20
+-3 A -03/-02 1999 O 3
+-4 A -04/-03 2000 Mar 3
+-3 A -03/-02 2008 O 18
+-3 - -03
+Z America/Argentina/Tucuman -4:20:52 - LMT 1894 O 31
+-4:16:48 - CMT 1920 May
+-4 - -04 1930 D
+-4 A -04/-03 1969 O 5
+-3 A -03/-02 1991 Mar 3
+-4 - -04 1991 O 20
+-3 A -03/-02 1999 O 3
+-4 A -04/-03 2000 Mar 3
+-3 - -03 2004 Jun
+-4 - -04 2004 Jun 13
+-3 A -03/-02
+Z America/Argentina/La_Rioja -4:27:24 - LMT 1894 O 31
+-4:16:48 - CMT 1920 May
+-4 - -04 1930 D
+-4 A -04/-03 1969 O 5
+-3 A -03/-02 1991 Mar
+-4 - -04 1991 May 7
+-3 A -03/-02 1999 O 3
+-4 A -04/-03 2000 Mar 3
+-3 - -03 2004 Jun
+-4 - -04 2004 Jun 20
+-3 A -03/-02 2008 O 18
+-3 - -03
+Z America/Argentina/San_Juan -4:34:4 - LMT 1894 O 31
+-4:16:48 - CMT 1920 May
+-4 - -04 1930 D
+-4 A -04/-03 1969 O 5
+-3 A -03/-02 1991 Mar
+-4 - -04 1991 May 7
+-3 A -03/-02 1999 O 3
+-4 A -04/-03 2000 Mar 3
+-3 - -03 2004 May 31
+-4 - -04 2004 Jul 25
+-3 A -03/-02 2008 O 18
+-3 - -03
+Z America/Argentina/Jujuy -4:21:12 - LMT 1894 O 31
+-4:16:48 - CMT 1920 May
+-4 - -04 1930 D
+-4 A -04/-03 1969 O 5
+-3 A -03/-02 1990 Mar 4
+-4 - -04 1990 O 28
+-4 1 -03 1991 Mar 17
+-4 - -04 1991 O 6
+-3 1 -02 1992
+-3 A -03/-02 1999 O 3
+-4 A -04/-03 2000 Mar 3
+-3 A -03/-02 2008 O 18
+-3 - -03
+Z America/Argentina/Catamarca -4:23:8 - LMT 1894 O 31
+-4:16:48 - CMT 1920 May
+-4 - -04 1930 D
+-4 A -04/-03 1969 O 5
+-3 A -03/-02 1991 Mar 3
+-4 - -04 1991 O 20
+-3 A -03/-02 1999 O 3
+-4 A -04/-03 2000 Mar 3
+-3 - -03 2004 Jun
+-4 - -04 2004 Jun 20
+-3 A -03/-02 2008 O 18
+-3 - -03
+Z America/Argentina/Mendoza -4:35:16 - LMT 1894 O 31
+-4:16:48 - CMT 1920 May
+-4 - -04 1930 D
+-4 A -04/-03 1969 O 5
+-3 A -03/-02 1990 Mar 4
+-4 - -04 1990 O 15
+-4 1 -03 1991 Mar
+-4 - -04 1991 O 15
+-4 1 -03 1992 Mar
+-4 - -04 1992 O 18
+-3 A -03/-02 1999 O 3
+-4 A -04/-03 2000 Mar 3
+-3 - -03 2004 May 23
+-4 - -04 2004 S 26
+-3 A -03/-02 2008 O 18
+-3 - -03
+R Sa 2008 2009 - Mar Su>=8 0 0 -
+R Sa 2007 2008 - O Su>=8 0 1 -
+Z America/Argentina/San_Luis -4:25:24 - LMT 1894 O 31
+-4:16:48 - CMT 1920 May
+-4 - -04 1930 D
+-4 A -04/-03 1969 O 5
+-3 A -03/-02 1990
+-3 1 -02 1990 Mar 14
+-4 - -04 1990 O 15
+-4 1 -03 1991 Mar
+-4 - -04 1991 Jun
+-3 - -03 1999 O 3
+-4 1 -03 2000 Mar 3
+-3 - -03 2004 May 31
+-4 - -04 2004 Jul 25
+-3 A -03/-02 2008 Ja 21
+-4 Sa -04/-03 2009 O 11
+-3 - -03
+Z America/Argentina/Rio_Gallegos -4:36:52 - LMT 1894 O 31
+-4:16:48 - CMT 1920 May
+-4 - -04 1930 D
+-4 A -04/-03 1969 O 5
+-3 A -03/-02 1999 O 3
+-4 A -04/-03 2000 Mar 3
+-3 - -03 2004 Jun
+-4 - -04 2004 Jun 20
+-3 A -03/-02 2008 O 18
+-3 - -03
+Z America/Argentina/Ushuaia -4:33:12 - LMT 1894 O 31
+-4:16:48 - CMT 1920 May
+-4 - -04 1930 D
+-4 A -04/-03 1969 O 5
+-3 A -03/-02 1999 O 3
+-4 A -04/-03 2000 Mar 3
+-3 - -03 2004 May 30
+-4 - -04 2004 Jun 20
+-3 A -03/-02 2008 O 18
+-3 - -03
+Z America/La_Paz -4:32:36 - LMT 1890
+-4:32:36 - CMT 1931 O 15
+-4:32:36 1 BST 1932 Mar 21
+-4 - -04
+R B 1931 o - O 3 11 1 -
+R B 1932 1933 - Ap 1 0 0 -
+R B 1932 o - O 3 0 1 -
+R B 1949 1952 - D 1 0 1 -
+R B 1950 o - Ap 16 1 0 -
+R B 1951 1952 - Ap 1 0 0 -
+R B 1953 o - Mar 1 0 0 -
+R B 1963 o - D 9 0 1 -
+R B 1964 o - Mar 1 0 0 -
+R B 1965 o - Ja 31 0 1 -
+R B 1965 o - Mar 31 0 0 -
+R B 1965 o - D 1 0 1 -
+R B 1966 1968 - Mar 1 0 0 -
+R B 1966 1967 - N 1 0 1 -
+R B 1985 o - N 2 0 1 -
+R B 1986 o - Mar 15 0 0 -
+R B 1986 o - O 25 0 1 -
+R B 1987 o - F 14 0 0 -
+R B 1987 o - O 25 0 1 -
+R B 1988 o - F 7 0 0 -
+R B 1988 o - O 16 0 1 -
+R B 1989 o - Ja 29 0 0 -
+R B 1989 o - O 15 0 1 -
+R B 1990 o - F 11 0 0 -
+R B 1990 o - O 21 0 1 -
+R B 1991 o - F 17 0 0 -
+R B 1991 o - O 20 0 1 -
+R B 1992 o - F 9 0 0 -
+R B 1992 o - O 25 0 1 -
+R B 1993 o - Ja 31 0 0 -
+R B 1993 1995 - O Su>=11 0 1 -
+R B 1994 1995 - F Su>=15 0 0 -
+R B 1996 o - F 11 0 0 -
+R B 1996 o - O 6 0 1 -
+R B 1997 o - F 16 0 0 -
+R B 1997 o - O 6 0 1 -
+R B 1998 o - Mar 1 0 0 -
+R B 1998 o - O 11 0 1 -
+R B 1999 o - F 21 0 0 -
+R B 1999 o - O 3 0 1 -
+R B 2000 o - F 27 0 0 -
+R B 2000 2001 - O Su>=8 0 1 -
+R B 2001 2006 - F Su>=15 0 0 -
+R B 2002 o - N 3 0 1 -
+R B 2003 o - O 19 0 1 -
+R B 2004 o - N 2 0 1 -
+R B 2005 o - O 16 0 1 -
+R B 2006 o - N 5 0 1 -
+R B 2007 o - F 25 0 0 -
+R B 2007 o - O Su>=8 0 1 -
+R B 2008 2017 - O Su>=15 0 1 -
+R B 2008 2011 - F Su>=15 0 0 -
+R B 2012 o - F Su>=22 0 0 -
+R B 2013 2014 - F Su>=15 0 0 -
+R B 2015 o - F Su>=22 0 0 -
+R B 2016 2019 - F Su>=15 0 0 -
+R B 2018 o - N Su>=1 0 1 -
+Z America/Noronha -2:9:40 - LMT 1914
+-2 B -02/-01 1990 S 17
+-2 - -02 1999 S 30
+-2 B -02/-01 2000 O 15
+-2 - -02 2001 S 13
+-2 B -02/-01 2002 O
+-2 - -02
+Z America/Belem -3:13:56 - LMT 1914
+-3 B -03/-02 1988 S 12
+-3 - -03
+Z America/Santarem -3:38:48 - LMT 1914
+-4 B -04/-03 1988 S 12
+-4 - -04 2008 Jun 24
+-3 - -03
+Z America/Fortaleza -2:34 - LMT 1914
+-3 B -03/-02 1990 S 17
+-3 - -03 1999 S 30
+-3 B -03/-02 2000 O 22
+-3 - -03 2001 S 13
+-3 B -03/-02 2002 O
+-3 - -03
+Z America/Recife -2:19:36 - LMT 1914
+-3 B -03/-02 1990 S 17
+-3 - -03 1999 S 30
+-3 B -03/-02 2000 O 15
+-3 - -03 2001 S 13
+-3 B -03/-02 2002 O
+-3 - -03
+Z America/Araguaina -3:12:48 - LMT 1914
+-3 B -03/-02 1990 S 17
+-3 - -03 1995 S 14
+-3 B -03/-02 2003 S 24
+-3 - -03 2012 O 21
+-3 B -03/-02 2013 S
+-3 - -03
+Z America/Maceio -2:22:52 - LMT 1914
+-3 B -03/-02 1990 S 17
+-3 - -03 1995 O 13
+-3 B -03/-02 1996 S 4
+-3 - -03 1999 S 30
+-3 B -03/-02 2000 O 22
+-3 - -03 2001 S 13
+-3 B -03/-02 2002 O
+-3 - -03
+Z America/Bahia -2:34:4 - LMT 1914
+-3 B -03/-02 2003 S 24
+-3 - -03 2011 O 16
+-3 B -03/-02 2012 O 21
+-3 - -03
+Z America/Sao_Paulo -3:6:28 - LMT 1914
+-3 B -03/-02 1963 O 23
+-3 1 -02 1964
+-3 B -03/-02
+Z America/Campo_Grande -3:38:28 - LMT 1914
+-4 B -04/-03
+Z America/Cuiaba -3:44:20 - LMT 1914
+-4 B -04/-03 2003 S 24
+-4 - -04 2004 O
+-4 B -04/-03
+Z America/Porto_Velho -4:15:36 - LMT 1914
+-4 B -04/-03 1988 S 12
+-4 - -04
+Z America/Boa_Vista -4:2:40 - LMT 1914
+-4 B -04/-03 1988 S 12
+-4 - -04 1999 S 30
+-4 B -04/-03 2000 O 15
+-4 - -04
+Z America/Manaus -4:0:4 - LMT 1914
+-4 B -04/-03 1988 S 12
+-4 - -04 1993 S 28
+-4 B -04/-03 1994 S 22
+-4 - -04
+Z America/Eirunepe -4:39:28 - LMT 1914
+-5 B -05/-04 1988 S 12
+-5 - -05 1993 S 28
+-5 B -05/-04 1994 S 22
+-5 - -05 2008 Jun 24
+-4 - -04 2013 N 10
+-5 - -05
+Z America/Rio_Branco -4:31:12 - LMT 1914
+-5 B -05/-04 1988 S 12
+-5 - -05 2008 Jun 24
+-4 - -04 2013 N 10
+-5 - -05
+R x 1927 1931 - S 1 0 1 -
+R x 1928 1932 - Ap 1 0 0 -
+R x 1968 o - N 3 4u 1 -
+R x 1969 o - Mar 30 3u 0 -
+R x 1969 o - N 23 4u 1 -
+R x 1970 o - Mar 29 3u 0 -
+R x 1971 o - Mar 14 3u 0 -
+R x 1970 1972 - O Su>=9 4u 1 -
+R x 1972 1986 - Mar Su>=9 3u 0 -
+R x 1973 o - S 30 4u 1 -
+R x 1974 1987 - O Su>=9 4u 1 -
+R x 1987 o - Ap 12 3u 0 -
+R x 1988 1990 - Mar Su>=9 3u 0 -
+R x 1988 1989 - O Su>=9 4u 1 -
+R x 1990 o - S 16 4u 1 -
+R x 1991 1996 - Mar Su>=9 3u 0 -
+R x 1991 1997 - O Su>=9 4u 1 -
+R x 1997 o - Mar 30 3u 0 -
+R x 1998 o - Mar Su>=9 3u 0 -
+R x 1998 o - S 27 4u 1 -
+R x 1999 o - Ap 4 3u 0 -
+R x 1999 2010 - O Su>=9 4u 1 -
+R x 2000 2007 - Mar Su>=9 3u 0 -
+R x 2008 o - Mar 30 3u 0 -
+R x 2009 o - Mar Su>=9 3u 0 -
+R x 2010 o - Ap Su>=1 3u 0 -
+R x 2011 o - May Su>=2 3u 0 -
+R x 2011 o - Au Su>=16 4u 1 -
+R x 2012 2014 - Ap Su>=23 3u 0 -
+R x 2012 2014 - S Su>=2 4u 1 -
+R x 2016 2018 - May Su>=9 3u 0 -
+R x 2016 2018 - Au Su>=9 4u 1 -
+R x 2019 ma - Ap Su>=2 3u 0 -
+R x 2019 ma - S Su>=2 4u 1 -
+Z America/Santiago -4:42:46 - LMT 1890
+-4:42:46 - SMT 1910 Ja 10
+-5 - -05 1916 Jul
+-4:42:46 - SMT 1918 S 10
+-4 - -04 1919 Jul
+-4:42:46 - SMT 1927 S
+-5 x -05/-04 1932 S
+-4 - -04 1942 Jun
+-5 - -05 1942 Au
+-4 - -04 1946 Jul 15
+-4 1 -03 1946 S
+-4 - -04 1947 Ap
+-5 - -05 1947 May 21 23
+-4 x -04/-03
+Z America/Punta_Arenas -4:43:40 - LMT 1890
+-4:42:46 - SMT 1910 Ja 10
+-5 - -05 1916 Jul
+-4:42:46 - SMT 1918 S 10
+-4 - -04 1919 Jul
+-4:42:46 - SMT 1927 S
+-5 x -05/-04 1932 S
+-4 - -04 1942 Jun
+-5 - -05 1942 Au
+-4 - -04 1947 Ap
+-5 - -05 1947 May 21 23
+-4 x -04/-03 2016 D 4
+-3 - -03
+Z Pacific/Easter -7:17:28 - LMT 1890
+-7:17:28 - EMT 1932 S
+-7 x -07/-06 1982 Mar 14 3u
+-6 x -06/-05
+Z Antarctica/Palmer 0 - -00 1965
+-4 A -04/-03 1969 O 5
+-3 A -03/-02 1982 May
+-4 x -04/-03 2016 D 4
+-3 - -03
+R CO 1992 o - May 3 0 1 -
+R CO 1993 o - Ap 4 0 0 -
+Z America/Bogota -4:56:16 - LMT 1884 Mar 13
+-4:56:16 - BMT 1914 N 23
+-5 CO -05/-04
+R EC 1992 o - N 28 0 1 -
+R EC 1993 o - F 5 0 0 -
+Z America/Guayaquil -5:19:20 - LMT 1890
+-5:14 - QMT 1931
+-5 EC -05/-04
+Z Pacific/Galapagos -5:58:24 - LMT 1931
+-5 - -05 1986
+-6 EC -06/-05
+R FK 1937 1938 - S lastSu 0 1 -
+R FK 1938 1942 - Mar Su>=19 0 0 -
+R FK 1939 o - O 1 0 1 -
+R FK 1940 1942 - S lastSu 0 1 -
+R FK 1943 o - Ja 1 0 0 -
+R FK 1983 o - S lastSu 0 1 -
+R FK 1984 1985 - Ap lastSu 0 0 -
+R FK 1984 o - S 16 0 1 -
+R FK 1985 2000 - S Su>=9 0 1 -
+R FK 1986 2000 - Ap Su>=16 0 0 -
+R FK 2001 2010 - Ap Su>=15 2 0 -
+R FK 2001 2010 - S Su>=1 2 1 -
+Z Atlantic/Stanley -3:51:24 - LMT 1890
+-3:51:24 - SMT 1912 Mar 12
+-4 FK -04/-03 1983 May
+-3 FK -03/-02 1985 S 15
+-4 FK -04/-03 2010 S 5 2
+-3 - -03
+Z America/Cayenne -3:29:20 - LMT 1911 Jul
+-4 - -04 1967 O
+-3 - -03
+Z America/Guyana -3:52:39 - LMT 1911 Au
+-4 - -04 1915 Mar
+-3:45 - -0345 1975 Au
+-3 - -03 1992 Mar 29 1
+-4 - -04
+R y 1975 1988 - O 1 0 1 -
+R y 1975 1978 - Mar 1 0 0 -
+R y 1979 1991 - Ap 1 0 0 -
+R y 1989 o - O 22 0 1 -
+R y 1990 o - O 1 0 1 -
+R y 1991 o - O 6 0 1 -
+R y 1992 o - Mar 1 0 0 -
+R y 1992 o - O 5 0 1 -
+R y 1993 o - Mar 31 0 0 -
+R y 1993 1995 - O 1 0 1 -
+R y 1994 1995 - F lastSu 0 0 -
+R y 1996 o - Mar 1 0 0 -
+R y 1996 2001 - O Su>=1 0 1 -
+R y 1997 o - F lastSu 0 0 -
+R y 1998 2001 - Mar Su>=1 0 0 -
+R y 2002 2004 - Ap Su>=1 0 0 -
+R y 2002 2003 - S Su>=1 0 1 -
+R y 2004 2009 - O Su>=15 0 1 -
+R y 2005 2009 - Mar Su>=8 0 0 -
+R y 2010 ma - O Su>=1 0 1 -
+R y 2010 2012 - Ap Su>=8 0 0 -
+R y 2013 ma - Mar Su>=22 0 0 -
+Z America/Asuncion -3:50:40 - LMT 1890
+-3:50:40 - AMT 1931 O 10
+-4 - -04 1972 O
+-3 - -03 1974 Ap
+-4 y -04/-03
+R PE 1938 o - Ja 1 0 1 -
+R PE 1938 o - Ap 1 0 0 -
+R PE 1938 1939 - S lastSu 0 1 -
+R PE 1939 1940 - Mar Su>=24 0 0 -
+R PE 1986 1987 - Ja 1 0 1 -
+R PE 1986 1987 - Ap 1 0 0 -
+R PE 1990 o - Ja 1 0 1 -
+R PE 1990 o - Ap 1 0 0 -
+R PE 1994 o - Ja 1 0 1 -
+R PE 1994 o - Ap 1 0 0 -
+Z America/Lima -5:8:12 - LMT 1890
+-5:8:36 - LMT 1908 Jul 28
+-5 PE -05/-04
+Z Atlantic/South_Georgia -2:26:8 - LMT 1890
+-2 - -02
+Z America/Paramaribo -3:40:40 - LMT 1911
+-3:40:52 - PMT 1935
+-3:40:36 - PMT 1945 O
+-3:30 - -0330 1984 O
+-3 - -03
+R U 1923 1925 - O 1 0 0:30 -
+R U 1924 1926 - Ap 1 0 0 -
+R U 1933 1938 - O lastSu 0 0:30 -
+R U 1934 1941 - Mar lastSa 24 0 -
+R U 1939 o - O 1 0 0:30 -
+R U 1940 o - O 27 0 0:30 -
+R U 1941 o - Au 1 0 0:30 -
+R U 1942 o - D 14 0 0:30 -
+R U 1943 o - Mar 14 0 0 -
+R U 1959 o - May 24 0 0:30 -
+R U 1959 o - N 15 0 0 -
+R U 1960 o - Ja 17 0 1 -
+R U 1960 o - Mar 6 0 0 -
+R U 1965 o - Ap 4 0 1 -
+R U 1965 o - S 26 0 0 -
+R U 1968 o - May 27 0 0:30 -
+R U 1968 o - D 1 0 0 -
+R U 1970 o - Ap 25 0 1 -
+R U 1970 o - Jun 14 0 0 -
+R U 1972 o - Ap 23 0 1 -
+R U 1972 o - Jul 16 0 0 -
+R U 1974 o - Ja 13 0 1:30 -
+R U 1974 o - Mar 10 0 0:30 -
+R U 1974 o - S 1 0 0 -
+R U 1974 o - D 22 0 1 -
+R U 1975 o - Mar 30 0 0 -
+R U 1976 o - D 19 0 1 -
+R U 1977 o - Mar 6 0 0 -
+R U 1977 o - D 4 0 1 -
+R U 1978 1979 - Mar Su>=1 0 0 -
+R U 1978 o - D 17 0 1 -
+R U 1979 o - Ap 29 0 1 -
+R U 1980 o - Mar 16 0 0 -
+R U 1987 o - D 14 0 1 -
+R U 1988 o - F 28 0 0 -
+R U 1988 o - D 11 0 1 -
+R U 1989 o - Mar 5 0 0 -
+R U 1989 o - O 29 0 1 -
+R U 1990 o - F 25 0 0 -
+R U 1990 1991 - O Su>=21 0 1 -
+R U 1991 1992 - Mar Su>=1 0 0 -
+R U 1992 o - O 18 0 1 -
+R U 1993 o - F 28 0 0 -
+R U 2004 o - S 19 0 1 -
+R U 2005 o - Mar 27 2 0 -
+R U 2005 o - O 9 2 1 -
+R U 2006 2015 - Mar Su>=8 2 0 -
+R U 2006 2014 - O Su>=1 2 1 -
+Z America/Montevideo -3:44:51 - LMT 1908 Jun 10
+-3:44:51 - MMT 1920 May
+-4 - -04 1923 O
+-3:30 U -0330/-03 1942 D 14
+-3 U -03/-0230 1960
+-3 U -03/-02 1968
+-3 U -03/-0230 1970
+-3 U -03/-02 1974
+-3 U -03/-0130 1974 Mar 10
+-3 U -03/-0230 1974 D 22
+-3 U -03/-02
+Z America/Caracas -4:27:44 - LMT 1890
+-4:27:40 - CMT 1912 F 12
+-4:30 - -0430 1965
+-4 - -04 2007 D 9 3
+-4:30 - -0430 2016 May 1 2:30
+-4 - -04
+Z Etc/GMT 0 - GMT
+Z Etc/UTC 0 - UTC
+L Etc/GMT GMT
+L Etc/UTC Etc/Universal
+L Etc/UTC Etc/Zulu
+L Etc/GMT Etc/Greenwich
+L Etc/GMT Etc/GMT-0
+L Etc/GMT Etc/GMT+0
+L Etc/GMT Etc/GMT0
+Z Etc/GMT-14 14 - +14
+Z Etc/GMT-13 13 - +13
+Z Etc/GMT-12 12 - +12
+Z Etc/GMT-11 11 - +11
+Z Etc/GMT-10 10 - +10
+Z Etc/GMT-9 9 - +09
+Z Etc/GMT-8 8 - +08
+Z Etc/GMT-7 7 - +07
+Z Etc/GMT-6 6 - +06
+Z Etc/GMT-5 5 - +05
+Z Etc/GMT-4 4 - +04
+Z Etc/GMT-3 3 - +03
+Z Etc/GMT-2 2 - +02
+Z Etc/GMT-1 1 - +01
+Z Etc/GMT+1 -1 - -01
+Z Etc/GMT+2 -2 - -02
+Z Etc/GMT+3 -3 - -03
+Z Etc/GMT+4 -4 - -04
+Z Etc/GMT+5 -5 - -05
+Z Etc/GMT+6 -6 - -06
+Z Etc/GMT+7 -7 - -07
+Z Etc/GMT+8 -8 - -08
+Z Etc/GMT+9 -9 - -09
+Z Etc/GMT+10 -10 - -10
+Z Etc/GMT+11 -11 - -11
+Z Etc/GMT+12 -12 - -12
+Z Factory 0 - -00
+L Africa/Nairobi Africa/Asmera
+L Africa/Abidjan Africa/Timbuktu
+L America/Argentina/Catamarca America/Argentina/ComodRivadavia
+L America/Adak America/Atka
+L America/Argentina/Buenos_Aires America/Buenos_Aires
+L America/Argentina/Catamarca America/Catamarca
+L America/Panama America/Coral_Harbour
+L America/Argentina/Cordoba America/Cordoba
+L America/Tijuana America/Ensenada
+L America/Indiana/Indianapolis America/Fort_Wayne
+L America/Nuuk America/Godthab
+L America/Indiana/Indianapolis America/Indianapolis
+L America/Argentina/Jujuy America/Jujuy
+L America/Indiana/Knox America/Knox_IN
+L America/Kentucky/Louisville America/Louisville
+L America/Argentina/Mendoza America/Mendoza
+L America/Toronto America/Montreal
+L America/Rio_Branco America/Porto_Acre
+L America/Argentina/Cordoba America/Rosario
+L America/Tijuana America/Santa_Isabel
+L America/Denver America/Shiprock
+L America/Puerto_Rico America/Virgin
+L Pacific/Auckland Antarctica/South_Pole
+L Asia/Ashgabat Asia/Ashkhabad
+L Asia/Kolkata Asia/Calcutta
+L Asia/Shanghai Asia/Chongqing
+L Asia/Shanghai Asia/Chungking
+L Asia/Dhaka Asia/Dacca
+L Asia/Shanghai Asia/Harbin
+L Asia/Urumqi Asia/Kashgar
+L Asia/Kathmandu Asia/Katmandu
+L Asia/Macau Asia/Macao
+L Asia/Yangon Asia/Rangoon
+L Asia/Ho_Chi_Minh Asia/Saigon
+L Asia/Jerusalem Asia/Tel_Aviv
+L Asia/Thimphu Asia/Thimbu
+L Asia/Makassar Asia/Ujung_Pandang
+L Asia/Ulaanbaatar Asia/Ulan_Bator
+L Atlantic/Faroe Atlantic/Faeroe
+L Europe/Oslo Atlantic/Jan_Mayen
+L Australia/Sydney Australia/ACT
+L Australia/Sydney Australia/Canberra
+L Australia/Hobart Australia/Currie
+L Australia/Lord_Howe Australia/LHI
+L Australia/Sydney Australia/NSW
+L Australia/Darwin Australia/North
+L Australia/Brisbane Australia/Queensland
+L Australia/Adelaide Australia/South
+L Australia/Hobart Australia/Tasmania
+L Australia/Melbourne Australia/Victoria
+L Australia/Perth Australia/West
+L Australia/Broken_Hill Australia/Yancowinna
+L America/Rio_Branco Brazil/Acre
+L America/Noronha Brazil/DeNoronha
+L America/Sao_Paulo Brazil/East
+L America/Manaus Brazil/West
+L America/Halifax Canada/Atlantic
+L America/Winnipeg Canada/Central
+L America/Toronto Canada/Eastern
+L America/Edmonton Canada/Mountain
+L America/St_Johns Canada/Newfoundland
+L America/Vancouver Canada/Pacific
+L America/Regina Canada/Saskatchewan
+L America/Whitehorse Canada/Yukon
+L America/Santiago Chile/Continental
+L Pacific/Easter Chile/EasterIsland
+L America/Havana Cuba
+L Africa/Cairo Egypt
+L Europe/Dublin Eire
+L Etc/UTC Etc/UCT
+L Europe/London Europe/Belfast
+L Europe/Chisinau Europe/Tiraspol
+L Europe/London GB
+L Europe/London GB-Eire
+L Etc/GMT GMT+0
+L Etc/GMT GMT-0
+L Etc/GMT GMT0
+L Etc/GMT Greenwich
+L Asia/Hong_Kong Hongkong
+L Atlantic/Reykjavik Iceland
+L Asia/Tehran Iran
+L Asia/Jerusalem Israel
+L America/Jamaica Jamaica
+L Asia/Tokyo Japan
+L Pacific/Kwajalein Kwajalein
+L Africa/Tripoli Libya
+L America/Tijuana Mexico/BajaNorte
+L America/Mazatlan Mexico/BajaSur
+L America/Mexico_City Mexico/General
+L Pacific/Auckland NZ
+L Pacific/Chatham NZ-CHAT
+L America/Denver Navajo
+L Asia/Shanghai PRC
+L Pacific/Kanton Pacific/Enderbury
+L Pacific/Honolulu Pacific/Johnston
+L Pacific/Pohnpei Pacific/Ponape
+L Pacific/Pago_Pago Pacific/Samoa
+L Pacific/Chuuk Pacific/Truk
+L Pacific/Chuuk Pacific/Yap
+L Europe/Warsaw Poland
+L Europe/Lisbon Portugal
+L Asia/Taipei ROC
+L Asia/Seoul ROK
+L Asia/Singapore Singapore
+L Europe/Istanbul Turkey
+L Etc/UTC UCT
+L America/Anchorage US/Alaska
+L America/Adak US/Aleutian
+L America/Phoenix US/Arizona
+L America/Chicago US/Central
+L America/Indiana/Indianapolis US/East-Indiana
+L America/New_York US/Eastern
+L Pacific/Honolulu US/Hawaii
+L America/Indiana/Knox US/Indiana-Starke
+L America/Detroit US/Michigan
+L America/Denver US/Mountain
+L America/Los_Angeles US/Pacific
+L Pacific/Pago_Pago US/Samoa
+L Etc/UTC UTC
+L Etc/UTC Universal
+L Europe/Moscow W-SU
+L Etc/UTC Zulu
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/zone.tab b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/zone.tab
new file mode 100644
index 0000000000000000000000000000000000000000..086458fb20739bda8be479089f05271c20472ed8
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/zone.tab
@@ -0,0 +1,454 @@
+# tzdb timezone descriptions (deprecated version)
+#
+# This file is in the public domain, so clarified as of
+# 2009-05-17 by Arthur David Olson.
+#
+# From Paul Eggert (2021-09-20):
+# This file is intended as a backward-compatibility aid for older programs.
+# New programs should use zone1970.tab. This file is like zone1970.tab (see
+# zone1970.tab's comments), but with the following additional restrictions:
+#
+# 1. This file contains only ASCII characters.
+# 2. The first data column contains exactly one country code.
+#
+# Because of (2), each row stands for an area that is the intersection
+# of a region identified by a country code and of a timezone where civil
+# clocks have agreed since 1970; this is a narrower definition than
+# that of zone1970.tab.
+#
+# Unlike zone1970.tab, a row's third column can be a Link from
+# 'backward' instead of a Zone.
+#
+# This table is intended as an aid for users, to help them select timezones
+# appropriate for their practical needs. It is not intended to take or
+# endorse any position on legal or territorial claims.
+#
+#country-
+#code coordinates TZ comments
+AD +4230+00131 Europe/Andorra
+AE +2518+05518 Asia/Dubai
+AF +3431+06912 Asia/Kabul
+AG +1703-06148 America/Antigua
+AI +1812-06304 America/Anguilla
+AL +4120+01950 Europe/Tirane
+AM +4011+04430 Asia/Yerevan
+AO -0848+01314 Africa/Luanda
+AQ -7750+16636 Antarctica/McMurdo New Zealand time - McMurdo, South Pole
+AQ -6617+11031 Antarctica/Casey Casey
+AQ -6835+07758 Antarctica/Davis Davis
+AQ -6640+14001 Antarctica/DumontDUrville Dumont-d'Urville
+AQ -6736+06253 Antarctica/Mawson Mawson
+AQ -6448-06406 Antarctica/Palmer Palmer
+AQ -6734-06808 Antarctica/Rothera Rothera
+AQ -690022+0393524 Antarctica/Syowa Syowa
+AQ -720041+0023206 Antarctica/Troll Troll
+AQ -7824+10654 Antarctica/Vostok Vostok
+AR -3436-05827 America/Argentina/Buenos_Aires Buenos Aires (BA, CF)
+AR -3124-06411 America/Argentina/Cordoba Argentina (most areas: CB, CC, CN, ER, FM, MN, SE, SF)
+AR -2447-06525 America/Argentina/Salta Salta (SA, LP, NQ, RN)
+AR -2411-06518 America/Argentina/Jujuy Jujuy (JY)
+AR -2649-06513 America/Argentina/Tucuman Tucuman (TM)
+AR -2828-06547 America/Argentina/Catamarca Catamarca (CT); Chubut (CH)
+AR -2926-06651 America/Argentina/La_Rioja La Rioja (LR)
+AR -3132-06831 America/Argentina/San_Juan San Juan (SJ)
+AR -3253-06849 America/Argentina/Mendoza Mendoza (MZ)
+AR -3319-06621 America/Argentina/San_Luis San Luis (SL)
+AR -5138-06913 America/Argentina/Rio_Gallegos Santa Cruz (SC)
+AR -5448-06818 America/Argentina/Ushuaia Tierra del Fuego (TF)
+AS -1416-17042 Pacific/Pago_Pago
+AT +4813+01620 Europe/Vienna
+AU -3133+15905 Australia/Lord_Howe Lord Howe Island
+AU -5430+15857 Antarctica/Macquarie Macquarie Island
+AU -4253+14719 Australia/Hobart Tasmania
+AU -3749+14458 Australia/Melbourne Victoria
+AU -3352+15113 Australia/Sydney New South Wales (most areas)
+AU -3157+14127 Australia/Broken_Hill New South Wales (Yancowinna)
+AU -2728+15302 Australia/Brisbane Queensland (most areas)
+AU -2016+14900 Australia/Lindeman Queensland (Whitsunday Islands)
+AU -3455+13835 Australia/Adelaide South Australia
+AU -1228+13050 Australia/Darwin Northern Territory
+AU -3157+11551 Australia/Perth Western Australia (most areas)
+AU -3143+12852 Australia/Eucla Western Australia (Eucla)
+AW +1230-06958 America/Aruba
+AX +6006+01957 Europe/Mariehamn
+AZ +4023+04951 Asia/Baku
+BA +4352+01825 Europe/Sarajevo
+BB +1306-05937 America/Barbados
+BD +2343+09025 Asia/Dhaka
+BE +5050+00420 Europe/Brussels
+BF +1222-00131 Africa/Ouagadougou
+BG +4241+02319 Europe/Sofia
+BH +2623+05035 Asia/Bahrain
+BI -0323+02922 Africa/Bujumbura
+BJ +0629+00237 Africa/Porto-Novo
+BL +1753-06251 America/St_Barthelemy
+BM +3217-06446 Atlantic/Bermuda
+BN +0456+11455 Asia/Brunei
+BO -1630-06809 America/La_Paz
+BQ +120903-0681636 America/Kralendijk
+BR -0351-03225 America/Noronha Atlantic islands
+BR -0127-04829 America/Belem Para (east); Amapa
+BR -0343-03830 America/Fortaleza Brazil (northeast: MA, PI, CE, RN, PB)
+BR -0803-03454 America/Recife Pernambuco
+BR -0712-04812 America/Araguaina Tocantins
+BR -0940-03543 America/Maceio Alagoas, Sergipe
+BR -1259-03831 America/Bahia Bahia
+BR -2332-04637 America/Sao_Paulo Brazil (southeast: GO, DF, MG, ES, RJ, SP, PR, SC, RS)
+BR -2027-05437 America/Campo_Grande Mato Grosso do Sul
+BR -1535-05605 America/Cuiaba Mato Grosso
+BR -0226-05452 America/Santarem Para (west)
+BR -0846-06354 America/Porto_Velho Rondonia
+BR +0249-06040 America/Boa_Vista Roraima
+BR -0308-06001 America/Manaus Amazonas (east)
+BR -0640-06952 America/Eirunepe Amazonas (west)
+BR -0958-06748 America/Rio_Branco Acre
+BS +2505-07721 America/Nassau
+BT +2728+08939 Asia/Thimphu
+BW -2439+02555 Africa/Gaborone
+BY +5354+02734 Europe/Minsk
+BZ +1730-08812 America/Belize
+CA +4734-05243 America/St_Johns Newfoundland; Labrador (southeast)
+CA +4439-06336 America/Halifax Atlantic - NS (most areas); PE
+CA +4612-05957 America/Glace_Bay Atlantic - NS (Cape Breton)
+CA +4606-06447 America/Moncton Atlantic - New Brunswick
+CA +5320-06025 America/Goose_Bay Atlantic - Labrador (most areas)
+CA +5125-05707 America/Blanc-Sablon AST - QC (Lower North Shore)
+CA +4339-07923 America/Toronto Eastern - ON, QC (most areas)
+CA +4901-08816 America/Nipigon Eastern - ON, QC (no DST 1967-73)
+CA +4823-08915 America/Thunder_Bay Eastern - ON (Thunder Bay)
+CA +6344-06828 America/Iqaluit Eastern - NU (most east areas)
+CA +6608-06544 America/Pangnirtung Eastern - NU (Pangnirtung)
+CA +484531-0913718 America/Atikokan EST - ON (Atikokan); NU (Coral H)
+CA +4953-09709 America/Winnipeg Central - ON (west); Manitoba
+CA +4843-09434 America/Rainy_River Central - ON (Rainy R, Ft Frances)
+CA +744144-0944945 America/Resolute Central - NU (Resolute)
+CA +624900-0920459 America/Rankin_Inlet Central - NU (central)
+CA +5024-10439 America/Regina CST - SK (most areas)
+CA +5017-10750 America/Swift_Current CST - SK (midwest)
+CA +5333-11328 America/Edmonton Mountain - AB; BC (E); SK (W)
+CA +690650-1050310 America/Cambridge_Bay Mountain - NU (west)
+CA +6227-11421 America/Yellowknife Mountain - NT (central)
+CA +682059-1334300 America/Inuvik Mountain - NT (west)
+CA +4906-11631 America/Creston MST - BC (Creston)
+CA +5946-12014 America/Dawson_Creek MST - BC (Dawson Cr, Ft St John)
+CA +5848-12242 America/Fort_Nelson MST - BC (Ft Nelson)
+CA +6043-13503 America/Whitehorse MST - Yukon (east)
+CA +6404-13925 America/Dawson MST - Yukon (west)
+CA +4916-12307 America/Vancouver Pacific - BC (most areas)
+CC -1210+09655 Indian/Cocos
+CD -0418+01518 Africa/Kinshasa Dem. Rep. of Congo (west)
+CD -1140+02728 Africa/Lubumbashi Dem. Rep. of Congo (east)
+CF +0422+01835 Africa/Bangui
+CG -0416+01517 Africa/Brazzaville
+CH +4723+00832 Europe/Zurich
+CI +0519-00402 Africa/Abidjan
+CK -2114-15946 Pacific/Rarotonga
+CL -3327-07040 America/Santiago Chile (most areas)
+CL -5309-07055 America/Punta_Arenas Region of Magallanes
+CL -2709-10926 Pacific/Easter Easter Island
+CM +0403+00942 Africa/Douala
+CN +3114+12128 Asia/Shanghai Beijing Time
+CN +4348+08735 Asia/Urumqi Xinjiang Time
+CO +0436-07405 America/Bogota
+CR +0956-08405 America/Costa_Rica
+CU +2308-08222 America/Havana
+CV +1455-02331 Atlantic/Cape_Verde
+CW +1211-06900 America/Curacao
+CX -1025+10543 Indian/Christmas
+CY +3510+03322 Asia/Nicosia Cyprus (most areas)
+CY +3507+03357 Asia/Famagusta Northern Cyprus
+CZ +5005+01426 Europe/Prague
+DE +5230+01322 Europe/Berlin Germany (most areas)
+DE +4742+00841 Europe/Busingen Busingen
+DJ +1136+04309 Africa/Djibouti
+DK +5540+01235 Europe/Copenhagen
+DM +1518-06124 America/Dominica
+DO +1828-06954 America/Santo_Domingo
+DZ +3647+00303 Africa/Algiers
+EC -0210-07950 America/Guayaquil Ecuador (mainland)
+EC -0054-08936 Pacific/Galapagos Galapagos Islands
+EE +5925+02445 Europe/Tallinn
+EG +3003+03115 Africa/Cairo
+EH +2709-01312 Africa/El_Aaiun
+ER +1520+03853 Africa/Asmara
+ES +4024-00341 Europe/Madrid Spain (mainland)
+ES +3553-00519 Africa/Ceuta Ceuta, Melilla
+ES +2806-01524 Atlantic/Canary Canary Islands
+ET +0902+03842 Africa/Addis_Ababa
+FI +6010+02458 Europe/Helsinki
+FJ -1808+17825 Pacific/Fiji
+FK -5142-05751 Atlantic/Stanley
+FM +0725+15147 Pacific/Chuuk Chuuk/Truk, Yap
+FM +0658+15813 Pacific/Pohnpei Pohnpei/Ponape
+FM +0519+16259 Pacific/Kosrae Kosrae
+FO +6201-00646 Atlantic/Faroe
+FR +4852+00220 Europe/Paris
+GA +0023+00927 Africa/Libreville
+GB +513030-0000731 Europe/London
+GD +1203-06145 America/Grenada
+GE +4143+04449 Asia/Tbilisi
+GF +0456-05220 America/Cayenne
+GG +492717-0023210 Europe/Guernsey
+GH +0533-00013 Africa/Accra
+GI +3608-00521 Europe/Gibraltar
+GL +6411-05144 America/Nuuk Greenland (most areas)
+GL +7646-01840 America/Danmarkshavn National Park (east coast)
+GL +7029-02158 America/Scoresbysund Scoresbysund/Ittoqqortoormiit
+GL +7634-06847 America/Thule Thule/Pituffik
+GM +1328-01639 Africa/Banjul
+GN +0931-01343 Africa/Conakry
+GP +1614-06132 America/Guadeloupe
+GQ +0345+00847 Africa/Malabo
+GR +3758+02343 Europe/Athens
+GS -5416-03632 Atlantic/South_Georgia
+GT +1438-09031 America/Guatemala
+GU +1328+14445 Pacific/Guam
+GW +1151-01535 Africa/Bissau
+GY +0648-05810 America/Guyana
+HK +2217+11409 Asia/Hong_Kong
+HN +1406-08713 America/Tegucigalpa
+HR +4548+01558 Europe/Zagreb
+HT +1832-07220 America/Port-au-Prince
+HU +4730+01905 Europe/Budapest
+ID -0610+10648 Asia/Jakarta Java, Sumatra
+ID -0002+10920 Asia/Pontianak Borneo (west, central)
+ID -0507+11924 Asia/Makassar Borneo (east, south); Sulawesi/Celebes, Bali, Nusa Tengarra; Timor (west)
+ID -0232+14042 Asia/Jayapura New Guinea (West Papua / Irian Jaya); Malukus/Moluccas
+IE +5320-00615 Europe/Dublin
+IL +314650+0351326 Asia/Jerusalem
+IM +5409-00428 Europe/Isle_of_Man
+IN +2232+08822 Asia/Kolkata
+IO -0720+07225 Indian/Chagos
+IQ +3321+04425 Asia/Baghdad
+IR +3540+05126 Asia/Tehran
+IS +6409-02151 Atlantic/Reykjavik
+IT +4154+01229 Europe/Rome
+JE +491101-0020624 Europe/Jersey
+JM +175805-0764736 America/Jamaica
+JO +3157+03556 Asia/Amman
+JP +353916+1394441 Asia/Tokyo
+KE -0117+03649 Africa/Nairobi
+KG +4254+07436 Asia/Bishkek
+KH +1133+10455 Asia/Phnom_Penh
+KI +0125+17300 Pacific/Tarawa Gilbert Islands
+KI -0247-17143 Pacific/Kanton Phoenix Islands
+KI +0152-15720 Pacific/Kiritimati Line Islands
+KM -1141+04316 Indian/Comoro
+KN +1718-06243 America/St_Kitts
+KP +3901+12545 Asia/Pyongyang
+KR +3733+12658 Asia/Seoul
+KW +2920+04759 Asia/Kuwait
+KY +1918-08123 America/Cayman
+KZ +4315+07657 Asia/Almaty Kazakhstan (most areas)
+KZ +4448+06528 Asia/Qyzylorda Qyzylorda/Kyzylorda/Kzyl-Orda
+KZ +5312+06337 Asia/Qostanay Qostanay/Kostanay/Kustanay
+KZ +5017+05710 Asia/Aqtobe Aqtobe/Aktobe
+KZ +4431+05016 Asia/Aqtau Mangghystau/Mankistau
+KZ +4707+05156 Asia/Atyrau Atyrau/Atirau/Gur'yev
+KZ +5113+05121 Asia/Oral West Kazakhstan
+LA +1758+10236 Asia/Vientiane
+LB +3353+03530 Asia/Beirut
+LC +1401-06100 America/St_Lucia
+LI +4709+00931 Europe/Vaduz
+LK +0656+07951 Asia/Colombo
+LR +0618-01047 Africa/Monrovia
+LS -2928+02730 Africa/Maseru
+LT +5441+02519 Europe/Vilnius
+LU +4936+00609 Europe/Luxembourg
+LV +5657+02406 Europe/Riga
+LY +3254+01311 Africa/Tripoli
+MA +3339-00735 Africa/Casablanca
+MC +4342+00723 Europe/Monaco
+MD +4700+02850 Europe/Chisinau
+ME +4226+01916 Europe/Podgorica
+MF +1804-06305 America/Marigot
+MG -1855+04731 Indian/Antananarivo
+MH +0709+17112 Pacific/Majuro Marshall Islands (most areas)
+MH +0905+16720 Pacific/Kwajalein Kwajalein
+MK +4159+02126 Europe/Skopje
+ML +1239-00800 Africa/Bamako
+MM +1647+09610 Asia/Yangon
+MN +4755+10653 Asia/Ulaanbaatar Mongolia (most areas)
+MN +4801+09139 Asia/Hovd Bayan-Olgiy, Govi-Altai, Hovd, Uvs, Zavkhan
+MN +4804+11430 Asia/Choibalsan Dornod, Sukhbaatar
+MO +221150+1133230 Asia/Macau
+MP +1512+14545 Pacific/Saipan
+MQ +1436-06105 America/Martinique
+MR +1806-01557 Africa/Nouakchott
+MS +1643-06213 America/Montserrat
+MT +3554+01431 Europe/Malta
+MU -2010+05730 Indian/Mauritius
+MV +0410+07330 Indian/Maldives
+MW -1547+03500 Africa/Blantyre
+MX +1924-09909 America/Mexico_City Central Time
+MX +2105-08646 America/Cancun Eastern Standard Time - Quintana Roo
+MX +2058-08937 America/Merida Central Time - Campeche, Yucatan
+MX +2540-10019 America/Monterrey Central Time - Durango; Coahuila, Nuevo Leon, Tamaulipas (most areas)
+MX +2550-09730 America/Matamoros Central Time US - Coahuila, Nuevo Leon, Tamaulipas (US border)
+MX +2313-10625 America/Mazatlan Mountain Time - Baja California Sur, Nayarit, Sinaloa
+MX +2838-10605 America/Chihuahua Mountain Time - Chihuahua (most areas)
+MX +2934-10425 America/Ojinaga Mountain Time US - Chihuahua (US border)
+MX +2904-11058 America/Hermosillo Mountain Standard Time - Sonora
+MX +3232-11701 America/Tijuana Pacific Time US - Baja California
+MX +2048-10515 America/Bahia_Banderas Central Time - Bahia de Banderas
+MY +0310+10142 Asia/Kuala_Lumpur Malaysia (peninsula)
+MY +0133+11020 Asia/Kuching Sabah, Sarawak
+MZ -2558+03235 Africa/Maputo
+NA -2234+01706 Africa/Windhoek
+NC -2216+16627 Pacific/Noumea
+NE +1331+00207 Africa/Niamey
+NF -2903+16758 Pacific/Norfolk
+NG +0627+00324 Africa/Lagos
+NI +1209-08617 America/Managua
+NL +5222+00454 Europe/Amsterdam
+NO +5955+01045 Europe/Oslo
+NP +2743+08519 Asia/Kathmandu
+NR -0031+16655 Pacific/Nauru
+NU -1901-16955 Pacific/Niue
+NZ -3652+17446 Pacific/Auckland New Zealand (most areas)
+NZ -4357-17633 Pacific/Chatham Chatham Islands
+OM +2336+05835 Asia/Muscat
+PA +0858-07932 America/Panama
+PE -1203-07703 America/Lima
+PF -1732-14934 Pacific/Tahiti Society Islands
+PF -0900-13930 Pacific/Marquesas Marquesas Islands
+PF -2308-13457 Pacific/Gambier Gambier Islands
+PG -0930+14710 Pacific/Port_Moresby Papua New Guinea (most areas)
+PG -0613+15534 Pacific/Bougainville Bougainville
+PH +1435+12100 Asia/Manila
+PK +2452+06703 Asia/Karachi
+PL +5215+02100 Europe/Warsaw
+PM +4703-05620 America/Miquelon
+PN -2504-13005 Pacific/Pitcairn
+PR +182806-0660622 America/Puerto_Rico
+PS +3130+03428 Asia/Gaza Gaza Strip
+PS +313200+0350542 Asia/Hebron West Bank
+PT +3843-00908 Europe/Lisbon Portugal (mainland)
+PT +3238-01654 Atlantic/Madeira Madeira Islands
+PT +3744-02540 Atlantic/Azores Azores
+PW +0720+13429 Pacific/Palau
+PY -2516-05740 America/Asuncion
+QA +2517+05132 Asia/Qatar
+RE -2052+05528 Indian/Reunion
+RO +4426+02606 Europe/Bucharest
+RS +4450+02030 Europe/Belgrade
+RU +5443+02030 Europe/Kaliningrad MSK-01 - Kaliningrad
+RU +554521+0373704 Europe/Moscow MSK+00 - Moscow area
+# The obsolescent zone.tab format cannot represent Europe/Simferopol well.
+# Put it in RU section and list as UA. See "territorial claims" above.
+# Programs should use zone1970.tab instead; see above.
+UA +4457+03406 Europe/Simferopol Crimea
+RU +5836+04939 Europe/Kirov MSK+00 - Kirov
+RU +4844+04425 Europe/Volgograd MSK+00 - Volgograd
+RU +4621+04803 Europe/Astrakhan MSK+01 - Astrakhan
+RU +5134+04602 Europe/Saratov MSK+01 - Saratov
+RU +5420+04824 Europe/Ulyanovsk MSK+01 - Ulyanovsk
+RU +5312+05009 Europe/Samara MSK+01 - Samara, Udmurtia
+RU +5651+06036 Asia/Yekaterinburg MSK+02 - Urals
+RU +5500+07324 Asia/Omsk MSK+03 - Omsk
+RU +5502+08255 Asia/Novosibirsk MSK+04 - Novosibirsk
+RU +5322+08345 Asia/Barnaul MSK+04 - Altai
+RU +5630+08458 Asia/Tomsk MSK+04 - Tomsk
+RU +5345+08707 Asia/Novokuznetsk MSK+04 - Kemerovo
+RU +5601+09250 Asia/Krasnoyarsk MSK+04 - Krasnoyarsk area
+RU +5216+10420 Asia/Irkutsk MSK+05 - Irkutsk, Buryatia
+RU +5203+11328 Asia/Chita MSK+06 - Zabaykalsky
+RU +6200+12940 Asia/Yakutsk MSK+06 - Lena River
+RU +623923+1353314 Asia/Khandyga MSK+06 - Tomponsky, Ust-Maysky
+RU +4310+13156 Asia/Vladivostok MSK+07 - Amur River
+RU +643337+1431336 Asia/Ust-Nera MSK+07 - Oymyakonsky
+RU +5934+15048 Asia/Magadan MSK+08 - Magadan
+RU +4658+14242 Asia/Sakhalin MSK+08 - Sakhalin Island
+RU +6728+15343 Asia/Srednekolymsk MSK+08 - Sakha (E); North Kuril Is
+RU +5301+15839 Asia/Kamchatka MSK+09 - Kamchatka
+RU +6445+17729 Asia/Anadyr MSK+09 - Bering Sea
+RW -0157+03004 Africa/Kigali
+SA +2438+04643 Asia/Riyadh
+SB -0932+16012 Pacific/Guadalcanal
+SC -0440+05528 Indian/Mahe
+SD +1536+03232 Africa/Khartoum
+SE +5920+01803 Europe/Stockholm
+SG +0117+10351 Asia/Singapore
+SH -1555-00542 Atlantic/St_Helena
+SI +4603+01431 Europe/Ljubljana
+SJ +7800+01600 Arctic/Longyearbyen
+SK +4809+01707 Europe/Bratislava
+SL +0830-01315 Africa/Freetown
+SM +4355+01228 Europe/San_Marino
+SN +1440-01726 Africa/Dakar
+SO +0204+04522 Africa/Mogadishu
+SR +0550-05510 America/Paramaribo
+SS +0451+03137 Africa/Juba
+ST +0020+00644 Africa/Sao_Tome
+SV +1342-08912 America/El_Salvador
+SX +180305-0630250 America/Lower_Princes
+SY +3330+03618 Asia/Damascus
+SZ -2618+03106 Africa/Mbabane
+TC +2128-07108 America/Grand_Turk
+TD +1207+01503 Africa/Ndjamena
+TF -492110+0701303 Indian/Kerguelen
+TG +0608+00113 Africa/Lome
+TH +1345+10031 Asia/Bangkok
+TJ +3835+06848 Asia/Dushanbe
+TK -0922-17114 Pacific/Fakaofo
+TL -0833+12535 Asia/Dili
+TM +3757+05823 Asia/Ashgabat
+TN +3648+01011 Africa/Tunis
+TO -210800-1751200 Pacific/Tongatapu
+TR +4101+02858 Europe/Istanbul
+TT +1039-06131 America/Port_of_Spain
+TV -0831+17913 Pacific/Funafuti
+TW +2503+12130 Asia/Taipei
+TZ -0648+03917 Africa/Dar_es_Salaam
+UA +5026+03031 Europe/Kiev Ukraine (most areas)
+UA +4837+02218 Europe/Uzhgorod Transcarpathia
+UA +4750+03510 Europe/Zaporozhye Zaporozhye and east Lugansk
+UG +0019+03225 Africa/Kampala
+UM +2813-17722 Pacific/Midway Midway Islands
+UM +1917+16637 Pacific/Wake Wake Island
+US +404251-0740023 America/New_York Eastern (most areas)
+US +421953-0830245 America/Detroit Eastern - MI (most areas)
+US +381515-0854534 America/Kentucky/Louisville Eastern - KY (Louisville area)
+US +364947-0845057 America/Kentucky/Monticello Eastern - KY (Wayne)
+US +394606-0860929 America/Indiana/Indianapolis Eastern - IN (most areas)
+US +384038-0873143 America/Indiana/Vincennes Eastern - IN (Da, Du, K, Mn)
+US +410305-0863611 America/Indiana/Winamac Eastern - IN (Pulaski)
+US +382232-0862041 America/Indiana/Marengo Eastern - IN (Crawford)
+US +382931-0871643 America/Indiana/Petersburg Eastern - IN (Pike)
+US +384452-0850402 America/Indiana/Vevay Eastern - IN (Switzerland)
+US +415100-0873900 America/Chicago Central (most areas)
+US +375711-0864541 America/Indiana/Tell_City Central - IN (Perry)
+US +411745-0863730 America/Indiana/Knox Central - IN (Starke)
+US +450628-0873651 America/Menominee Central - MI (Wisconsin border)
+US +470659-1011757 America/North_Dakota/Center Central - ND (Oliver)
+US +465042-1012439 America/North_Dakota/New_Salem Central - ND (Morton rural)
+US +471551-1014640 America/North_Dakota/Beulah Central - ND (Mercer)
+US +394421-1045903 America/Denver Mountain (most areas)
+US +433649-1161209 America/Boise Mountain - ID (south); OR (east)
+US +332654-1120424 America/Phoenix MST - Arizona (except Navajo)
+US +340308-1181434 America/Los_Angeles Pacific
+US +611305-1495401 America/Anchorage Alaska (most areas)
+US +581807-1342511 America/Juneau Alaska - Juneau area
+US +571035-1351807 America/Sitka Alaska - Sitka area
+US +550737-1313435 America/Metlakatla Alaska - Annette Island
+US +593249-1394338 America/Yakutat Alaska - Yakutat
+US +643004-1652423 America/Nome Alaska (west)
+US +515248-1763929 America/Adak Aleutian Islands
+US +211825-1575130 Pacific/Honolulu Hawaii
+UY -345433-0561245 America/Montevideo
+UZ +3940+06648 Asia/Samarkand Uzbekistan (west)
+UZ +4120+06918 Asia/Tashkent Uzbekistan (east)
+VA +415408+0122711 Europe/Vatican
+VC +1309-06114 America/St_Vincent
+VE +1030-06656 America/Caracas
+VG +1827-06437 America/Tortola
+VI +1821-06456 America/St_Thomas
+VN +1045+10640 Asia/Ho_Chi_Minh
+VU -1740+16825 Pacific/Efate
+WF -1318-17610 Pacific/Wallis
+WS -1350-17144 Pacific/Apia
+YE +1245+04512 Asia/Aden
+YT -1247+04514 Indian/Mayotte
+ZA -2615+02800 Africa/Johannesburg
+ZM -1525+02817 Africa/Lusaka
+ZW -1750+03103 Africa/Harare
diff --git a/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/zone1970.tab b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/zone1970.tab
new file mode 100644
index 0000000000000000000000000000000000000000..c614be81f4012cb6a7aa7ef9a3303c83c51a7786
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/pytz/zoneinfo/zone1970.tab
@@ -0,0 +1,374 @@
+# tzdb timezone descriptions
+#
+# This file is in the public domain.
+#
+# From Paul Eggert (2018-06-27):
+# This file contains a table where each row stands for a timezone where
+# civil timestamps have agreed since 1970. Columns are separated by
+# a single tab. Lines beginning with '#' are comments. All text uses
+# UTF-8 encoding. The columns of the table are as follows:
+#
+# 1. The countries that overlap the timezone, as a comma-separated list
+# of ISO 3166 2-character country codes. See the file 'iso3166.tab'.
+# 2. Latitude and longitude of the timezone's principal location
+# in ISO 6709 sign-degrees-minutes-seconds format,
+# either ±DDMM±DDDMM or ±DDMMSS±DDDMMSS,
+# first latitude (+ is north), then longitude (+ is east).
+# 3. Timezone name used in value of TZ environment variable.
+# Please see the theory.html file for how these names are chosen.
+# If multiple timezones overlap a country, each has a row in the
+# table, with each column 1 containing the country code.
+# 4. Comments; present if and only if a country has multiple timezones.
+#
+# If a timezone covers multiple countries, the most-populous city is used,
+# and that country is listed first in column 1; any other countries
+# are listed alphabetically by country code. The table is sorted
+# first by country code, then (if possible) by an order within the
+# country that (1) makes some geographical sense, and (2) puts the
+# most populous timezones first, where that does not contradict (1).
+#
+# This table is intended as an aid for users, to help them select timezones
+# appropriate for their practical needs. It is not intended to take or
+# endorse any position on legal or territorial claims.
+#
+#country-
+#codes coordinates TZ comments
+AD +4230+00131 Europe/Andorra
+AE,OM +2518+05518 Asia/Dubai
+AF +3431+06912 Asia/Kabul
+AL +4120+01950 Europe/Tirane
+AM +4011+04430 Asia/Yerevan
+AQ -6617+11031 Antarctica/Casey Casey
+AQ -6835+07758 Antarctica/Davis Davis
+AQ -6736+06253 Antarctica/Mawson Mawson
+AQ -6448-06406 Antarctica/Palmer Palmer
+AQ -6734-06808 Antarctica/Rothera Rothera
+AQ -720041+0023206 Antarctica/Troll Troll
+AQ -7824+10654 Antarctica/Vostok Vostok
+AR -3436-05827 America/Argentina/Buenos_Aires Buenos Aires (BA, CF)
+AR -3124-06411 America/Argentina/Cordoba Argentina (most areas: CB, CC, CN, ER, FM, MN, SE, SF)
+AR -2447-06525 America/Argentina/Salta Salta (SA, LP, NQ, RN)
+AR -2411-06518 America/Argentina/Jujuy Jujuy (JY)
+AR -2649-06513 America/Argentina/Tucuman Tucumán (TM)
+AR -2828-06547 America/Argentina/Catamarca Catamarca (CT); Chubut (CH)
+AR -2926-06651 America/Argentina/La_Rioja La Rioja (LR)
+AR -3132-06831 America/Argentina/San_Juan San Juan (SJ)
+AR -3253-06849 America/Argentina/Mendoza Mendoza (MZ)
+AR -3319-06621 America/Argentina/San_Luis San Luis (SL)
+AR -5138-06913 America/Argentina/Rio_Gallegos Santa Cruz (SC)
+AR -5448-06818 America/Argentina/Ushuaia Tierra del Fuego (TF)
+AS,UM -1416-17042 Pacific/Pago_Pago Samoa, Midway
+AT +4813+01620 Europe/Vienna
+AU -3133+15905 Australia/Lord_Howe Lord Howe Island
+AU -5430+15857 Antarctica/Macquarie Macquarie Island
+AU -4253+14719 Australia/Hobart Tasmania
+AU -3749+14458 Australia/Melbourne Victoria
+AU -3352+15113 Australia/Sydney New South Wales (most areas)
+AU -3157+14127 Australia/Broken_Hill New South Wales (Yancowinna)
+AU -2728+15302 Australia/Brisbane Queensland (most areas)
+AU -2016+14900 Australia/Lindeman Queensland (Whitsunday Islands)
+AU -3455+13835 Australia/Adelaide South Australia
+AU -1228+13050 Australia/Darwin Northern Territory
+AU -3157+11551 Australia/Perth Western Australia (most areas)
+AU -3143+12852 Australia/Eucla Western Australia (Eucla)
+AZ +4023+04951 Asia/Baku
+BB +1306-05937 America/Barbados
+BD +2343+09025 Asia/Dhaka
+BE +5050+00420 Europe/Brussels
+BG +4241+02319 Europe/Sofia
+BM +3217-06446 Atlantic/Bermuda
+BN +0456+11455 Asia/Brunei
+BO -1630-06809 America/La_Paz
+BR -0351-03225 America/Noronha Atlantic islands
+BR -0127-04829 America/Belem Pará (east); Amapá
+BR -0343-03830 America/Fortaleza Brazil (northeast: MA, PI, CE, RN, PB)
+BR -0803-03454 America/Recife Pernambuco
+BR -0712-04812 America/Araguaina Tocantins
+BR -0940-03543 America/Maceio Alagoas, Sergipe
+BR -1259-03831 America/Bahia Bahia
+BR -2332-04637 America/Sao_Paulo Brazil (southeast: GO, DF, MG, ES, RJ, SP, PR, SC, RS)
+BR -2027-05437 America/Campo_Grande Mato Grosso do Sul
+BR -1535-05605 America/Cuiaba Mato Grosso
+BR -0226-05452 America/Santarem Pará (west)
+BR -0846-06354 America/Porto_Velho Rondônia
+BR +0249-06040 America/Boa_Vista Roraima
+BR -0308-06001 America/Manaus Amazonas (east)
+BR -0640-06952 America/Eirunepe Amazonas (west)
+BR -0958-06748 America/Rio_Branco Acre
+BT +2728+08939 Asia/Thimphu
+BY +5354+02734 Europe/Minsk
+BZ +1730-08812 America/Belize
+CA +4734-05243 America/St_Johns Newfoundland; Labrador (southeast)
+CA +4439-06336 America/Halifax Atlantic - NS (most areas); PE
+CA +4612-05957 America/Glace_Bay Atlantic - NS (Cape Breton)
+CA +4606-06447 America/Moncton Atlantic - New Brunswick
+CA +5320-06025 America/Goose_Bay Atlantic - Labrador (most areas)
+CA,BS +4339-07923 America/Toronto Eastern - ON, QC (most areas), Bahamas
+CA +4901-08816 America/Nipigon Eastern - ON, QC (no DST 1967-73)
+CA +4823-08915 America/Thunder_Bay Eastern - ON (Thunder Bay)
+CA +6344-06828 America/Iqaluit Eastern - NU (most east areas)
+CA +6608-06544 America/Pangnirtung Eastern - NU (Pangnirtung)
+CA +4953-09709 America/Winnipeg Central - ON (west); Manitoba
+CA +4843-09434 America/Rainy_River Central - ON (Rainy R, Ft Frances)
+CA +744144-0944945 America/Resolute Central - NU (Resolute)
+CA +624900-0920459 America/Rankin_Inlet Central - NU (central)
+CA +5024-10439 America/Regina CST - SK (most areas)
+CA +5017-10750 America/Swift_Current CST - SK (midwest)
+CA +5333-11328 America/Edmonton Mountain - AB; BC (E); SK (W)
+CA +690650-1050310 America/Cambridge_Bay Mountain - NU (west)
+CA +6227-11421 America/Yellowknife Mountain - NT (central)
+CA +682059-1334300 America/Inuvik Mountain - NT (west)
+CA +5946-12014 America/Dawson_Creek MST - BC (Dawson Cr, Ft St John)
+CA +5848-12242 America/Fort_Nelson MST - BC (Ft Nelson)
+CA +6043-13503 America/Whitehorse MST - Yukon (east)
+CA +6404-13925 America/Dawson MST - Yukon (west)
+CA +4916-12307 America/Vancouver Pacific - BC (most areas)
+CC -1210+09655 Indian/Cocos
+CH,DE,LI +4723+00832 Europe/Zurich Swiss time
+CI,BF,GH,GM,GN,ML,MR,SH,SL,SN,TG +0519-00402 Africa/Abidjan
+CK -2114-15946 Pacific/Rarotonga
+CL -3327-07040 America/Santiago Chile (most areas)
+CL -5309-07055 America/Punta_Arenas Region of Magallanes
+CL -2709-10926 Pacific/Easter Easter Island
+CN +3114+12128 Asia/Shanghai Beijing Time
+CN +4348+08735 Asia/Urumqi Xinjiang Time
+CO +0436-07405 America/Bogota
+CR +0956-08405 America/Costa_Rica
+CU +2308-08222 America/Havana
+CV +1455-02331 Atlantic/Cape_Verde
+CX -1025+10543 Indian/Christmas
+CY +3510+03322 Asia/Nicosia Cyprus (most areas)
+CY +3507+03357 Asia/Famagusta Northern Cyprus
+CZ,SK +5005+01426 Europe/Prague
+DE +5230+01322 Europe/Berlin Germany (most areas)
+DK +5540+01235 Europe/Copenhagen
+DO +1828-06954 America/Santo_Domingo
+DZ +3647+00303 Africa/Algiers
+EC -0210-07950 America/Guayaquil Ecuador (mainland)
+EC -0054-08936 Pacific/Galapagos Galápagos Islands
+EE +5925+02445 Europe/Tallinn
+EG +3003+03115 Africa/Cairo
+EH +2709-01312 Africa/El_Aaiun
+ES +4024-00341 Europe/Madrid Spain (mainland)
+ES +3553-00519 Africa/Ceuta Ceuta, Melilla
+ES +2806-01524 Atlantic/Canary Canary Islands
+FI,AX +6010+02458 Europe/Helsinki
+FJ -1808+17825 Pacific/Fiji
+FK -5142-05751 Atlantic/Stanley
+FM +0725+15147 Pacific/Chuuk Chuuk/Truk, Yap
+FM +0658+15813 Pacific/Pohnpei Pohnpei/Ponape
+FM +0519+16259 Pacific/Kosrae Kosrae
+FO +6201-00646 Atlantic/Faroe
+FR +4852+00220 Europe/Paris
+GB,GG,IM,JE +513030-0000731 Europe/London
+GE +4143+04449 Asia/Tbilisi
+GF +0456-05220 America/Cayenne
+GI +3608-00521 Europe/Gibraltar
+GL +6411-05144 America/Nuuk Greenland (most areas)
+GL +7646-01840 America/Danmarkshavn National Park (east coast)
+GL +7029-02158 America/Scoresbysund Scoresbysund/Ittoqqortoormiit
+GL +7634-06847 America/Thule Thule/Pituffik
+GR +3758+02343 Europe/Athens
+GS -5416-03632 Atlantic/South_Georgia
+GT +1438-09031 America/Guatemala
+GU,MP +1328+14445 Pacific/Guam
+GW +1151-01535 Africa/Bissau
+GY +0648-05810 America/Guyana
+HK +2217+11409 Asia/Hong_Kong
+HN +1406-08713 America/Tegucigalpa
+HT +1832-07220 America/Port-au-Prince
+HU +4730+01905 Europe/Budapest
+ID -0610+10648 Asia/Jakarta Java, Sumatra
+ID -0002+10920 Asia/Pontianak Borneo (west, central)
+ID -0507+11924 Asia/Makassar Borneo (east, south); Sulawesi/Celebes, Bali, Nusa Tengarra; Timor (west)
+ID -0232+14042 Asia/Jayapura New Guinea (West Papua / Irian Jaya); Malukus/Moluccas
+IE +5320-00615 Europe/Dublin
+IL +314650+0351326 Asia/Jerusalem
+IN +2232+08822 Asia/Kolkata
+IO -0720+07225 Indian/Chagos
+IQ +3321+04425 Asia/Baghdad
+IR +3540+05126 Asia/Tehran
+IS +6409-02151 Atlantic/Reykjavik
+IT,SM,VA +4154+01229 Europe/Rome
+JM +175805-0764736 America/Jamaica
+JO +3157+03556 Asia/Amman
+JP +353916+1394441 Asia/Tokyo
+KE,DJ,ER,ET,KM,MG,SO,TZ,UG,YT -0117+03649 Africa/Nairobi
+KG +4254+07436 Asia/Bishkek
+KI +0125+17300 Pacific/Tarawa Gilbert Islands
+KI -0247-17143 Pacific/Kanton Phoenix Islands
+KI +0152-15720 Pacific/Kiritimati Line Islands
+KP +3901+12545 Asia/Pyongyang
+KR +3733+12658 Asia/Seoul
+KZ +4315+07657 Asia/Almaty Kazakhstan (most areas)
+KZ +4448+06528 Asia/Qyzylorda Qyzylorda/Kyzylorda/Kzyl-Orda
+KZ +5312+06337 Asia/Qostanay Qostanay/Kostanay/Kustanay
+KZ +5017+05710 Asia/Aqtobe Aqtöbe/Aktobe
+KZ +4431+05016 Asia/Aqtau Mangghystaū/Mankistau
+KZ +4707+05156 Asia/Atyrau Atyraū/Atirau/Gur'yev
+KZ +5113+05121 Asia/Oral West Kazakhstan
+LB +3353+03530 Asia/Beirut
+LK +0656+07951 Asia/Colombo
+LR +0618-01047 Africa/Monrovia
+LT +5441+02519 Europe/Vilnius
+LU +4936+00609 Europe/Luxembourg
+LV +5657+02406 Europe/Riga
+LY +3254+01311 Africa/Tripoli
+MA +3339-00735 Africa/Casablanca
+MC +4342+00723 Europe/Monaco
+MD +4700+02850 Europe/Chisinau
+MH +0709+17112 Pacific/Majuro Marshall Islands (most areas)
+MH +0905+16720 Pacific/Kwajalein Kwajalein
+MM +1647+09610 Asia/Yangon
+MN +4755+10653 Asia/Ulaanbaatar Mongolia (most areas)
+MN +4801+09139 Asia/Hovd Bayan-Ölgii, Govi-Altai, Hovd, Uvs, Zavkhan
+MN +4804+11430 Asia/Choibalsan Dornod, Sükhbaatar
+MO +221150+1133230 Asia/Macau
+MQ +1436-06105 America/Martinique
+MT +3554+01431 Europe/Malta
+MU -2010+05730 Indian/Mauritius
+MV +0410+07330 Indian/Maldives
+MX +1924-09909 America/Mexico_City Central Time
+MX +2105-08646 America/Cancun Eastern Standard Time - Quintana Roo
+MX +2058-08937 America/Merida Central Time - Campeche, Yucatán
+MX +2540-10019 America/Monterrey Central Time - Durango; Coahuila, Nuevo León, Tamaulipas (most areas)
+MX +2550-09730 America/Matamoros Central Time US - Coahuila, Nuevo León, Tamaulipas (US border)
+MX +2313-10625 America/Mazatlan Mountain Time - Baja California Sur, Nayarit, Sinaloa
+MX +2838-10605 America/Chihuahua Mountain Time - Chihuahua (most areas)
+MX +2934-10425 America/Ojinaga Mountain Time US - Chihuahua (US border)
+MX +2904-11058 America/Hermosillo Mountain Standard Time - Sonora
+MX +3232-11701 America/Tijuana Pacific Time US - Baja California
+MX +2048-10515 America/Bahia_Banderas Central Time - Bahía de Banderas
+MY +0310+10142 Asia/Kuala_Lumpur Malaysia (peninsula)
+MY +0133+11020 Asia/Kuching Sabah, Sarawak
+MZ,BI,BW,CD,MW,RW,ZM,ZW -2558+03235 Africa/Maputo Central Africa Time
+NA -2234+01706 Africa/Windhoek
+NC -2216+16627 Pacific/Noumea
+NF -2903+16758 Pacific/Norfolk
+NG,AO,BJ,CD,CF,CG,CM,GA,GQ,NE +0627+00324 Africa/Lagos West Africa Time
+NI +1209-08617 America/Managua
+NL +5222+00454 Europe/Amsterdam
+NO,SJ +5955+01045 Europe/Oslo
+NP +2743+08519 Asia/Kathmandu
+NR -0031+16655 Pacific/Nauru
+NU -1901-16955 Pacific/Niue
+NZ,AQ -3652+17446 Pacific/Auckland New Zealand time
+NZ -4357-17633 Pacific/Chatham Chatham Islands
+PA,CA,KY +0858-07932 America/Panama EST - Panama, Cayman, ON (Atikokan), NU (Coral H)
+PE -1203-07703 America/Lima
+PF -1732-14934 Pacific/Tahiti Society Islands
+PF -0900-13930 Pacific/Marquesas Marquesas Islands
+PF -2308-13457 Pacific/Gambier Gambier Islands
+PG,AQ -0930+14710 Pacific/Port_Moresby Papua New Guinea (most areas), Dumont d'Urville
+PG -0613+15534 Pacific/Bougainville Bougainville
+PH +1435+12100 Asia/Manila
+PK +2452+06703 Asia/Karachi
+PL +5215+02100 Europe/Warsaw
+PM +4703-05620 America/Miquelon
+PN -2504-13005 Pacific/Pitcairn
+PR,AG,CA,AI,AW,BL,BQ,CW,DM,GD,GP,KN,LC,MF,MS,SX,TT,VC,VG,VI +182806-0660622 America/Puerto_Rico AST
+PS +3130+03428 Asia/Gaza Gaza Strip
+PS +313200+0350542 Asia/Hebron West Bank
+PT +3843-00908 Europe/Lisbon Portugal (mainland)
+PT +3238-01654 Atlantic/Madeira Madeira Islands
+PT +3744-02540 Atlantic/Azores Azores
+PW +0720+13429 Pacific/Palau
+PY -2516-05740 America/Asuncion
+QA,BH +2517+05132 Asia/Qatar
+RE,TF -2052+05528 Indian/Reunion Réunion, Crozet, Scattered Islands
+RO +4426+02606 Europe/Bucharest
+RS,BA,HR,ME,MK,SI +4450+02030 Europe/Belgrade
+RU +5443+02030 Europe/Kaliningrad MSK-01 - Kaliningrad
+RU +554521+0373704 Europe/Moscow MSK+00 - Moscow area
+# Mention RU and UA alphabetically. See "territorial claims" above.
+RU,UA +4457+03406 Europe/Simferopol Crimea
+RU +5836+04939 Europe/Kirov MSK+00 - Kirov
+RU +4844+04425 Europe/Volgograd MSK+00 - Volgograd
+RU +4621+04803 Europe/Astrakhan MSK+01 - Astrakhan
+RU +5134+04602 Europe/Saratov MSK+01 - Saratov
+RU +5420+04824 Europe/Ulyanovsk MSK+01 - Ulyanovsk
+RU +5312+05009 Europe/Samara MSK+01 - Samara, Udmurtia
+RU +5651+06036 Asia/Yekaterinburg MSK+02 - Urals
+RU +5500+07324 Asia/Omsk MSK+03 - Omsk
+RU +5502+08255 Asia/Novosibirsk MSK+04 - Novosibirsk
+RU +5322+08345 Asia/Barnaul MSK+04 - Altai
+RU +5630+08458 Asia/Tomsk MSK+04 - Tomsk
+RU +5345+08707 Asia/Novokuznetsk MSK+04 - Kemerovo
+RU +5601+09250 Asia/Krasnoyarsk MSK+04 - Krasnoyarsk area
+RU +5216+10420 Asia/Irkutsk MSK+05 - Irkutsk, Buryatia
+RU +5203+11328 Asia/Chita MSK+06 - Zabaykalsky
+RU +6200+12940 Asia/Yakutsk MSK+06 - Lena River
+RU +623923+1353314 Asia/Khandyga MSK+06 - Tomponsky, Ust-Maysky
+RU +4310+13156 Asia/Vladivostok MSK+07 - Amur River
+RU +643337+1431336 Asia/Ust-Nera MSK+07 - Oymyakonsky
+RU +5934+15048 Asia/Magadan MSK+08 - Magadan
+RU +4658+14242 Asia/Sakhalin MSK+08 - Sakhalin Island
+RU +6728+15343 Asia/Srednekolymsk MSK+08 - Sakha (E); North Kuril Is
+RU +5301+15839 Asia/Kamchatka MSK+09 - Kamchatka
+RU +6445+17729 Asia/Anadyr MSK+09 - Bering Sea
+SA,AQ,KW,YE +2438+04643 Asia/Riyadh Arabia, Syowa
+SB -0932+16012 Pacific/Guadalcanal
+SC -0440+05528 Indian/Mahe
+SD +1536+03232 Africa/Khartoum
+SE +5920+01803 Europe/Stockholm
+SG,MY +0117+10351 Asia/Singapore Singapore, peninsular Malaysia
+SR +0550-05510 America/Paramaribo
+SS +0451+03137 Africa/Juba
+ST +0020+00644 Africa/Sao_Tome
+SV +1342-08912 America/El_Salvador
+SY +3330+03618 Asia/Damascus
+TC +2128-07108 America/Grand_Turk
+TD +1207+01503 Africa/Ndjamena
+TF -492110+0701303 Indian/Kerguelen Kerguelen, St Paul Island, Amsterdam Island
+TH,KH,LA,VN +1345+10031 Asia/Bangkok Indochina (most areas)
+TJ +3835+06848 Asia/Dushanbe
+TK -0922-17114 Pacific/Fakaofo
+TL -0833+12535 Asia/Dili
+TM +3757+05823 Asia/Ashgabat
+TN +3648+01011 Africa/Tunis
+TO -210800-1751200 Pacific/Tongatapu
+TR +4101+02858 Europe/Istanbul
+TV -0831+17913 Pacific/Funafuti
+TW +2503+12130 Asia/Taipei
+UA +5026+03031 Europe/Kiev Ukraine (most areas)
+UA +4837+02218 Europe/Uzhgorod Transcarpathia
+UA +4750+03510 Europe/Zaporozhye Zaporozhye and east Lugansk
+UM +1917+16637 Pacific/Wake Wake Island
+US +404251-0740023 America/New_York Eastern (most areas)
+US +421953-0830245 America/Detroit Eastern - MI (most areas)
+US +381515-0854534 America/Kentucky/Louisville Eastern - KY (Louisville area)
+US +364947-0845057 America/Kentucky/Monticello Eastern - KY (Wayne)
+US +394606-0860929 America/Indiana/Indianapolis Eastern - IN (most areas)
+US +384038-0873143 America/Indiana/Vincennes Eastern - IN (Da, Du, K, Mn)
+US +410305-0863611 America/Indiana/Winamac Eastern - IN (Pulaski)
+US +382232-0862041 America/Indiana/Marengo Eastern - IN (Crawford)
+US +382931-0871643 America/Indiana/Petersburg Eastern - IN (Pike)
+US +384452-0850402 America/Indiana/Vevay Eastern - IN (Switzerland)
+US +415100-0873900 America/Chicago Central (most areas)
+US +375711-0864541 America/Indiana/Tell_City Central - IN (Perry)
+US +411745-0863730 America/Indiana/Knox Central - IN (Starke)
+US +450628-0873651 America/Menominee Central - MI (Wisconsin border)
+US +470659-1011757 America/North_Dakota/Center Central - ND (Oliver)
+US +465042-1012439 America/North_Dakota/New_Salem Central - ND (Morton rural)
+US +471551-1014640 America/North_Dakota/Beulah Central - ND (Mercer)
+US +394421-1045903 America/Denver Mountain (most areas)
+US +433649-1161209 America/Boise Mountain - ID (south); OR (east)
+US,CA +332654-1120424 America/Phoenix MST - Arizona (except Navajo), Creston BC
+US +340308-1181434 America/Los_Angeles Pacific
+US +611305-1495401 America/Anchorage Alaska (most areas)
+US +581807-1342511 America/Juneau Alaska - Juneau area
+US +571035-1351807 America/Sitka Alaska - Sitka area
+US +550737-1313435 America/Metlakatla Alaska - Annette Island
+US +593249-1394338 America/Yakutat Alaska - Yakutat
+US +643004-1652423 America/Nome Alaska (west)
+US +515248-1763929 America/Adak Aleutian Islands
+US,UM +211825-1575130 Pacific/Honolulu Hawaii
+UY -345433-0561245 America/Montevideo
+UZ +3940+06648 Asia/Samarkand Uzbekistan (west)
+UZ +4120+06918 Asia/Tashkent Uzbekistan (east)
+VE +1030-06656 America/Caracas
+VN +1045+10640 Asia/Ho_Chi_Minh Vietnam (south)
+VU -1740+16825 Pacific/Efate
+WF -1318-17610 Pacific/Wallis
+WS -1350-17144 Pacific/Apia
+ZA,LS,SZ -2615+02800 Africa/Johannesburg
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/AUTHORS.txt b/monEnvTP/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/AUTHORS.txt
new file mode 100644
index 0000000000000000000000000000000000000000..72c87d7d38ae7bf859717c333a5ee8230f6ce624
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/AUTHORS.txt
@@ -0,0 +1,562 @@
+A_Rog <adam.thomas.rogerson@gmail.com>
+Aakanksha Agrawal <11389424+rasponic@users.noreply.github.com>
+Abhinav Sagar <40603139+abhinavsagar@users.noreply.github.com>
+ABHYUDAY PRATAP SINGH <abhyudaypratap@outlook.com>
+abs51295 <aagams68@gmail.com>
+AceGentile <ventogrigio83@gmail.com>
+Adam Chainz <adam@adamj.eu>
+Adam Tse <adam.tse@me.com>
+Adam Tse <atse@users.noreply.github.com>
+Adam Wentz <awentz@theonion.com>
+admin <admin@admins-MacBook-Pro.local>
+Adrien Morison <adrien.morison@gmail.com>
+ahayrapetyan <ahayrapetya2@bloomberg.net>
+Ahilya <ahilya16009@iiitd.ac.in>
+AinsworthK <yat626@yahoo.com.hk>
+Akash Srivastava <akashsrivastava4927@gmail.com>
+Alan Yee <alyee@ucsd.edu>
+Albert Tugushev <albert@tugushev.ru>
+Albert-Guan <albert.guan94@gmail.com>
+albertg <albert.guan94@gmail.com>
+Aleks Bunin <github@compuix.com>
+Alethea Flowers <magicalgirl@google.com>
+Alex Gaynor <alex.gaynor@gmail.com>
+Alex Grönholm <alex.gronholm@nextday.fi>
+Alex Loosley <a.loosley@reply.de>
+Alex Morega <alex@grep.ro>
+Alex Stachowiak <alexander@computer.org>
+Alexander Shtyrov <rawzausho@gmail.com>
+Alexandre Conrad <alexandre.conrad@gmail.com>
+Alexey Popravka <a.popravka@smartweb.com.ua>
+Alexey Popravka <alexey.popravka@horsedevel.com>
+Alli <alzeih@users.noreply.github.com>
+Ami Fischman <ami@fischman.org>
+Ananya Maiti <ananyoevo@gmail.com>
+Anatoly Techtonik <techtonik@gmail.com>
+Anders Kaseorg <andersk@mit.edu>
+Andreas Lutro <anlutro@gmail.com>
+Andrei Geacar <andrei.geacar@gmail.com>
+Andrew Gaul <andrew@gaul.org>
+Andrey Bulgakov <mail@andreiko.ru>
+Andrés Delfino <34587441+andresdelfino@users.noreply.github.com>
+Andrés Delfino <adelfino@gmail.com>
+Andy Freeland <andy.freeland@redjack.com>
+Andy Freeland <andy@andyfreeland.net>
+Andy Kluger <AndydeCleyre@users.noreply.github.com>
+Ani Hayrapetyan <ahayrapetya2@bloomberg.net>
+Aniruddha Basak <codewithaniruddha@gmail.com>
+Anish Tambe <anish.tambe@yahoo.in>
+Anrs Hu <anrs@douban.com>
+Anthony Sottile <asottile@umich.edu>
+Antoine Musso <hashar@free.fr>
+Anton Ovchinnikov <revolver112@gmail.com>
+Anton Patrushev <apatrushev@gmail.com>
+Antonio Alvarado Hernandez <tnotstar@gmail.com>
+Antony Lee <anntzer.lee@gmail.com>
+Antti Kaihola <akaihol+github@ambitone.com>
+Anubhav Patel <anubhavp28@gmail.com>
+Anuj Godase <godaseanuj@gmail.com>
+AQNOUCH Mohammed <aqnouch.mohammed@gmail.com>
+AraHaan <seandhunt_7@yahoo.com>
+Arindam Choudhury <arindam@live.com>
+Armin Ronacher <armin.ronacher@active-4.com>
+Artem <duketemon@users.noreply.github.com>
+Ashley Manton <ajd.manton@googlemail.com>
+Ashwin Ramaswami <aramaswamis@gmail.com>
+atse <atse@users.noreply.github.com>
+Atsushi Odagiri <aodagx@gmail.com>
+Avner Cohen <israbirding@gmail.com>
+Baptiste Mispelon <bmispelon@gmail.com>
+Barney Gale <barney.gale@gmail.com>
+barneygale <barney.gale@gmail.com>
+Bartek Ogryczak <b.ogryczak@gmail.com>
+Bastian Venthur <mail@venthur.de>
+Ben Darnell <ben@bendarnell.com>
+Ben Hoyt <benhoyt@gmail.com>
+Ben Rosser <rosser.bjr@gmail.com>
+Bence Nagy <bence@underyx.me>
+Benjamin Peterson <benjamin@python.org>
+Benjamin VanEvery <ben@simondata.com>
+Benoit Pierre <benoit.pierre@gmail.com>
+Berker Peksag <berker.peksag@gmail.com>
+Bernardo B. Marques <bernardo.fire@gmail.com>
+Bernhard M. Wiedemann <bwiedemann@suse.de>
+Bertil Hatt <bertil.hatt@farfetch.com>
+Bogdan Opanchuk <bogdan@opanchuk.net>
+BorisZZZ <BorisZZZ@users.noreply.github.com>
+Brad Erickson <eosrei@gmail.com>
+Bradley Ayers <bradley.ayers@gmail.com>
+Brandon L. Reiss <brandon@damyata.co>
+Brandt Bucher <brandtbucher@gmail.com>
+Brett Randall <javabrett@gmail.com>
+Brian Cristante <33549821+brcrista@users.noreply.github.com>
+Brian Cristante <brcrista@microsoft.com>
+Brian Rosner <brosner@gmail.com>
+BrownTruck <BrownTruck@users.noreply.github.com>
+Bruno Oliveira <nicoddemus@gmail.com>
+Bruno Renié <brutasse@gmail.com>
+Bstrdsmkr <bstrdsmkr@gmail.com>
+Buck Golemon <buck@yelp.com>
+burrows <burrows@preveil.com>
+Bussonnier Matthias <bussonniermatthias@gmail.com>
+c22 <c22@users.noreply.github.com>
+Caleb Martinez <accounts@calebmartinez.com>
+Calvin Smith <eukaryote@users.noreply.github.com>
+Carl Meyer <carl@oddbird.net>
+Carlos Liam <carlos@aarzee.me>
+Carol Willing <carolcode@willingconsulting.com>
+Carter Thayer <carterwthayer@gmail.com>
+Cass <cass.petrus@gmail.com>
+Chandrasekhar Atina <chandu.atina@gmail.com>
+Chih-Hsuan Yen <yan12125@gmail.com>
+Chih-Hsuan Yen <yen@chyen.cc>
+Chris Brinker <chris.brinker@gmail.com>
+Chris Hunt <chrahunt@gmail.com>
+Chris Jerdonek <chris.jerdonek@gmail.com>
+Chris McDonough <chrism@plope.com>
+Chris Wolfe <chriswwolfe@gmail.com>
+Christian Heimes <christian@python.org>
+Christian Oudard <christian.oudard@gmail.com>
+Christopher Hunt <chrahunt@gmail.com>
+Christopher Snyder <cnsnyder@users.noreply.github.com>
+Clark Boylan <clark.boylan@gmail.com>
+Clay McClure <clay@daemons.net>
+Cody <Purring@users.noreply.github.com>
+Cody Soyland <codysoyland@gmail.com>
+Colin Watson <cjwatson@debian.org>
+Connor Osborn <cdosborn@email.arizona.edu>
+Cooper Lees <me@cooperlees.com>
+Cooper Ry Lees <me@cooperlees.com>
+Cory Benfield <lukasaoz@gmail.com>
+Cory Wright <corywright@gmail.com>
+Craig Kerstiens <craig.kerstiens@gmail.com>
+Cristian Sorinel <cristian.sorinel@gmail.com>
+Curtis Doty <Curtis@GreenKey.net>
+cytolentino <ctolentino8@bloomberg.net>
+Damian Quiroga <qdamian@gmail.com>
+Dan Black <dyspop@gmail.com>
+Dan Savilonis <djs@n-cube.org>
+Dan Sully <daniel-github@electricrain.com>
+daniel <mcdonaldd@unimelb.edu.au>
+Daniel Collins <accounts@dac.io>
+Daniel Hahler <git@thequod.de>
+Daniel Holth <dholth@fastmail.fm>
+Daniel Jost <torpedojost@gmail.com>
+Daniel Shaulov <daniel.shaulov@gmail.com>
+Daniele Esposti <expobrain@users.noreply.github.com>
+Daniele Procida <daniele@vurt.org>
+Danny Hermes <daniel.j.hermes@gmail.com>
+Dav Clark <davclark@gmail.com>
+Dave Abrahams <dave@boostpro.com>
+Dave Jones <dave@waveform.org.uk>
+David Aguilar <davvid@gmail.com>
+David Black <db@d1b.org>
+David Bordeynik <david.bordeynik@gmail.com>
+David Bordeynik <david@zebra-med.com>
+David Caro <david@dcaro.es>
+David Evans <d@drhevans.com>
+David Linke <dr.david.linke@gmail.com>
+David Pursehouse <david.pursehouse@gmail.com>
+David Tucker <david@tucker.name>
+David Wales <daviewales@gmail.com>
+Davidovich <david.genest@gmail.com>
+derwolfe <chriswwolfe@gmail.com>
+Desetude <harry@desetude.com>
+Diego Caraballo <diegocaraballo84@gmail.com>
+DiegoCaraballo <diegocaraballo84@gmail.com>
+Dmitry Gladkov <dmitry.gladkov@gmail.com>
+Domen Kožar <domen@dev.si>
+Donald Stufft <donald@stufft.io>
+Dongweiming <dongweiming@admaster.com.cn>
+Douglas Thor <dougthor42@users.noreply.github.com>
+DrFeathers <WilliamGeorgeBurgess@gmail.com>
+Dustin Ingram <di@di.codes>
+Dwayne Bailey <dwayne@translate.org.za>
+Ed Morley <501702+edmorley@users.noreply.github.com>
+Ed Morley <emorley@mozilla.com>
+Eitan Adler <lists@eitanadler.com>
+ekristina <panacejja@gmail.com>
+elainechan <elaine.chan@outlook.com>
+Eli Schwartz <eschwartz93@gmail.com>
+Eli Schwartz <eschwartz@archlinux.org>
+Emil Burzo <contact@emilburzo.com>
+Emil Styrke <emil.styrke@gmail.com>
+Endoh Takanao <djmchl@gmail.com>
+enoch <lanxenet@gmail.com>
+Erdinc Mutlu <erdinc_mutlu@yahoo.com>
+Eric Gillingham <Gillingham@bikezen.net>
+Eric Hanchrow <eric.hanchrow@gmail.com>
+Eric Hopper <hopper@omnifarious.org>
+Erik M. Bray <embray@stsci.edu>
+Erik Rose <erik@mozilla.com>
+Ernest W Durbin III <ewdurbin@gmail.com>
+Ernest W. Durbin III <ewdurbin@gmail.com>
+Erwin Janssen <erwinjanssen@outlook.com>
+Eugene Vereshchagin <evvers@gmail.com>
+everdimension <everdimension@gmail.com>
+Felix Yan <felixonmars@archlinux.org>
+fiber-space <fiber-space@users.noreply.github.com>
+Filip Kokosiński <filip.kokosinski@gmail.com>
+Florian Briand <ownerfrance+github@hotmail.com>
+Florian Rathgeber <florian.rathgeber@gmail.com>
+Francesco <f.guerrieri@gmail.com>
+Francesco Montesano <franz.bergesund@gmail.com>
+Frost Ming <mianghong@gmail.com>
+Gabriel Curio <g.curio@gmail.com>
+Gabriel de Perthuis <g2p.code@gmail.com>
+Garry Polley <garrympolley@gmail.com>
+gdanielson <graeme.danielson@gmail.com>
+Geoffrey Lehée <geoffrey@lehee.name>
+Geoffrey Sneddon <me@gsnedders.com>
+George Song <george@55minutes.com>
+Georgi Valkov <georgi.t.valkov@gmail.com>
+Giftlin Rajaiah <giftlin.rgn@gmail.com>
+gizmoguy1 <gizmoguy1@gmail.com>
+gkdoc <40815324+gkdoc@users.noreply.github.com>
+Gopinath M <31352222+mgopi1990@users.noreply.github.com>
+GOTO Hayato <3532528+gh640@users.noreply.github.com>
+gpiks <gaurav.pikale@gmail.com>
+Guilherme Espada <porcariadagata@gmail.com>
+Guy Rozendorn <guy@rzn.co.il>
+gzpan123 <gzpan123@gmail.com>
+Hanjun Kim <hallazzang@gmail.com>
+Hari Charan <hcharan997@gmail.com>
+Harsh Vardhan <harsh59v@gmail.com>
+Herbert Pfennig <herbert@albinen.com>
+Hsiaoming Yang <lepture@me.com>
+Hugo <hugovk@users.noreply.github.com>
+Hugo Lopes Tavares <hltbra@gmail.com>
+Hugo van Kemenade <hugovk@users.noreply.github.com>
+hugovk <hugovk@users.noreply.github.com>
+Hynek Schlawack <hs@ox.cx>
+Ian Bicking <ianb@colorstudy.com>
+Ian Cordasco <graffatcolmingov@gmail.com>
+Ian Lee <IanLee1521@gmail.com>
+Ian Stapleton Cordasco <graffatcolmingov@gmail.com>
+Ian Wienand <ian@wienand.org>
+Ian Wienand <iwienand@redhat.com>
+Igor Kuzmitshov <kuzmiigo@gmail.com>
+Igor Sobreira <igor@igorsobreira.com>
+Ilya Baryshev <baryshev@gmail.com>
+INADA Naoki <songofacandy@gmail.com>
+Ionel Cristian Mărieș <contact@ionelmc.ro>
+Ionel Maries Cristian <ionel.mc@gmail.com>
+Ivan Pozdeev <vano@mail.mipt.ru>
+Jacob Kim <me@thejacobkim.com>
+jakirkham <jakirkham@gmail.com>
+Jakub Stasiak <kuba.stasiak@gmail.com>
+Jakub Vysoky <jakub@borka.cz>
+Jakub Wilk <jwilk@jwilk.net>
+James Cleveland <jamescleveland@gmail.com>
+James Cleveland <radiosilence@users.noreply.github.com>
+James Firth <hello@james-firth.com>
+James Polley <jp@jamezpolley.com>
+Jan Pokorný <jpokorny@redhat.com>
+Jannis Leidel <jannis@leidel.info>
+jarondl <me@jarondl.net>
+Jason R. Coombs <jaraco@jaraco.com>
+Jay Graves <jay@skabber.com>
+Jean-Christophe Fillion-Robin <jchris.fillionr@kitware.com>
+Jeff Barber <jbarber@computer.org>
+Jeff Dairiki <dairiki@dairiki.org>
+Jelmer Vernooij <jelmer@jelmer.uk>
+jenix21 <devfrog@gmail.com>
+Jeremy Stanley <fungi@yuggoth.org>
+Jeremy Zafran <jzafran@users.noreply.github.com>
+Jiashuo Li <jiasli@microsoft.com>
+Jim Garrison <jim@garrison.cc>
+Jivan Amara <Development@JivanAmara.net>
+John Paton <j.paton@catawiki.nl>
+John-Scott Atlakson <john.scott.atlakson@gmail.com>
+johnthagen <johnthagen@gmail.com>
+johnthagen <johnthagen@users.noreply.github.com>
+Jon Banafato <jon@jonafato.com>
+Jon Dufresne <jon.dufresne@gmail.com>
+Jon Parise <jon@indelible.org>
+Jonas Nockert <jonasnockert@gmail.com>
+Jonathan Herbert <foohyfooh@gmail.com>
+Joost Molenaar <j.j.molenaar@gmail.com>
+Jorge Niedbalski <niedbalski@gmail.com>
+Joseph Long <jdl@fastmail.fm>
+Josh Bronson <jabronson@gmail.com>
+Josh Hansen <josh@skwash.net>
+Josh Schneier <josh.schneier@gmail.com>
+Juanjo Bazán <jjbazan@gmail.com>
+Julian Berman <Julian@GrayVines.com>
+Julian Gethmann <julian.gethmann@kit.edu>
+Julien Demoor <julien@jdemoor.com>
+jwg4 <jack.grahl@yahoo.co.uk>
+Jyrki Pulliainen <jyrki@spotify.com>
+Kai Chen <kaichen120@gmail.com>
+Kamal Bin Mustafa <kamal@smach.net>
+kaustav haldar <hi@kaustav.me>
+keanemind <keanemind@gmail.com>
+Keith Maxwell <keith.maxwell@gmail.com>
+Kelsey Hightower <kelsey.hightower@gmail.com>
+Kenneth Belitzky <kenny@belitzky.com>
+Kenneth Reitz <me@kennethreitz.com>
+Kenneth Reitz <me@kennethreitz.org>
+Kevin Burke <kev@inburke.com>
+Kevin Carter <kevin.carter@rackspace.com>
+Kevin Frommelt <kevin.frommelt@webfilings.com>
+Kevin R Patterson <kevin.r.patterson@intel.com>
+Kexuan Sun <me@kianasun.com>
+Kit Randel <kit@nocturne.net.nz>
+kpinc <kop@meme.com>
+Krishna Oza <krishoza15sep@gmail.com>
+Kumar McMillan <kumar.mcmillan@gmail.com>
+Kyle Persohn <kyle.persohn@gmail.com>
+lakshmanaram <lakshmanaram.n@gmail.com>
+Laszlo Kiss-Kollar <kiss.kollar.laszlo@gmail.com>
+Laurent Bristiel <laurent@bristiel.com>
+Laurie Opperman <laurie@sitesee.com.au>
+Leon Sasson <leonsassonha@gmail.com>
+Lev Givon <lev@columbia.edu>
+Lincoln de Sousa <lincoln@comum.org>
+Lipis <lipiridis@gmail.com>
+Loren Carvalho <lcarvalho@linkedin.com>
+Lucas Cimon <lucas.cimon@gmail.com>
+Ludovic Gasc <gmludo@gmail.com>
+Luke Macken <lmacken@redhat.com>
+Luo Jiebin <luo.jiebin@qq.com>
+luojiebin <luojiebin@users.noreply.github.com>
+luz.paz <luzpaz@users.noreply.github.com>
+László Kiss Kollár <lkisskollar@bloomberg.net>
+László Kiss Kollár <lkollar@users.noreply.github.com>
+Marc Abramowitz <marc@marc-abramowitz.com>
+Marc Tamlyn <marc.tamlyn@gmail.com>
+Marcus Smith <qwcode@gmail.com>
+Mariatta <Mariatta@users.noreply.github.com>
+Mark Kohler <mark.kohler@proteinsimple.com>
+Mark Williams <markrwilliams@gmail.com>
+Mark Williams <mrw@enotuniq.org>
+Markus Hametner <fin+github@xbhd.org>
+Masaki <mk5986@nyu.edu>
+Masklinn <bitbucket.org@masklinn.net>
+Matej Stuchlik <mstuchli@redhat.com>
+Mathew Jennings <mjennings@foursquare.com>
+Mathieu Bridon <bochecha@daitauha.fr>
+Matt Good <matt@matt-good.net>
+Matt Maker <trip@monstro.us>
+Matt Robenolt <matt@ydekproductions.com>
+matthew <matthew@trumbell.net>
+Matthew Einhorn <moiein2000@gmail.com>
+Matthew Gilliard <matthew.gilliard@gmail.com>
+Matthew Iversen <teh.ivo@gmail.com>
+Matthew Trumbell <matthew@thirdstonepartners.com>
+Matthew Willson <matthew@swiftkey.com>
+Matthias Bussonnier <bussonniermatthias@gmail.com>
+mattip <matti.picus@gmail.com>
+Maxim Kurnikov <maxim.kurnikov@gmail.com>
+Maxime Rouyrre <rouyrre+git@gmail.com>
+mayeut <mayeut@users.noreply.github.com>
+mbaluna <44498973+mbaluna@users.noreply.github.com>
+mdebi <17590103+mdebi@users.noreply.github.com>
+memoselyk <memoselyk@gmail.com>
+Michael <michael-k@users.noreply.github.com>
+Michael Aquilina <michaelaquilina@gmail.com>
+Michael E. Karpeles <michael.karpeles@gmail.com>
+Michael Klich <michal@michalklich.com>
+Michael Williamson <mike@zwobble.org>
+michaelpacer <michaelpacer@gmail.com>
+Mickaël Schoentgen <mschoentgen@nuxeo.com>
+Miguel Araujo Perez <miguel.araujo.perez@gmail.com>
+Mihir Singh <git.service@mihirsingh.com>
+Mike <mikeh@blur.com>
+Mike Hendricks <mikeh@blur.com>
+Min RK <benjaminrk@gmail.com>
+MinRK <benjaminrk@gmail.com>
+Miro Hrončok <miro@hroncok.cz>
+Monica Baluna <mbaluna@bloomberg.net>
+montefra <franz.bergesund@gmail.com>
+Monty Taylor <mordred@inaugust.com>
+Nate Coraor <nate@bx.psu.edu>
+Nathaniel J. Smith <njs@pobox.com>
+Nehal J Wani <nehaljw.kkd1@gmail.com>
+Neil Botelho <neil.botelho321@gmail.com>
+Nick Coghlan <ncoghlan@gmail.com>
+Nick Stenning <nick@whiteink.com>
+Nick Timkovich <prometheus235@gmail.com>
+Nicolas Bock <nicolasbock@gmail.com>
+Nikhil Benesch <nikhil.benesch@gmail.com>
+Nitesh Sharma <nbsharma@outlook.com>
+Nowell Strite <nowell@strite.org>
+NtaleGrey <Shadikntale@gmail.com>
+nvdv <modestdev@gmail.com>
+Ofekmeister <ofekmeister@gmail.com>
+ofrinevo <ofrine@gmail.com>
+Oliver Jeeves <oliver.jeeves@ocado.com>
+Oliver Tonnhofer <olt@bogosoft.com>
+Olivier Girardot <ssaboum@gmail.com>
+Olivier Grisel <olivier.grisel@ensta.org>
+Ollie Rutherfurd <orutherfurd@gmail.com>
+OMOTO Kenji <k-omoto@m3.com>
+Omry Yadan <omry@fb.com>
+Oren Held <orenhe@il.ibm.com>
+Oscar Benjamin <oscar.j.benjamin@gmail.com>
+Oz N Tiram <oz.tiram@gmail.com>
+Pachwenko <32424503+Pachwenko@users.noreply.github.com>
+Patrick Dubroy <pdubroy@gmail.com>
+Patrick Jenkins <patrick@socialgrowthtechnologies.com>
+Patrick Lawson <pl@foursquare.com>
+patricktokeeffe <patricktokeeffe@users.noreply.github.com>
+Patrik Kopkan <pkopkan@redhat.com>
+Paul Kehrer <paul.l.kehrer@gmail.com>
+Paul Moore <p.f.moore@gmail.com>
+Paul Nasrat <pnasrat@gmail.com>
+Paul Oswald <pauloswald@gmail.com>
+Paul van der Linden <mail@paultjuh.org>
+Paulus Schoutsen <paulus@paulusschoutsen.nl>
+Pavithra Eswaramoorthy <33131404+QueenCoffee@users.noreply.github.com>
+Pawel Jasinski <pawel.jasinski@gmail.com>
+Pekka Klärck <peke@iki.fi>
+Peter Lisák <peter.lisak@showmax.com>
+Peter Waller <peter.waller@gmail.com>
+petr-tik <petr-tik@users.noreply.github.com>
+Phaneendra Chiruvella <hi@pcx.io>
+Phil Freo <phil@philfreo.com>
+Phil Pennock <phil@pennock-tech.com>
+Phil Whelan <phil123@gmail.com>
+Philip Jägenstedt <philip@foolip.org>
+Philip Molloy <pamolloy@users.noreply.github.com>
+Philippe Ombredanne <pombredanne@gmail.com>
+Pi Delport <pjdelport@gmail.com>
+Pierre-Yves Rofes <github@rofes.fr>
+pip <pypa-dev@googlegroups.com>
+Prabakaran Kumaresshan <k_prabakaran+github@hotmail.com>
+Prabhjyotsing Surjit Singh Sodhi <psinghsodhi@bloomberg.net>
+Prabhu Marappan <prabhum.794@gmail.com>
+Pradyun Gedam <pradyunsg@gmail.com>
+Pratik Mallya <mallya@us.ibm.com>
+Preet Thakkar <preet.thakkar@students.iiit.ac.in>
+Preston Holmes <preston@ptone.com>
+Przemek Wrzos <hetmankp@none>
+Pulkit Goyal <7895pulkit@gmail.com>
+Qiangning Hong <hongqn@gmail.com>
+Quentin Pradet <quentin.pradet@gmail.com>
+R. David Murray <rdmurray@bitdance.com>
+Rafael Caricio <rafael.jacinto@gmail.com>
+Ralf Schmitt <ralf@systemexit.de>
+Razzi Abuissa <razzi53@gmail.com>
+rdb <rdb@users.noreply.github.com>
+Remi Rampin <r@remirampin.com>
+Remi Rampin <remirampin@gmail.com>
+Rene Dudfield <renesd@gmail.com>
+Riccardo Magliocchetti <riccardo.magliocchetti@gmail.com>
+Richard Jones <r1chardj0n3s@gmail.com>
+RobberPhex <robberphex@gmail.com>
+Robert Collins <rbtcollins@hp.com>
+Robert McGibbon <rmcgibbo@gmail.com>
+Robert T. McGibbon <rmcgibbo@gmail.com>
+robin elisha robinson <elisha.rob@gmail.com>
+Roey Berman <roey.berman@gmail.com>
+Rohan Jain <crodjer@gmail.com>
+Rohan Jain <crodjer@users.noreply.github.com>
+Rohan Jain <mail@rohanjain.in>
+Roman Bogorodskiy <roman.bogorodskiy@ericsson.com>
+Romuald Brunet <romuald@chivil.com>
+Ronny Pfannschmidt <Ronny.Pfannschmidt@gmx.de>
+Rory McCann <rory@technomancy.org>
+Ross Brattain <ross.b.brattain@intel.com>
+Roy Wellington Ⅳ <cactus_hugged@yahoo.com>
+Roy Wellington Ⅳ <roy@mybasis.com>
+Ryan Wooden <rygwdn@gmail.com>
+ryneeverett <ryneeverett@gmail.com>
+Sachi King <nakato@nakato.io>
+Salvatore Rinchiera <salvatore@rinchiera.com>
+Savio Jomton <sajo240519@gmail.com>
+schlamar <marc.schlaich@gmail.com>
+Scott Kitterman <sklist@kitterman.com>
+Sean <me@sean.taipei>
+seanj <seanj@xyke.com>
+Sebastian Jordan <sebastian.jordan.mail@googlemail.com>
+Sebastian Schaetz <sschaetz@butterflynetinc.com>
+Segev Finer <segev208@gmail.com>
+SeongSoo Cho <ppiyakk2@printf.kr>
+Sergey Vasilyev <nolar@nolar.info>
+Seth Woodworth <seth@sethish.com>
+Shlomi Fish <shlomif@shlomifish.org>
+Shovan Maity <shovan.maity@mayadata.io>
+Simeon Visser <svisser@users.noreply.github.com>
+Simon Cross <hodgestar@gmail.com>
+Simon Pichugin <simon.pichugin@gmail.com>
+sinoroc <sinoroc.code+git@gmail.com>
+Sorin Sbarnea <sorin.sbarnea@gmail.com>
+Stavros Korokithakis <stavros@korokithakis.net>
+Stefan Scherfke <stefan@sofa-rockers.org>
+Stephan Erb <github@stephanerb.eu>
+stepshal <nessento@openmailbox.org>
+Steve (Gadget) Barnes <gadgetsteve@hotmail.com>
+Steve Barnes <gadgetsteve@hotmail.com>
+Steve Dower <steve.dower@microsoft.com>
+Steve Kowalik <steven@wedontsleep.org>
+Steven Myint <git@stevenmyint.com>
+stonebig <stonebig34@gmail.com>
+Stéphane Bidoul (ACSONE) <stephane.bidoul@acsone.eu>
+Stéphane Bidoul <stephane.bidoul@acsone.eu>
+Stéphane Klein <contact@stephane-klein.info>
+Sumana Harihareswara <sh@changeset.nyc>
+Sviatoslav Sydorenko <wk.cvs.github@sydorenko.org.ua>
+Sviatoslav Sydorenko <wk@sydorenko.org.ua>
+Swat009 <swatantra.kumar8@gmail.com>
+Takayuki SHIMIZUKAWA <shimizukawa@gmail.com>
+tbeswick <tbeswick@enphaseenergy.com>
+Thijs Triemstra <info@collab.nl>
+Thomas Fenzl <thomas.fenzl@gmail.com>
+Thomas Grainger <tagrain@gmail.com>
+Thomas Guettler <tguettler@tbz-pariv.de>
+Thomas Johansson <devnull@localhost>
+Thomas Kluyver <thomas@kluyver.me.uk>
+Thomas Smith <smithtg@ncbi.nlm.nih.gov>
+Tim D. Smith <github@tim-smith.us>
+Tim Gates <tim.gates@iress.com>
+Tim Harder <radhermit@gmail.com>
+Tim Heap <tim@timheap.me>
+tim smith <github@tim-smith.us>
+tinruufu <tinruufu@gmail.com>
+Tom Forbes <tom@tomforb.es>
+Tom Freudenheim <tom.freudenheim@onepeloton.com>
+Tom V <tom@viner.tv>
+Tomas Orsava <torsava@redhat.com>
+Tomer Chachamu <tomer.chachamu@gmail.com>
+Tony Beswick <tonybeswick@orcon.net.nz>
+Tony Zhaocheng Tan <tony@tonytan.io>
+TonyBeswick <TonyBeswick@users.noreply.github.com>
+toonarmycaptain <toonarmycaptain@hotmail.com>
+Toshio Kuratomi <toshio@fedoraproject.org>
+Travis Swicegood <development@domain51.com>
+Tzu-ping Chung <uranusjr@gmail.com>
+Valentin Haenel <valentin.haenel@gmx.de>
+Victor Stinner <victor.stinner@gmail.com>
+victorvpaulo <victorvpaulo@gmail.com>
+Viktor Szépe <viktor@szepe.net>
+Ville Skyttä <ville.skytta@iki.fi>
+Vinay Sajip <vinay_sajip@yahoo.co.uk>
+Vincent Philippon <sindaewoh@gmail.com>
+Vinicyus Macedo <7549205+vinicyusmacedo@users.noreply.github.com>
+Vitaly Babiy <vbabiy86@gmail.com>
+Vladimir Rutsky <rutsky@users.noreply.github.com>
+W. Trevor King <wking@drexel.edu>
+Wil Tan <wil@dready.org>
+Wilfred Hughes <me@wilfred.me.uk>
+William ML Leslie <william.leslie.ttg@gmail.com>
+William T Olson <trevor@heytrevor.com>
+Wilson Mo <wilsonfv@126.com>
+wim glenn <wim.glenn@gmail.com>
+Wolfgang Maier <wolfgang.maier@biologie.uni-freiburg.de>
+Xavier Fernandez <xav.fernandez@gmail.com>
+Xavier Fernandez <xavier.fernandez@polyconseil.fr>
+xoviat <xoviat@users.noreply.github.com>
+xtreak <tir.karthi@gmail.com>
+YAMAMOTO Takashi <yamamoto@midokura.com>
+Yen Chi Hsuan <yan12125@gmail.com>
+Yeray Diaz Diaz <yeraydiazdiaz@gmail.com>
+Yoval P <yoval@gmx.com>
+Yu Jian <askingyj@gmail.com>
+Yuan Jing Vincent Yan <yyan82@bloomberg.net>
+Zearin <zearin@gonk.net>
+Zearin <Zearin@users.noreply.github.com>
+Zhiping Deng <kofreestyler@gmail.com>
+Zvezdan Petkovic <zpetkovic@acm.org>
+Łukasz Langa <lukasz@langa.pl>
+Семён Марьясин <simeon@maryasin.name>
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/INSTALLER b/monEnvTP/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/LICENSE.txt b/monEnvTP/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/LICENSE.txt
new file mode 100644
index 0000000000000000000000000000000000000000..737fec5c5352af3d9a6a47a0670da4bdb52c5725
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/LICENSE.txt
@@ -0,0 +1,20 @@
+Copyright (c) 2008-2019 The pip developers (see AUTHORS.txt file)
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/METADATA b/monEnvTP/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..4adf953086ea4e28c5236788234f38f88602296f
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/METADATA
@@ -0,0 +1,82 @@
+Metadata-Version: 2.1
+Name: setuptools
+Version: 44.0.0
+Summary: Easily download, build, install, upgrade, and uninstall Python packages
+Home-page: https://github.com/pypa/setuptools
+Author: Python Packaging Authority
+Author-email: distutils-sig@python.org
+License: UNKNOWN
+Project-URL: Documentation, https://setuptools.readthedocs.io/
+Keywords: CPAN PyPI distutils eggs package management
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: System :: Archiving :: Packaging
+Classifier: Topic :: System :: Systems Administration
+Classifier: Topic :: Utilities
+Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7
+Description-Content-Type: text/x-rst; charset=UTF-8
+
+.. image:: https://img.shields.io/pypi/v/setuptools.svg
+ :target: https://pypi.org/project/setuptools
+
+.. image:: https://img.shields.io/readthedocs/setuptools/latest.svg
+ :target: https://setuptools.readthedocs.io
+
+.. image:: https://img.shields.io/travis/pypa/setuptools/master.svg?label=Linux%20CI&logo=travis&logoColor=white
+ :target: https://travis-ci.org/pypa/setuptools
+
+.. image:: https://img.shields.io/appveyor/ci/pypa/setuptools/master.svg?label=Windows%20CI&logo=appveyor&logoColor=white
+ :target: https://ci.appveyor.com/project/pypa/setuptools/branch/master
+
+.. image:: https://img.shields.io/codecov/c/github/pypa/setuptools/master.svg?logo=codecov&logoColor=white
+ :target: https://codecov.io/gh/pypa/setuptools
+
+.. image:: https://tidelift.com/badges/github/pypa/setuptools?style=flat
+ :target: https://tidelift.com/subscription/pkg/pypi-setuptools?utm_source=pypi-setuptools&utm_medium=readme
+
+.. image:: https://img.shields.io/pypi/pyversions/setuptools.svg
+
+See the `Installation Instructions
+<https://packaging.python.org/installing/>`_ in the Python Packaging
+User's Guide for instructions on installing, upgrading, and uninstalling
+Setuptools.
+
+Questions and comments should be directed to the `distutils-sig
+mailing list <http://mail.python.org/pipermail/distutils-sig/>`_.
+Bug reports and especially tested patches may be
+submitted directly to the `bug tracker
+<https://github.com/pypa/setuptools/issues>`_.
+
+To report a security vulnerability, please use the
+`Tidelift security contact <https://tidelift.com/security>`_.
+Tidelift will coordinate the fix and disclosure.
+
+
+For Enterprise
+==============
+
+Available as part of the Tidelift Subscription.
+
+Setuptools and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
+
+`Learn more <https://tidelift.com/subscription/pkg/pypi-setuptools?utm_source=pypi-setuptools&utm_medium=referral&utm_campaign=github>`_.
+
+Code of Conduct
+===============
+
+Everyone interacting in the setuptools project's codebases, issue trackers,
+chat rooms, and mailing lists is expected to follow the
+`PyPA Code of Conduct <https://www.pypa.io/en/latest/code-of-conduct/>`_.
+
+
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/RECORD b/monEnvTP/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..13d87d0f2c5d12ea72af81c1edf33bc0ab30deef
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/RECORD
@@ -0,0 +1,163 @@
+../../../bin/easy_install,sha256=ZW8qWv8ft8aW2VdsrvTqdqSgtr29TzqVsb6rpGlEOOQ,268
+../../../bin/easy_install-3.8,sha256=ZW8qWv8ft8aW2VdsrvTqdqSgtr29TzqVsb6rpGlEOOQ,268
+__pycache__/easy_install.cpython-38.pyc,,
+easy_install.py,sha256=MDC9vt5AxDsXX5qcKlBz2TnW6Tpuv_AobnfhCJ9X3PM,126
+setuptools-44.0.0.dist-info/AUTHORS.txt,sha256=RtqU9KfonVGhI48DAA4-yTOBUhBtQTjFhaDzHoyh7uU,21518
+setuptools-44.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+setuptools-44.0.0.dist-info/LICENSE.txt,sha256=W6Ifuwlk-TatfRU2LR7W1JMcyMj5_y1NkRkOEJvnRDE,1090
+setuptools-44.0.0.dist-info/METADATA,sha256=L93fcafgVw4xoJUNG0lehyy0prVj-jU_JFxRh0ZUtos,3523
+setuptools-44.0.0.dist-info/RECORD,,
+setuptools-44.0.0.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
+setuptools-44.0.0.dist-info/dependency_links.txt,sha256=HlkCFkoK5TbZ5EMLbLKYhLcY_E31kBWD8TqW2EgmatQ,239
+setuptools-44.0.0.dist-info/entry_points.txt,sha256=ZmIqlp-SBdsBS2cuetmU2NdSOs4DG0kxctUR9UJ8Xk0,3150
+setuptools-44.0.0.dist-info/top_level.txt,sha256=2HUXVVwA4Pff1xgTFr3GsTXXKaPaO6vlG6oNJ_4u4Tg,38
+setuptools-44.0.0.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+setuptools/__init__.py,sha256=WBpCcn2lvdckotabeae1TTYonPOcgCIF3raD2zRWzBc,7283
+setuptools/__pycache__/__init__.cpython-38.pyc,,
+setuptools/__pycache__/_deprecation_warning.cpython-38.pyc,,
+setuptools/__pycache__/_imp.cpython-38.pyc,,
+setuptools/__pycache__/archive_util.cpython-38.pyc,,
+setuptools/__pycache__/build_meta.cpython-38.pyc,,
+setuptools/__pycache__/config.cpython-38.pyc,,
+setuptools/__pycache__/dep_util.cpython-38.pyc,,
+setuptools/__pycache__/depends.cpython-38.pyc,,
+setuptools/__pycache__/dist.cpython-38.pyc,,
+setuptools/__pycache__/errors.cpython-38.pyc,,
+setuptools/__pycache__/extension.cpython-38.pyc,,
+setuptools/__pycache__/glob.cpython-38.pyc,,
+setuptools/__pycache__/installer.cpython-38.pyc,,
+setuptools/__pycache__/launch.cpython-38.pyc,,
+setuptools/__pycache__/lib2to3_ex.cpython-38.pyc,,
+setuptools/__pycache__/monkey.cpython-38.pyc,,
+setuptools/__pycache__/msvc.cpython-38.pyc,,
+setuptools/__pycache__/namespaces.cpython-38.pyc,,
+setuptools/__pycache__/package_index.cpython-38.pyc,,
+setuptools/__pycache__/py27compat.cpython-38.pyc,,
+setuptools/__pycache__/py31compat.cpython-38.pyc,,
+setuptools/__pycache__/py33compat.cpython-38.pyc,,
+setuptools/__pycache__/py34compat.cpython-38.pyc,,
+setuptools/__pycache__/sandbox.cpython-38.pyc,,
+setuptools/__pycache__/site-patch.cpython-38.pyc,,
+setuptools/__pycache__/ssl_support.cpython-38.pyc,,
+setuptools/__pycache__/unicode_utils.cpython-38.pyc,,
+setuptools/__pycache__/version.cpython-38.pyc,,
+setuptools/__pycache__/wheel.cpython-38.pyc,,
+setuptools/__pycache__/windows_support.cpython-38.pyc,,
+setuptools/_deprecation_warning.py,sha256=jU9-dtfv6cKmtQJOXN8nP1mm7gONw5kKEtiPtbwnZyI,218
+setuptools/_imp.py,sha256=jloslOkxrTKbobgemfP94YII0nhqiJzE1bRmCTZ1a5I,2223
+setuptools/_vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+setuptools/_vendor/__pycache__/__init__.cpython-38.pyc,,
+setuptools/_vendor/__pycache__/ordered_set.cpython-38.pyc,,
+setuptools/_vendor/__pycache__/pyparsing.cpython-38.pyc,,
+setuptools/_vendor/__pycache__/six.cpython-38.pyc,,
+setuptools/_vendor/ordered_set.py,sha256=dbaCcs27dyN9gnMWGF5nA_BrVn6Q-NrjKYJpV9_fgBs,15130
+setuptools/_vendor/packaging/__about__.py,sha256=CpuMSyh1V7adw8QMjWKkY3LtdqRUkRX4MgJ6nF4stM0,744
+setuptools/_vendor/packaging/__init__.py,sha256=6enbp5XgRfjBjsI9-bn00HjHf5TH21PDMOKkJW8xw-w,562
+setuptools/_vendor/packaging/__pycache__/__about__.cpython-38.pyc,,
+setuptools/_vendor/packaging/__pycache__/__init__.cpython-38.pyc,,
+setuptools/_vendor/packaging/__pycache__/_compat.cpython-38.pyc,,
+setuptools/_vendor/packaging/__pycache__/_structures.cpython-38.pyc,,
+setuptools/_vendor/packaging/__pycache__/markers.cpython-38.pyc,,
+setuptools/_vendor/packaging/__pycache__/requirements.cpython-38.pyc,,
+setuptools/_vendor/packaging/__pycache__/specifiers.cpython-38.pyc,,
+setuptools/_vendor/packaging/__pycache__/tags.cpython-38.pyc,,
+setuptools/_vendor/packaging/__pycache__/utils.cpython-38.pyc,,
+setuptools/_vendor/packaging/__pycache__/version.cpython-38.pyc,,
+setuptools/_vendor/packaging/_compat.py,sha256=Ugdm-qcneSchW25JrtMIKgUxfEEBcCAz6WrEeXeqz9o,865
+setuptools/_vendor/packaging/_structures.py,sha256=pVd90XcXRGwpZRB_qdFuVEibhCHpX_bL5zYr9-N0mc8,1416
+setuptools/_vendor/packaging/markers.py,sha256=-meFl9Fr9V8rF5Rduzgett5EHK9wBYRUqssAV2pj0lw,8268
+setuptools/_vendor/packaging/requirements.py,sha256=3dwIJekt8RRGCUbgxX8reeAbgmZYjb0wcCRtmH63kxI,4742
+setuptools/_vendor/packaging/specifiers.py,sha256=0ZzQpcUnvrQ6LjR-mQRLzMr8G6hdRv-mY0VSf_amFtI,27778
+setuptools/_vendor/packaging/tags.py,sha256=EPLXhO6GTD7_oiWEO1U0l0PkfR8R_xivpMDHXnsTlts,12933
+setuptools/_vendor/packaging/utils.py,sha256=VaTC0Ei7zO2xl9ARiWmz2YFLFt89PuuhLbAlXMyAGms,1520
+setuptools/_vendor/packaging/version.py,sha256=Npdwnb8OHedj_2L86yiUqscujb7w_i5gmSK1PhOAFzg,11978
+setuptools/_vendor/pyparsing.py,sha256=tmrp-lu-qO1i75ZzIN5A12nKRRD1Cm4Vpk-5LR9rims,232055
+setuptools/_vendor/six.py,sha256=A6hdJZVjI3t_geebZ9BzUvwRrIXo0lfwzQlM2LcKyas,30098
+setuptools/archive_util.py,sha256=kw8Ib_lKjCcnPKNbS7h8HztRVK0d5RacU3r_KRdVnmM,6592
+setuptools/build_meta.py,sha256=-9Nmj9YdbW4zX3TssPJZhsENrTa4fw3k86Jm1cdKMik,9597
+setuptools/cli-32.exe,sha256=dfEuovMNnA2HLa3jRfMPVi5tk4R7alCbpTvuxtCyw0Y,65536
+setuptools/cli-64.exe,sha256=KLABu5pyrnokJCv6skjXZ6GsXeyYHGcqOUT3oHI3Xpo,74752
+setuptools/cli.exe,sha256=dfEuovMNnA2HLa3jRfMPVi5tk4R7alCbpTvuxtCyw0Y,65536
+setuptools/command/__init__.py,sha256=QCAuA9whnq8Bnoc0bBaS6Lw_KAUO0DiHYZQXEMNn5hg,568
+setuptools/command/__pycache__/__init__.cpython-38.pyc,,
+setuptools/command/__pycache__/alias.cpython-38.pyc,,
+setuptools/command/__pycache__/bdist_egg.cpython-38.pyc,,
+setuptools/command/__pycache__/bdist_rpm.cpython-38.pyc,,
+setuptools/command/__pycache__/bdist_wininst.cpython-38.pyc,,
+setuptools/command/__pycache__/build_clib.cpython-38.pyc,,
+setuptools/command/__pycache__/build_ext.cpython-38.pyc,,
+setuptools/command/__pycache__/build_py.cpython-38.pyc,,
+setuptools/command/__pycache__/develop.cpython-38.pyc,,
+setuptools/command/__pycache__/dist_info.cpython-38.pyc,,
+setuptools/command/__pycache__/easy_install.cpython-38.pyc,,
+setuptools/command/__pycache__/egg_info.cpython-38.pyc,,
+setuptools/command/__pycache__/install.cpython-38.pyc,,
+setuptools/command/__pycache__/install_egg_info.cpython-38.pyc,,
+setuptools/command/__pycache__/install_lib.cpython-38.pyc,,
+setuptools/command/__pycache__/install_scripts.cpython-38.pyc,,
+setuptools/command/__pycache__/py36compat.cpython-38.pyc,,
+setuptools/command/__pycache__/register.cpython-38.pyc,,
+setuptools/command/__pycache__/rotate.cpython-38.pyc,,
+setuptools/command/__pycache__/saveopts.cpython-38.pyc,,
+setuptools/command/__pycache__/sdist.cpython-38.pyc,,
+setuptools/command/__pycache__/setopt.cpython-38.pyc,,
+setuptools/command/__pycache__/test.cpython-38.pyc,,
+setuptools/command/__pycache__/upload.cpython-38.pyc,,
+setuptools/command/__pycache__/upload_docs.cpython-38.pyc,,
+setuptools/command/alias.py,sha256=KjpE0sz_SDIHv3fpZcIQK-sCkJz-SrC6Gmug6b9Nkc8,2426
+setuptools/command/bdist_egg.py,sha256=nnfV8Ah8IRC_Ifv5Loa9FdxL66MVbyDXwy-foP810zM,18185
+setuptools/command/bdist_rpm.py,sha256=B7l0TnzCGb-0nLlm6rS00jWLkojASwVmdhW2w5Qz_Ak,1508
+setuptools/command/bdist_wininst.py,sha256=_6dz3lpB1tY200LxKPLM7qgwTCceOMgaWFF-jW2-pm0,637
+setuptools/command/build_clib.py,sha256=bQ9aBr-5ZSO-9fGsGsDLz0mnnFteHUZnftVLkhvHDq0,4484
+setuptools/command/build_ext.py,sha256=Ib42YUGksBswm2mL5xmQPF6NeTA6HcqrvAtEgFCv32A,13019
+setuptools/command/build_py.py,sha256=yWyYaaS9F3o9JbIczn064A5g1C5_UiKRDxGaTqYbtLE,9596
+setuptools/command/develop.py,sha256=MQlnGS6uP19erK2JCNOyQYoYyquk3PADrqrrinqqLtA,8184
+setuptools/command/dist_info.py,sha256=5t6kOfrdgALT-P3ogss6PF9k-Leyesueycuk3dUyZnI,960
+setuptools/command/easy_install.py,sha256=0lY8Agxe-7IgMtxgxFuOY1NrDlBzOUlpCKsvayXlTYY,89903
+setuptools/command/egg_info.py,sha256=0e_TXrMfpa8nGTO7GmJcmpPCMWzliZi6zt9aMchlumc,25578
+setuptools/command/install.py,sha256=8doMxeQEDoK4Eco0mO2WlXXzzp9QnsGJQ7Z7yWkZPG8,4705
+setuptools/command/install_egg_info.py,sha256=4zq_Ad3jE-EffParuyDEnvxU6efB-Xhrzdr8aB6Ln_8,3195
+setuptools/command/install_lib.py,sha256=9zdc-H5h6RPxjySRhOwi30E_WfcVva7gpfhZ5ata60w,5023
+setuptools/command/install_scripts.py,sha256=UD0rEZ6861mTYhIdzcsqKnUl8PozocXWl9VBQ1VTWnc,2439
+setuptools/command/launcher manifest.xml,sha256=xlLbjWrB01tKC0-hlVkOKkiSPbzMml2eOPtJ_ucCnbE,628
+setuptools/command/py36compat.py,sha256=SzjZcOxF7zdFUT47Zv2n7AM3H8koDys_0OpS-n9gIfc,4986
+setuptools/command/register.py,sha256=kk3DxXCb5lXTvqnhfwx2g6q7iwbUmgTyXUCaBooBOUk,468
+setuptools/command/rotate.py,sha256=co5C1EkI7P0GGT6Tqz-T2SIj2LBJTZXYELpmao6d4KQ,2164
+setuptools/command/saveopts.py,sha256=za7QCBcQimKKriWcoCcbhxPjUz30gSB74zuTL47xpP4,658
+setuptools/command/sdist.py,sha256=IL1LepD2h8qGKOFJ3rrQVbjNH_Q6ViD40l0QADr4MEU,8088
+setuptools/command/setopt.py,sha256=NTWDyx-gjDF-txf4dO577s7LOzHVoKR0Mq33rFxaRr8,5085
+setuptools/command/test.py,sha256=u2kXngIIdSYqtvwFlHiN6Iye1IB4TU6uadB2uiV1szw,9602
+setuptools/command/upload.py,sha256=XT3YFVfYPAmA5qhGg0euluU98ftxRUW-PzKcODMLxUs,462
+setuptools/command/upload_docs.py,sha256=oXiGplM_cUKLwE4CWWw98RzCufAu8tBhMC97GegFcms,7311
+setuptools/config.py,sha256=6SB2OY3qcooOJmG_rsK_s0pKBsorBlDpfMJUyzjQIGk,20575
+setuptools/dep_util.py,sha256=fgixvC1R7sH3r13ktyf7N0FALoqEXL1cBarmNpSEoWg,935
+setuptools/depends.py,sha256=qt2RWllArRvhnm8lxsyRpcthEZYp4GHQgREl1q0LkFw,5517
+setuptools/dist.py,sha256=xtXaNsOsE32MwwQqErzgXJF7jsTQz9GYFRrwnPFQ0J0,49865
+setuptools/errors.py,sha256=MVOcv381HNSajDgEUWzOQ4J6B5BHCBMSjHfaWcEwA1o,524
+setuptools/extension.py,sha256=uc6nHI-MxwmNCNPbUiBnybSyqhpJqjbhvOQ-emdvt_E,1729
+setuptools/extern/__init__.py,sha256=4q9gtShB1XFP6CisltsyPqtcfTO6ZM9Lu1QBl3l-qmo,2514
+setuptools/extern/__pycache__/__init__.cpython-38.pyc,,
+setuptools/glob.py,sha256=o75cHrOxYsvn854thSxE0x9k8JrKDuhP_rRXlVB00Q4,5084
+setuptools/gui-32.exe,sha256=XBr0bHMA6Hpz2s9s9Bzjl-PwXfa9nH4ie0rFn4V2kWA,65536
+setuptools/gui-64.exe,sha256=aYKMhX1IJLn4ULHgWX0sE0yREUt6B3TEHf_jOw6yNyE,75264
+setuptools/gui.exe,sha256=XBr0bHMA6Hpz2s9s9Bzjl-PwXfa9nH4ie0rFn4V2kWA,65536
+setuptools/installer.py,sha256=TCFRonRo01I79zo-ucf3Ymhj8TenPlmhMijN916aaJs,5337
+setuptools/launch.py,sha256=sd7ejwhBocCDx_wG9rIs0OaZ8HtmmFU8ZC6IR_S0Lvg,787
+setuptools/lib2to3_ex.py,sha256=t5e12hbR2pi9V4ezWDTB4JM-AISUnGOkmcnYHek3xjg,2013
+setuptools/monkey.py,sha256=FGc9fffh7gAxMLFmJs2DW_OYWpBjkdbNS2n14UAK4NA,5264
+setuptools/msvc.py,sha256=8baJ6aYgCA4TRdWQQi185qB9dnU8FaP4wgpbmd7VODs,46751
+setuptools/namespaces.py,sha256=F0Nrbv8KCT2OrO7rwa03om4N4GZKAlnce-rr-cgDQa8,3199
+setuptools/package_index.py,sha256=6pb-B1POtHyLycAbkDETk4fO-Qv8_sY-rjTXhUOoh6k,40605
+setuptools/py27compat.py,sha256=tvmer0Tn-wk_JummCkoM22UIjpjL-AQ8uUiOaqTs8sI,1496
+setuptools/py31compat.py,sha256=h2rtZghOfwoGYd8sQ0-auaKiF3TcL3qX0bX3VessqcE,838
+setuptools/py33compat.py,sha256=SMF9Z8wnGicTOkU1uRNwZ_kz5Z_bj29PUBbqdqeeNsc,1330
+setuptools/py34compat.py,sha256=KYOd6ybRxjBW8NJmYD8t_UyyVmysppFXqHpFLdslGXU,245
+setuptools/sandbox.py,sha256=9UbwfEL5QY436oMI1LtFWohhoZ-UzwHvGyZjUH_qhkw,14276
+setuptools/script (dev).tmpl,sha256=RUzQzCQUaXtwdLtYHWYbIQmOaES5Brqq1FvUA_tu-5I,218
+setuptools/script.tmpl,sha256=WGTt5piezO27c-Dbx6l5Q4T3Ff20A5z7872hv3aAhYY,138
+setuptools/site-patch.py,sha256=OumkIHMuoSenRSW1382kKWI1VAwxNE86E5W8iDd34FY,2302
+setuptools/ssl_support.py,sha256=nLjPUBBw7RTTx6O4RJZ5eAMGgjJG8beiDbkFXDZpLuM,8493
+setuptools/unicode_utils.py,sha256=NOiZ_5hD72A6w-4wVj8awHFM3n51Kmw1Ic_vx15XFqw,996
+setuptools/version.py,sha256=og_cuZQb0QI6ukKZFfZWPlr1HgJBPPn2vO2m_bI9ZTE,144
+setuptools/wheel.py,sha256=zct-SEj5_LoHg6XELt2cVRdulsUENenCdS1ekM7TlZA,8455
+setuptools/windows_support.py,sha256=5GrfqSP2-dLGJoZTq2g6dCKkyQxxa2n5IQiXlJCoYEE,714
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/WHEEL b/monEnvTP/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..ef99c6cf3283b50a273ac4c6d009a0aa85597070
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.34.2)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/dependency_links.txt b/monEnvTP/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/dependency_links.txt
new file mode 100644
index 0000000000000000000000000000000000000000..e87d02103ede91545d70783dd59653d183424b68
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/dependency_links.txt
@@ -0,0 +1,2 @@
+https://files.pythonhosted.org/packages/source/c/certifi/certifi-2016.9.26.tar.gz#md5=baa81e951a29958563689d868ef1064d
+https://files.pythonhosted.org/packages/source/w/wincertstore/wincertstore-0.2.zip#md5=ae728f2f007185648d0c7a8679b361e2
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/entry_points.txt b/monEnvTP/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/entry_points.txt
new file mode 100644
index 0000000000000000000000000000000000000000..0fed3f1d83f3eb690dddad3f050da3d3f021eb6a
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/entry_points.txt
@@ -0,0 +1,68 @@
+[console_scripts]
+easy_install = setuptools.command.easy_install:main
+
+[distutils.commands]
+alias = setuptools.command.alias:alias
+bdist_egg = setuptools.command.bdist_egg:bdist_egg
+bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm
+bdist_wininst = setuptools.command.bdist_wininst:bdist_wininst
+build_clib = setuptools.command.build_clib:build_clib
+build_ext = setuptools.command.build_ext:build_ext
+build_py = setuptools.command.build_py:build_py
+develop = setuptools.command.develop:develop
+dist_info = setuptools.command.dist_info:dist_info
+easy_install = setuptools.command.easy_install:easy_install
+egg_info = setuptools.command.egg_info:egg_info
+install = setuptools.command.install:install
+install_egg_info = setuptools.command.install_egg_info:install_egg_info
+install_lib = setuptools.command.install_lib:install_lib
+install_scripts = setuptools.command.install_scripts:install_scripts
+rotate = setuptools.command.rotate:rotate
+saveopts = setuptools.command.saveopts:saveopts
+sdist = setuptools.command.sdist:sdist
+setopt = setuptools.command.setopt:setopt
+test = setuptools.command.test:test
+upload_docs = setuptools.command.upload_docs:upload_docs
+
+[distutils.setup_keywords]
+convert_2to3_doctests = setuptools.dist:assert_string_list
+dependency_links = setuptools.dist:assert_string_list
+eager_resources = setuptools.dist:assert_string_list
+entry_points = setuptools.dist:check_entry_points
+exclude_package_data = setuptools.dist:check_package_data
+extras_require = setuptools.dist:check_extras
+include_package_data = setuptools.dist:assert_bool
+install_requires = setuptools.dist:check_requirements
+namespace_packages = setuptools.dist:check_nsp
+package_data = setuptools.dist:check_package_data
+packages = setuptools.dist:check_packages
+python_requires = setuptools.dist:check_specifier
+setup_requires = setuptools.dist:check_requirements
+test_loader = setuptools.dist:check_importable
+test_runner = setuptools.dist:check_importable
+test_suite = setuptools.dist:check_test_suite
+tests_require = setuptools.dist:check_requirements
+use_2to3 = setuptools.dist:assert_bool
+use_2to3_exclude_fixers = setuptools.dist:assert_string_list
+use_2to3_fixers = setuptools.dist:assert_string_list
+zip_safe = setuptools.dist:assert_bool
+
+[egg_info.writers]
+PKG-INFO = setuptools.command.egg_info:write_pkg_info
+dependency_links.txt = setuptools.command.egg_info:overwrite_arg
+depends.txt = setuptools.command.egg_info:warn_depends_obsolete
+eager_resources.txt = setuptools.command.egg_info:overwrite_arg
+entry_points.txt = setuptools.command.egg_info:write_entries
+namespace_packages.txt = setuptools.command.egg_info:overwrite_arg
+requires.txt = setuptools.command.egg_info:write_requirements
+top_level.txt = setuptools.command.egg_info:write_toplevel_names
+
+[setuptools.finalize_distribution_options]
+2to3_doctests = setuptools.dist:Distribution._finalize_2to3_doctests
+features = setuptools.dist:Distribution._finalize_feature_opts
+keywords = setuptools.dist:Distribution._finalize_setup_keywords
+parent_finalize = setuptools.dist:_Distribution.finalize_options
+
+[setuptools.installation]
+eggsecutable = setuptools.command.easy_install:bootstrap
+
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/top_level.txt b/monEnvTP/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/top_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..4577c6a795e510bf7578236665f582c3770fb42e
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/top_level.txt
@@ -0,0 +1,3 @@
+easy_install
+pkg_resources
+setuptools
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/zip-safe b/monEnvTP/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/zip-safe
new file mode 100644
index 0000000000000000000000000000000000000000..8b137891791fe96927ad78e64b0aad7bded08bdc
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/zip-safe
@@ -0,0 +1 @@
+
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/__init__.py b/monEnvTP/lib/python3.8/site-packages/setuptools/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..a71b2bbdc6170963a66959c48080c1dedc7bb703
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/__init__.py
@@ -0,0 +1,228 @@
+"""Extensions to the 'distutils' for large or complex distributions"""
+
+import os
+import sys
+import functools
+import distutils.core
+import distutils.filelist
+import re
+from distutils.errors import DistutilsOptionError
+from distutils.util import convert_path
+from fnmatch import fnmatchcase
+
+from ._deprecation_warning import SetuptoolsDeprecationWarning
+
+from setuptools.extern.six import PY3, string_types
+from setuptools.extern.six.moves import filter, map
+
+import setuptools.version
+from setuptools.extension import Extension
+from setuptools.dist import Distribution, Feature
+from setuptools.depends import Require
+from . import monkey
+
+__metaclass__ = type
+
+
+__all__ = [
+ 'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require',
+ 'SetuptoolsDeprecationWarning',
+ 'find_packages'
+]
+
+if PY3:
+ __all__.append('find_namespace_packages')
+
+__version__ = setuptools.version.__version__
+
+bootstrap_install_from = None
+
+# If we run 2to3 on .py files, should we also convert docstrings?
+# Default: yes; assume that we can detect doctests reliably
+run_2to3_on_doctests = True
+# Standard package names for fixer packages
+lib2to3_fixer_packages = ['lib2to3.fixes']
+
+
+class PackageFinder:
+ """
+ Generate a list of all Python packages found within a directory
+ """
+
+ @classmethod
+ def find(cls, where='.', exclude=(), include=('*',)):
+ """Return a list all Python packages found within directory 'where'
+
+ 'where' is the root directory which will be searched for packages. It
+ should be supplied as a "cross-platform" (i.e. URL-style) path; it will
+ be converted to the appropriate local path syntax.
+
+ 'exclude' is a sequence of package names to exclude; '*' can be used
+ as a wildcard in the names, such that 'foo.*' will exclude all
+ subpackages of 'foo' (but not 'foo' itself).
+
+ 'include' is a sequence of package names to include. If it's
+ specified, only the named packages will be included. If it's not
+ specified, all found packages will be included. 'include' can contain
+ shell style wildcard patterns just like 'exclude'.
+ """
+
+ return list(cls._find_packages_iter(
+ convert_path(where),
+ cls._build_filter('ez_setup', '*__pycache__', *exclude),
+ cls._build_filter(*include)))
+
+ @classmethod
+ def _find_packages_iter(cls, where, exclude, include):
+ """
+ All the packages found in 'where' that pass the 'include' filter, but
+ not the 'exclude' filter.
+ """
+ for root, dirs, files in os.walk(where, followlinks=True):
+ # Copy dirs to iterate over it, then empty dirs.
+ all_dirs = dirs[:]
+ dirs[:] = []
+
+ for dir in all_dirs:
+ full_path = os.path.join(root, dir)
+ rel_path = os.path.relpath(full_path, where)
+ package = rel_path.replace(os.path.sep, '.')
+
+ # Skip directory trees that are not valid packages
+ if ('.' in dir or not cls._looks_like_package(full_path)):
+ continue
+
+ # Should this package be included?
+ if include(package) and not exclude(package):
+ yield package
+
+ # Keep searching subdirectories, as there may be more packages
+ # down there, even if the parent was excluded.
+ dirs.append(dir)
+
+ @staticmethod
+ def _looks_like_package(path):
+ """Does a directory look like a package?"""
+ return os.path.isfile(os.path.join(path, '__init__.py'))
+
+ @staticmethod
+ def _build_filter(*patterns):
+ """
+ Given a list of patterns, return a callable that will be true only if
+ the input matches at least one of the patterns.
+ """
+ return lambda name: any(fnmatchcase(name, pat=pat) for pat in patterns)
+
+
+class PEP420PackageFinder(PackageFinder):
+ @staticmethod
+ def _looks_like_package(path):
+ return True
+
+
+find_packages = PackageFinder.find
+
+if PY3:
+ find_namespace_packages = PEP420PackageFinder.find
+
+
+def _install_setup_requires(attrs):
+ # Note: do not use `setuptools.Distribution` directly, as
+ # our PEP 517 backend patch `distutils.core.Distribution`.
+ dist = distutils.core.Distribution(dict(
+ (k, v) for k, v in attrs.items()
+ if k in ('dependency_links', 'setup_requires')
+ ))
+ # Honor setup.cfg's options.
+ dist.parse_config_files(ignore_option_errors=True)
+ if dist.setup_requires:
+ dist.fetch_build_eggs(dist.setup_requires)
+
+
+def setup(**attrs):
+ # Make sure we have any requirements needed to interpret 'attrs'.
+ _install_setup_requires(attrs)
+ return distutils.core.setup(**attrs)
+
+setup.__doc__ = distutils.core.setup.__doc__
+
+
+_Command = monkey.get_unpatched(distutils.core.Command)
+
+
+class Command(_Command):
+ __doc__ = _Command.__doc__
+
+ command_consumes_arguments = False
+
+ def __init__(self, dist, **kw):
+ """
+ Construct the command for dist, updating
+ vars(self) with any keyword parameters.
+ """
+ _Command.__init__(self, dist)
+ vars(self).update(kw)
+
+ def _ensure_stringlike(self, option, what, default=None):
+ val = getattr(self, option)
+ if val is None:
+ setattr(self, option, default)
+ return default
+ elif not isinstance(val, string_types):
+ raise DistutilsOptionError("'%s' must be a %s (got `%s`)"
+ % (option, what, val))
+ return val
+
+ def ensure_string_list(self, option):
+ r"""Ensure that 'option' is a list of strings. If 'option' is
+ currently a string, we split it either on /,\s*/ or /\s+/, so
+ "foo bar baz", "foo,bar,baz", and "foo, bar baz" all become
+ ["foo", "bar", "baz"].
+ """
+ val = getattr(self, option)
+ if val is None:
+ return
+ elif isinstance(val, string_types):
+ setattr(self, option, re.split(r',\s*|\s+', val))
+ else:
+ if isinstance(val, list):
+ ok = all(isinstance(v, string_types) for v in val)
+ else:
+ ok = False
+ if not ok:
+ raise DistutilsOptionError(
+ "'%s' must be a list of strings (got %r)"
+ % (option, val))
+
+ def reinitialize_command(self, command, reinit_subcommands=0, **kw):
+ cmd = _Command.reinitialize_command(self, command, reinit_subcommands)
+ vars(cmd).update(kw)
+ return cmd
+
+
+def _find_all_simple(path):
+ """
+ Find all files under 'path'
+ """
+ results = (
+ os.path.join(base, file)
+ for base, dirs, files in os.walk(path, followlinks=True)
+ for file in files
+ )
+ return filter(os.path.isfile, results)
+
+
+def findall(dir=os.curdir):
+ """
+ Find all files under 'dir' and return the list of full filenames.
+ Unless dir is '.', return full filenames with dir prepended.
+ """
+ files = _find_all_simple(dir)
+ if dir == os.curdir:
+ make_rel = functools.partial(os.path.relpath, start=dir)
+ files = map(make_rel, files)
+ return list(files)
+
+
+# Apply monkey patches
+monkey.patch_all()
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b81339ec84f55471b0dfd15f83e65b38c7f994f8
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/_deprecation_warning.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/_deprecation_warning.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..fead1425348905e2f5dbcdf4020f6f88e8dd3417
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/_deprecation_warning.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/_imp.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/_imp.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..03f650084191cfba0371c70748059408448e2f6d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/_imp.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/archive_util.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/archive_util.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..538c8c915769a3d0009cdb3130ce2bbd7fde4cfb
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/archive_util.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/build_meta.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/build_meta.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d1370600cec74998ba2c5a0520d00ccada1970d3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/build_meta.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/config.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/config.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..08b42133859f0a03c51223e41decc33c87a05742
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/config.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/dep_util.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/dep_util.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..66af3c002f322a91f4da844b2abdce15bd5895a3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/dep_util.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/depends.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/depends.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..fcbf978bb5074caa9c1ffa93c60798e7ff84353d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/depends.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/dist.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/dist.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..61caa5624dc51972da866ccf7a3b7e1d07ec0f5c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/dist.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/errors.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/errors.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e095c87ccd8a7d7e97a74e9ada0e118a81e51cd5
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/errors.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/extension.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/extension.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..cafdd592aa2ee4380462debb7fd10f006d380252
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/extension.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/glob.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/glob.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..40e7589a731c3d577fdac4344e4c97fa58cdd778
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/glob.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/installer.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/installer.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..979bd65f0b3d74c8438cd3b65dbe159d11eeeaf7
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/installer.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/launch.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/launch.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..15dee5cee8438c504ce25f8dd3fb8a7fb77bab77
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/launch.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/lib2to3_ex.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/lib2to3_ex.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a60c59849c993c1280f4b9a72cb59a8ede1801b0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/lib2to3_ex.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/monkey.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/monkey.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4686b3f98918406f8a24f6de16379538e3fae84d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/monkey.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/msvc.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/msvc.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7111b11a1f66ee3472594e897505e6df3749e76e
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/msvc.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/namespaces.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/namespaces.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5b32f8261efd54b88c6b8d06c8f5b5437560d485
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/namespaces.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/package_index.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/package_index.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f6cf9517d5c9079d971203e2e6d4491d287e3af9
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/package_index.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/py27compat.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/py27compat.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c6c6f4ad127e73c891a81ddb143ad89a7edf9b78
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/py27compat.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/py31compat.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/py31compat.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0779a6a5e539278238576a59fa0f0260b90444c9
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/py31compat.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/py33compat.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/py33compat.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7849356767ba18d5617bd3d1609009317e077c8b
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/py33compat.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/py34compat.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/py34compat.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..842a886aa2d9b4ad5eadd7621ec0873ca852b4d9
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/py34compat.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/sandbox.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/sandbox.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f0b00923b821bb97cc755b7322fbee64cfcb5249
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/sandbox.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/site-patch.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/site-patch.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..077acbb5dd4adcf94e3a000e31bb4b014504a8eb
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/site-patch.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/ssl_support.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/ssl_support.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6d694fec6cf466f7f120817e7e3ae623cda09783
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/ssl_support.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/unicode_utils.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/unicode_utils.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6804a314275bb3a7eeefc40f2198e6f404d4ec60
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/unicode_utils.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/version.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/version.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..44c610d399d8fa5488c7677e3658a4f9f5f00242
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/version.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/wheel.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/wheel.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7113c86dff3ac8c42e8a961eb3f085454a7d5a13
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/wheel.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/windows_support.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/windows_support.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b9c90f8c5c984b924b6c87fc3b2c4260dae5f3aa
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/__pycache__/windows_support.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/_deprecation_warning.py b/monEnvTP/lib/python3.8/site-packages/setuptools/_deprecation_warning.py
new file mode 100644
index 0000000000000000000000000000000000000000..086b64dd3817c0c1a194ffc1959eeffdd2695bef
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/_deprecation_warning.py
@@ -0,0 +1,7 @@
+class SetuptoolsDeprecationWarning(Warning):
+ """
+ Base class for warning deprecations in ``setuptools``
+
+ This class is not derived from ``DeprecationWarning``, and as such is
+ visible by default.
+ """
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/_imp.py b/monEnvTP/lib/python3.8/site-packages/setuptools/_imp.py
new file mode 100644
index 0000000000000000000000000000000000000000..a3cce9b284b1e580c1715c5e300a18077d63e8ce
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/_imp.py
@@ -0,0 +1,73 @@
+"""
+Re-implementation of find_module and get_frozen_object
+from the deprecated imp module.
+"""
+
+import os
+import importlib.util
+import importlib.machinery
+
+from .py34compat import module_from_spec
+
+
+PY_SOURCE = 1
+PY_COMPILED = 2
+C_EXTENSION = 3
+C_BUILTIN = 6
+PY_FROZEN = 7
+
+
+def find_module(module, paths=None):
+ """Just like 'imp.find_module()', but with package support"""
+ spec = importlib.util.find_spec(module, paths)
+ if spec is None:
+ raise ImportError("Can't find %s" % module)
+ if not spec.has_location and hasattr(spec, 'submodule_search_locations'):
+ spec = importlib.util.spec_from_loader('__init__.py', spec.loader)
+
+ kind = -1
+ file = None
+ static = isinstance(spec.loader, type)
+ if spec.origin == 'frozen' or static and issubclass(
+ spec.loader, importlib.machinery.FrozenImporter):
+ kind = PY_FROZEN
+ path = None # imp compabilty
+ suffix = mode = '' # imp compability
+ elif spec.origin == 'built-in' or static and issubclass(
+ spec.loader, importlib.machinery.BuiltinImporter):
+ kind = C_BUILTIN
+ path = None # imp compabilty
+ suffix = mode = '' # imp compability
+ elif spec.has_location:
+ path = spec.origin
+ suffix = os.path.splitext(path)[1]
+ mode = 'r' if suffix in importlib.machinery.SOURCE_SUFFIXES else 'rb'
+
+ if suffix in importlib.machinery.SOURCE_SUFFIXES:
+ kind = PY_SOURCE
+ elif suffix in importlib.machinery.BYTECODE_SUFFIXES:
+ kind = PY_COMPILED
+ elif suffix in importlib.machinery.EXTENSION_SUFFIXES:
+ kind = C_EXTENSION
+
+ if kind in {PY_SOURCE, PY_COMPILED}:
+ file = open(path, mode)
+ else:
+ path = None
+ suffix = mode = ''
+
+ return file, path, (suffix, mode, kind)
+
+
+def get_frozen_object(module, paths=None):
+ spec = importlib.util.find_spec(module, paths)
+ if not spec:
+ raise ImportError("Can't find %s" % module)
+ return spec.loader.get_code(module)
+
+
+def get_module(module, paths, info):
+ spec = importlib.util.find_spec(module, paths)
+ if not spec:
+ raise ImportError("Can't find %s" % module)
+ return module_from_spec(spec)
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/__init__.py b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1a739fa688986386ea533d939ebfe472deb335be
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/__pycache__/ordered_set.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/__pycache__/ordered_set.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..606781320721b1f9c1538aaa02831f150c66d8c3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/__pycache__/ordered_set.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/__pycache__/pyparsing.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/__pycache__/pyparsing.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..38a5ed39fb4075d45c5a1b9d18fc068b73c23a45
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/__pycache__/pyparsing.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/__pycache__/six.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/__pycache__/six.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b863b963007f4005312c129e7025670b60dce89b
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/__pycache__/six.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/ordered_set.py b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/ordered_set.py
new file mode 100644
index 0000000000000000000000000000000000000000..14876000de895a609d5b9f3de39c3c8fc44ef1fc
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/ordered_set.py
@@ -0,0 +1,488 @@
+"""
+An OrderedSet is a custom MutableSet that remembers its order, so that every
+entry has an index that can be looked up.
+
+Based on a recipe originally posted to ActiveState Recipes by Raymond Hettiger,
+and released under the MIT license.
+"""
+import itertools as it
+from collections import deque
+
+try:
+ # Python 3
+ from collections.abc import MutableSet, Sequence
+except ImportError:
+ # Python 2.7
+ from collections import MutableSet, Sequence
+
+SLICE_ALL = slice(None)
+__version__ = "3.1"
+
+
+def is_iterable(obj):
+ """
+ Are we being asked to look up a list of things, instead of a single thing?
+ We check for the `__iter__` attribute so that this can cover types that
+ don't have to be known by this module, such as NumPy arrays.
+
+ Strings, however, should be considered as atomic values to look up, not
+ iterables. The same goes for tuples, since they are immutable and therefore
+ valid entries.
+
+ We don't need to check for the Python 2 `unicode` type, because it doesn't
+ have an `__iter__` attribute anyway.
+ """
+ return (
+ hasattr(obj, "__iter__")
+ and not isinstance(obj, str)
+ and not isinstance(obj, tuple)
+ )
+
+
+class OrderedSet(MutableSet, Sequence):
+ """
+ An OrderedSet is a custom MutableSet that remembers its order, so that
+ every entry has an index that can be looked up.
+
+ Example:
+ >>> OrderedSet([1, 1, 2, 3, 2])
+ OrderedSet([1, 2, 3])
+ """
+
+ def __init__(self, iterable=None):
+ self.items = []
+ self.map = {}
+ if iterable is not None:
+ self |= iterable
+
+ def __len__(self):
+ """
+ Returns the number of unique elements in the ordered set
+
+ Example:
+ >>> len(OrderedSet([]))
+ 0
+ >>> len(OrderedSet([1, 2]))
+ 2
+ """
+ return len(self.items)
+
+ def __getitem__(self, index):
+ """
+ Get the item at a given index.
+
+ If `index` is a slice, you will get back that slice of items, as a
+ new OrderedSet.
+
+ If `index` is a list or a similar iterable, you'll get a list of
+ items corresponding to those indices. This is similar to NumPy's
+ "fancy indexing". The result is not an OrderedSet because you may ask
+ for duplicate indices, and the number of elements returned should be
+ the number of elements asked for.
+
+ Example:
+ >>> oset = OrderedSet([1, 2, 3])
+ >>> oset[1]
+ 2
+ """
+ if isinstance(index, slice) and index == SLICE_ALL:
+ return self.copy()
+ elif is_iterable(index):
+ return [self.items[i] for i in index]
+ elif hasattr(index, "__index__") or isinstance(index, slice):
+ result = self.items[index]
+ if isinstance(result, list):
+ return self.__class__(result)
+ else:
+ return result
+ else:
+ raise TypeError("Don't know how to index an OrderedSet by %r" % index)
+
+ def copy(self):
+ """
+ Return a shallow copy of this object.
+
+ Example:
+ >>> this = OrderedSet([1, 2, 3])
+ >>> other = this.copy()
+ >>> this == other
+ True
+ >>> this is other
+ False
+ """
+ return self.__class__(self)
+
+ def __getstate__(self):
+ if len(self) == 0:
+ # The state can't be an empty list.
+ # We need to return a truthy value, or else __setstate__ won't be run.
+ #
+ # This could have been done more gracefully by always putting the state
+ # in a tuple, but this way is backwards- and forwards- compatible with
+ # previous versions of OrderedSet.
+ return (None,)
+ else:
+ return list(self)
+
+ def __setstate__(self, state):
+ if state == (None,):
+ self.__init__([])
+ else:
+ self.__init__(state)
+
+ def __contains__(self, key):
+ """
+ Test if the item is in this ordered set
+
+ Example:
+ >>> 1 in OrderedSet([1, 3, 2])
+ True
+ >>> 5 in OrderedSet([1, 3, 2])
+ False
+ """
+ return key in self.map
+
+ def add(self, key):
+ """
+ Add `key` as an item to this OrderedSet, then return its index.
+
+ If `key` is already in the OrderedSet, return the index it already
+ had.
+
+ Example:
+ >>> oset = OrderedSet()
+ >>> oset.append(3)
+ 0
+ >>> print(oset)
+ OrderedSet([3])
+ """
+ if key not in self.map:
+ self.map[key] = len(self.items)
+ self.items.append(key)
+ return self.map[key]
+
+ append = add
+
+ def update(self, sequence):
+ """
+ Update the set with the given iterable sequence, then return the index
+ of the last element inserted.
+
+ Example:
+ >>> oset = OrderedSet([1, 2, 3])
+ >>> oset.update([3, 1, 5, 1, 4])
+ 4
+ >>> print(oset)
+ OrderedSet([1, 2, 3, 5, 4])
+ """
+ item_index = None
+ try:
+ for item in sequence:
+ item_index = self.add(item)
+ except TypeError:
+ raise ValueError(
+ "Argument needs to be an iterable, got %s" % type(sequence)
+ )
+ return item_index
+
+ def index(self, key):
+ """
+ Get the index of a given entry, raising an IndexError if it's not
+ present.
+
+ `key` can be an iterable of entries that is not a string, in which case
+ this returns a list of indices.
+
+ Example:
+ >>> oset = OrderedSet([1, 2, 3])
+ >>> oset.index(2)
+ 1
+ """
+ if is_iterable(key):
+ return [self.index(subkey) for subkey in key]
+ return self.map[key]
+
+ # Provide some compatibility with pd.Index
+ get_loc = index
+ get_indexer = index
+
+ def pop(self):
+ """
+ Remove and return the last element from the set.
+
+ Raises KeyError if the set is empty.
+
+ Example:
+ >>> oset = OrderedSet([1, 2, 3])
+ >>> oset.pop()
+ 3
+ """
+ if not self.items:
+ raise KeyError("Set is empty")
+
+ elem = self.items[-1]
+ del self.items[-1]
+ del self.map[elem]
+ return elem
+
+ def discard(self, key):
+ """
+ Remove an element. Do not raise an exception if absent.
+
+ The MutableSet mixin uses this to implement the .remove() method, which
+ *does* raise an error when asked to remove a non-existent item.
+
+ Example:
+ >>> oset = OrderedSet([1, 2, 3])
+ >>> oset.discard(2)
+ >>> print(oset)
+ OrderedSet([1, 3])
+ >>> oset.discard(2)
+ >>> print(oset)
+ OrderedSet([1, 3])
+ """
+ if key in self:
+ i = self.map[key]
+ del self.items[i]
+ del self.map[key]
+ for k, v in self.map.items():
+ if v >= i:
+ self.map[k] = v - 1
+
+ def clear(self):
+ """
+ Remove all items from this OrderedSet.
+ """
+ del self.items[:]
+ self.map.clear()
+
+ def __iter__(self):
+ """
+ Example:
+ >>> list(iter(OrderedSet([1, 2, 3])))
+ [1, 2, 3]
+ """
+ return iter(self.items)
+
+ def __reversed__(self):
+ """
+ Example:
+ >>> list(reversed(OrderedSet([1, 2, 3])))
+ [3, 2, 1]
+ """
+ return reversed(self.items)
+
+ def __repr__(self):
+ if not self:
+ return "%s()" % (self.__class__.__name__,)
+ return "%s(%r)" % (self.__class__.__name__, list(self))
+
+ def __eq__(self, other):
+ """
+ Returns true if the containers have the same items. If `other` is a
+ Sequence, then order is checked, otherwise it is ignored.
+
+ Example:
+ >>> oset = OrderedSet([1, 3, 2])
+ >>> oset == [1, 3, 2]
+ True
+ >>> oset == [1, 2, 3]
+ False
+ >>> oset == [2, 3]
+ False
+ >>> oset == OrderedSet([3, 2, 1])
+ False
+ """
+ # In Python 2 deque is not a Sequence, so treat it as one for
+ # consistent behavior with Python 3.
+ if isinstance(other, (Sequence, deque)):
+ # Check that this OrderedSet contains the same elements, in the
+ # same order, as the other object.
+ return list(self) == list(other)
+ try:
+ other_as_set = set(other)
+ except TypeError:
+ # If `other` can't be converted into a set, it's not equal.
+ return False
+ else:
+ return set(self) == other_as_set
+
+ def union(self, *sets):
+ """
+ Combines all unique items.
+ Each items order is defined by its first appearance.
+
+ Example:
+ >>> oset = OrderedSet.union(OrderedSet([3, 1, 4, 1, 5]), [1, 3], [2, 0])
+ >>> print(oset)
+ OrderedSet([3, 1, 4, 5, 2, 0])
+ >>> oset.union([8, 9])
+ OrderedSet([3, 1, 4, 5, 2, 0, 8, 9])
+ >>> oset | {10}
+ OrderedSet([3, 1, 4, 5, 2, 0, 10])
+ """
+ cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet
+ containers = map(list, it.chain([self], sets))
+ items = it.chain.from_iterable(containers)
+ return cls(items)
+
+ def __and__(self, other):
+ # the parent implementation of this is backwards
+ return self.intersection(other)
+
+ def intersection(self, *sets):
+ """
+ Returns elements in common between all sets. Order is defined only
+ by the first set.
+
+ Example:
+ >>> oset = OrderedSet.intersection(OrderedSet([0, 1, 2, 3]), [1, 2, 3])
+ >>> print(oset)
+ OrderedSet([1, 2, 3])
+ >>> oset.intersection([2, 4, 5], [1, 2, 3, 4])
+ OrderedSet([2])
+ >>> oset.intersection()
+ OrderedSet([1, 2, 3])
+ """
+ cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet
+ if sets:
+ common = set.intersection(*map(set, sets))
+ items = (item for item in self if item in common)
+ else:
+ items = self
+ return cls(items)
+
+ def difference(self, *sets):
+ """
+ Returns all elements that are in this set but not the others.
+
+ Example:
+ >>> OrderedSet([1, 2, 3]).difference(OrderedSet([2]))
+ OrderedSet([1, 3])
+ >>> OrderedSet([1, 2, 3]).difference(OrderedSet([2]), OrderedSet([3]))
+ OrderedSet([1])
+ >>> OrderedSet([1, 2, 3]) - OrderedSet([2])
+ OrderedSet([1, 3])
+ >>> OrderedSet([1, 2, 3]).difference()
+ OrderedSet([1, 2, 3])
+ """
+ cls = self.__class__
+ if sets:
+ other = set.union(*map(set, sets))
+ items = (item for item in self if item not in other)
+ else:
+ items = self
+ return cls(items)
+
+ def issubset(self, other):
+ """
+ Report whether another set contains this set.
+
+ Example:
+ >>> OrderedSet([1, 2, 3]).issubset({1, 2})
+ False
+ >>> OrderedSet([1, 2, 3]).issubset({1, 2, 3, 4})
+ True
+ >>> OrderedSet([1, 2, 3]).issubset({1, 4, 3, 5})
+ False
+ """
+ if len(self) > len(other): # Fast check for obvious cases
+ return False
+ return all(item in other for item in self)
+
+ def issuperset(self, other):
+ """
+ Report whether this set contains another set.
+
+ Example:
+ >>> OrderedSet([1, 2]).issuperset([1, 2, 3])
+ False
+ >>> OrderedSet([1, 2, 3, 4]).issuperset({1, 2, 3})
+ True
+ >>> OrderedSet([1, 4, 3, 5]).issuperset({1, 2, 3})
+ False
+ """
+ if len(self) < len(other): # Fast check for obvious cases
+ return False
+ return all(item in self for item in other)
+
+ def symmetric_difference(self, other):
+ """
+ Return the symmetric difference of two OrderedSets as a new set.
+ That is, the new set will contain all elements that are in exactly
+ one of the sets.
+
+ Their order will be preserved, with elements from `self` preceding
+ elements from `other`.
+
+ Example:
+ >>> this = OrderedSet([1, 4, 3, 5, 7])
+ >>> other = OrderedSet([9, 7, 1, 3, 2])
+ >>> this.symmetric_difference(other)
+ OrderedSet([4, 5, 9, 2])
+ """
+ cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet
+ diff1 = cls(self).difference(other)
+ diff2 = cls(other).difference(self)
+ return diff1.union(diff2)
+
+ def _update_items(self, items):
+ """
+ Replace the 'items' list of this OrderedSet with a new one, updating
+ self.map accordingly.
+ """
+ self.items = items
+ self.map = {item: idx for (idx, item) in enumerate(items)}
+
+ def difference_update(self, *sets):
+ """
+ Update this OrderedSet to remove items from one or more other sets.
+
+ Example:
+ >>> this = OrderedSet([1, 2, 3])
+ >>> this.difference_update(OrderedSet([2, 4]))
+ >>> print(this)
+ OrderedSet([1, 3])
+
+ >>> this = OrderedSet([1, 2, 3, 4, 5])
+ >>> this.difference_update(OrderedSet([2, 4]), OrderedSet([1, 4, 6]))
+ >>> print(this)
+ OrderedSet([3, 5])
+ """
+ items_to_remove = set()
+ for other in sets:
+ items_to_remove |= set(other)
+ self._update_items([item for item in self.items if item not in items_to_remove])
+
+ def intersection_update(self, other):
+ """
+ Update this OrderedSet to keep only items in another set, preserving
+ their order in this set.
+
+ Example:
+ >>> this = OrderedSet([1, 4, 3, 5, 7])
+ >>> other = OrderedSet([9, 7, 1, 3, 2])
+ >>> this.intersection_update(other)
+ >>> print(this)
+ OrderedSet([1, 3, 7])
+ """
+ other = set(other)
+ self._update_items([item for item in self.items if item in other])
+
+ def symmetric_difference_update(self, other):
+ """
+ Update this OrderedSet to remove items from another set, then
+ add items from the other set that were not present in this set.
+
+ Example:
+ >>> this = OrderedSet([1, 4, 3, 5, 7])
+ >>> other = OrderedSet([9, 7, 1, 3, 2])
+ >>> this.symmetric_difference_update(other)
+ >>> print(this)
+ OrderedSet([4, 5, 9, 2])
+ """
+ items_to_add = [item for item in other if item not in self]
+ items_to_remove = set(other)
+ self._update_items(
+ [item for item in self.items if item not in items_to_remove] + items_to_add
+ )
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__about__.py b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__about__.py
new file mode 100644
index 0000000000000000000000000000000000000000..dc95138d049ba3194964d528b552a6d1514fa382
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__about__.py
@@ -0,0 +1,27 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+__all__ = [
+ "__title__",
+ "__summary__",
+ "__uri__",
+ "__version__",
+ "__author__",
+ "__email__",
+ "__license__",
+ "__copyright__",
+]
+
+__title__ = "packaging"
+__summary__ = "Core utilities for Python packages"
+__uri__ = "https://github.com/pypa/packaging"
+
+__version__ = "19.2"
+
+__author__ = "Donald Stufft and individual contributors"
+__email__ = "donald@stufft.io"
+
+__license__ = "BSD or Apache License, Version 2.0"
+__copyright__ = "Copyright 2014-2019 %s" % __author__
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__init__.py b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..a0cf67df5245be16a020ca048832e180f7ce8661
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__init__.py
@@ -0,0 +1,26 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+from .__about__ import (
+ __author__,
+ __copyright__,
+ __email__,
+ __license__,
+ __summary__,
+ __title__,
+ __uri__,
+ __version__,
+)
+
+__all__ = [
+ "__title__",
+ "__summary__",
+ "__uri__",
+ "__version__",
+ "__author__",
+ "__email__",
+ "__license__",
+ "__copyright__",
+]
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/__about__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/__about__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7d9037f1bb6d5968811baf8942e3fbf7e60d59ee
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/__about__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f243de1a7139d6fb4f71042a37fd1b90d956785f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/_compat.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/_compat.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3b6a9d3d5388b0ece9cde557a5bb73030473d6a9
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/_compat.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/_structures.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/_structures.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..fab90397ab45fa4b5f0450fac26e41338082b5a9
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/_structures.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/markers.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/markers.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a1f3828005bf18b818e1229218c90f0284e10383
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/markers.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/requirements.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/requirements.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..88ecbd9a958890aa0fadbf73b0b97e3b98478ea1
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/requirements.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/specifiers.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/specifiers.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..82ab8b2199b22b9cf188c5b2819623eba341bbd1
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/specifiers.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/tags.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/tags.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b959100376b91731f28775beb0beb799a4698856
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/tags.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/utils.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/utils.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3c3013fa445474184c55b0573f6c788091657d0b
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/utils.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/version.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/version.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ccc47de4351403f315ead1051bd966c80d904618
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/version.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/_compat.py b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/_compat.py
new file mode 100644
index 0000000000000000000000000000000000000000..25da473c196855ad59a6d2d785ef1ddef49795be
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/_compat.py
@@ -0,0 +1,31 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import sys
+
+
+PY2 = sys.version_info[0] == 2
+PY3 = sys.version_info[0] == 3
+
+# flake8: noqa
+
+if PY3:
+ string_types = (str,)
+else:
+ string_types = (basestring,)
+
+
+def with_metaclass(meta, *bases):
+ """
+ Create a base class with a metaclass.
+ """
+ # This requires a bit of explanation: the basic idea is to make a dummy
+ # metaclass for one level of class instantiation that replaces itself with
+ # the actual metaclass.
+ class metaclass(meta):
+ def __new__(cls, name, this_bases, d):
+ return meta(name, bases, d)
+
+ return type.__new__(metaclass, "temporary_class", (), {})
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/_structures.py b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/_structures.py
new file mode 100644
index 0000000000000000000000000000000000000000..68dcca634d8e3f0081bad2f9ae5e653a2942db68
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/_structures.py
@@ -0,0 +1,68 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+
+class Infinity(object):
+ def __repr__(self):
+ return "Infinity"
+
+ def __hash__(self):
+ return hash(repr(self))
+
+ def __lt__(self, other):
+ return False
+
+ def __le__(self, other):
+ return False
+
+ def __eq__(self, other):
+ return isinstance(other, self.__class__)
+
+ def __ne__(self, other):
+ return not isinstance(other, self.__class__)
+
+ def __gt__(self, other):
+ return True
+
+ def __ge__(self, other):
+ return True
+
+ def __neg__(self):
+ return NegativeInfinity
+
+
+Infinity = Infinity()
+
+
+class NegativeInfinity(object):
+ def __repr__(self):
+ return "-Infinity"
+
+ def __hash__(self):
+ return hash(repr(self))
+
+ def __lt__(self, other):
+ return True
+
+ def __le__(self, other):
+ return True
+
+ def __eq__(self, other):
+ return isinstance(other, self.__class__)
+
+ def __ne__(self, other):
+ return not isinstance(other, self.__class__)
+
+ def __gt__(self, other):
+ return False
+
+ def __ge__(self, other):
+ return False
+
+ def __neg__(self):
+ return Infinity
+
+
+NegativeInfinity = NegativeInfinity()
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/markers.py b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/markers.py
new file mode 100644
index 0000000000000000000000000000000000000000..4bdfdb24f2096eac046bb9a576065bb96cfd476e
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/markers.py
@@ -0,0 +1,296 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import operator
+import os
+import platform
+import sys
+
+from setuptools.extern.pyparsing import ParseException, ParseResults, stringStart, stringEnd
+from setuptools.extern.pyparsing import ZeroOrMore, Group, Forward, QuotedString
+from setuptools.extern.pyparsing import Literal as L # noqa
+
+from ._compat import string_types
+from .specifiers import Specifier, InvalidSpecifier
+
+
+__all__ = [
+ "InvalidMarker",
+ "UndefinedComparison",
+ "UndefinedEnvironmentName",
+ "Marker",
+ "default_environment",
+]
+
+
+class InvalidMarker(ValueError):
+ """
+ An invalid marker was found, users should refer to PEP 508.
+ """
+
+
+class UndefinedComparison(ValueError):
+ """
+ An invalid operation was attempted on a value that doesn't support it.
+ """
+
+
+class UndefinedEnvironmentName(ValueError):
+ """
+ A name was attempted to be used that does not exist inside of the
+ environment.
+ """
+
+
+class Node(object):
+ def __init__(self, value):
+ self.value = value
+
+ def __str__(self):
+ return str(self.value)
+
+ def __repr__(self):
+ return "<{0}({1!r})>".format(self.__class__.__name__, str(self))
+
+ def serialize(self):
+ raise NotImplementedError
+
+
+class Variable(Node):
+ def serialize(self):
+ return str(self)
+
+
+class Value(Node):
+ def serialize(self):
+ return '"{0}"'.format(self)
+
+
+class Op(Node):
+ def serialize(self):
+ return str(self)
+
+
+VARIABLE = (
+ L("implementation_version")
+ | L("platform_python_implementation")
+ | L("implementation_name")
+ | L("python_full_version")
+ | L("platform_release")
+ | L("platform_version")
+ | L("platform_machine")
+ | L("platform_system")
+ | L("python_version")
+ | L("sys_platform")
+ | L("os_name")
+ | L("os.name")
+ | L("sys.platform") # PEP-345
+ | L("platform.version") # PEP-345
+ | L("platform.machine") # PEP-345
+ | L("platform.python_implementation") # PEP-345
+ | L("python_implementation") # PEP-345
+ | L("extra") # undocumented setuptools legacy
+)
+ALIASES = {
+ "os.name": "os_name",
+ "sys.platform": "sys_platform",
+ "platform.version": "platform_version",
+ "platform.machine": "platform_machine",
+ "platform.python_implementation": "platform_python_implementation",
+ "python_implementation": "platform_python_implementation",
+}
+VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))
+
+VERSION_CMP = (
+ L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<")
+)
+
+MARKER_OP = VERSION_CMP | L("not in") | L("in")
+MARKER_OP.setParseAction(lambda s, l, t: Op(t[0]))
+
+MARKER_VALUE = QuotedString("'") | QuotedString('"')
+MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0]))
+
+BOOLOP = L("and") | L("or")
+
+MARKER_VAR = VARIABLE | MARKER_VALUE
+
+MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR)
+MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0]))
+
+LPAREN = L("(").suppress()
+RPAREN = L(")").suppress()
+
+MARKER_EXPR = Forward()
+MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN)
+MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR)
+
+MARKER = stringStart + MARKER_EXPR + stringEnd
+
+
+def _coerce_parse_result(results):
+ if isinstance(results, ParseResults):
+ return [_coerce_parse_result(i) for i in results]
+ else:
+ return results
+
+
+def _format_marker(marker, first=True):
+ assert isinstance(marker, (list, tuple, string_types))
+
+ # Sometimes we have a structure like [[...]] which is a single item list
+ # where the single item is itself it's own list. In that case we want skip
+ # the rest of this function so that we don't get extraneous () on the
+ # outside.
+ if (
+ isinstance(marker, list)
+ and len(marker) == 1
+ and isinstance(marker[0], (list, tuple))
+ ):
+ return _format_marker(marker[0])
+
+ if isinstance(marker, list):
+ inner = (_format_marker(m, first=False) for m in marker)
+ if first:
+ return " ".join(inner)
+ else:
+ return "(" + " ".join(inner) + ")"
+ elif isinstance(marker, tuple):
+ return " ".join([m.serialize() for m in marker])
+ else:
+ return marker
+
+
+_operators = {
+ "in": lambda lhs, rhs: lhs in rhs,
+ "not in": lambda lhs, rhs: lhs not in rhs,
+ "<": operator.lt,
+ "<=": operator.le,
+ "==": operator.eq,
+ "!=": operator.ne,
+ ">=": operator.ge,
+ ">": operator.gt,
+}
+
+
+def _eval_op(lhs, op, rhs):
+ try:
+ spec = Specifier("".join([op.serialize(), rhs]))
+ except InvalidSpecifier:
+ pass
+ else:
+ return spec.contains(lhs)
+
+ oper = _operators.get(op.serialize())
+ if oper is None:
+ raise UndefinedComparison(
+ "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs)
+ )
+
+ return oper(lhs, rhs)
+
+
+_undefined = object()
+
+
+def _get_env(environment, name):
+ value = environment.get(name, _undefined)
+
+ if value is _undefined:
+ raise UndefinedEnvironmentName(
+ "{0!r} does not exist in evaluation environment.".format(name)
+ )
+
+ return value
+
+
+def _evaluate_markers(markers, environment):
+ groups = [[]]
+
+ for marker in markers:
+ assert isinstance(marker, (list, tuple, string_types))
+
+ if isinstance(marker, list):
+ groups[-1].append(_evaluate_markers(marker, environment))
+ elif isinstance(marker, tuple):
+ lhs, op, rhs = marker
+
+ if isinstance(lhs, Variable):
+ lhs_value = _get_env(environment, lhs.value)
+ rhs_value = rhs.value
+ else:
+ lhs_value = lhs.value
+ rhs_value = _get_env(environment, rhs.value)
+
+ groups[-1].append(_eval_op(lhs_value, op, rhs_value))
+ else:
+ assert marker in ["and", "or"]
+ if marker == "or":
+ groups.append([])
+
+ return any(all(item) for item in groups)
+
+
+def format_full_version(info):
+ version = "{0.major}.{0.minor}.{0.micro}".format(info)
+ kind = info.releaselevel
+ if kind != "final":
+ version += kind[0] + str(info.serial)
+ return version
+
+
+def default_environment():
+ if hasattr(sys, "implementation"):
+ iver = format_full_version(sys.implementation.version)
+ implementation_name = sys.implementation.name
+ else:
+ iver = "0"
+ implementation_name = ""
+
+ return {
+ "implementation_name": implementation_name,
+ "implementation_version": iver,
+ "os_name": os.name,
+ "platform_machine": platform.machine(),
+ "platform_release": platform.release(),
+ "platform_system": platform.system(),
+ "platform_version": platform.version(),
+ "python_full_version": platform.python_version(),
+ "platform_python_implementation": platform.python_implementation(),
+ "python_version": ".".join(platform.python_version_tuple()[:2]),
+ "sys_platform": sys.platform,
+ }
+
+
+class Marker(object):
+ def __init__(self, marker):
+ try:
+ self._markers = _coerce_parse_result(MARKER.parseString(marker))
+ except ParseException as e:
+ err_str = "Invalid marker: {0!r}, parse error at {1!r}".format(
+ marker, marker[e.loc : e.loc + 8]
+ )
+ raise InvalidMarker(err_str)
+
+ def __str__(self):
+ return _format_marker(self._markers)
+
+ def __repr__(self):
+ return "<Marker({0!r})>".format(str(self))
+
+ def evaluate(self, environment=None):
+ """Evaluate a marker.
+
+ Return the boolean from evaluating the given marker against the
+ environment. environment is an optional argument to override all or
+ part of the determined environment.
+
+ The environment is determined from the current Python process.
+ """
+ current_environment = default_environment()
+ if environment is not None:
+ current_environment.update(environment)
+
+ return _evaluate_markers(self._markers, current_environment)
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/requirements.py b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/requirements.py
new file mode 100644
index 0000000000000000000000000000000000000000..8a0c2cb9be06e633b26c7205d6efe42827835910
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/requirements.py
@@ -0,0 +1,138 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import string
+import re
+
+from setuptools.extern.pyparsing import stringStart, stringEnd, originalTextFor, ParseException
+from setuptools.extern.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine
+from setuptools.extern.pyparsing import Literal as L # noqa
+from setuptools.extern.six.moves.urllib import parse as urlparse
+
+from .markers import MARKER_EXPR, Marker
+from .specifiers import LegacySpecifier, Specifier, SpecifierSet
+
+
+class InvalidRequirement(ValueError):
+ """
+ An invalid requirement was found, users should refer to PEP 508.
+ """
+
+
+ALPHANUM = Word(string.ascii_letters + string.digits)
+
+LBRACKET = L("[").suppress()
+RBRACKET = L("]").suppress()
+LPAREN = L("(").suppress()
+RPAREN = L(")").suppress()
+COMMA = L(",").suppress()
+SEMICOLON = L(";").suppress()
+AT = L("@").suppress()
+
+PUNCTUATION = Word("-_.")
+IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM)
+IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END))
+
+NAME = IDENTIFIER("name")
+EXTRA = IDENTIFIER
+
+URI = Regex(r"[^ ]+")("url")
+URL = AT + URI
+
+EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA)
+EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras")
+
+VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE)
+VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE)
+
+VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY
+VERSION_MANY = Combine(
+ VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False
+)("_raw_spec")
+_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY))
+_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "")
+
+VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier")
+VERSION_SPEC.setParseAction(lambda s, l, t: t[1])
+
+MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
+MARKER_EXPR.setParseAction(
+ lambda s, l, t: Marker(s[t._original_start : t._original_end])
+)
+MARKER_SEPARATOR = SEMICOLON
+MARKER = MARKER_SEPARATOR + MARKER_EXPR
+
+VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
+URL_AND_MARKER = URL + Optional(MARKER)
+
+NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)
+
+REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd
+# setuptools.extern.pyparsing isn't thread safe during initialization, so we do it eagerly, see
+# issue #104
+REQUIREMENT.parseString("x[]")
+
+
+class Requirement(object):
+ """Parse a requirement.
+
+ Parse a given requirement string into its parts, such as name, specifier,
+ URL, and extras. Raises InvalidRequirement on a badly-formed requirement
+ string.
+ """
+
+ # TODO: Can we test whether something is contained within a requirement?
+ # If so how do we do that? Do we need to test against the _name_ of
+ # the thing as well as the version? What about the markers?
+ # TODO: Can we normalize the name and extra name?
+
+ def __init__(self, requirement_string):
+ try:
+ req = REQUIREMENT.parseString(requirement_string)
+ except ParseException as e:
+ raise InvalidRequirement(
+ 'Parse error at "{0!r}": {1}'.format(
+ requirement_string[e.loc : e.loc + 8], e.msg
+ )
+ )
+
+ self.name = req.name
+ if req.url:
+ parsed_url = urlparse.urlparse(req.url)
+ if parsed_url.scheme == "file":
+ if urlparse.urlunparse(parsed_url) != req.url:
+ raise InvalidRequirement("Invalid URL given")
+ elif not (parsed_url.scheme and parsed_url.netloc) or (
+ not parsed_url.scheme and not parsed_url.netloc
+ ):
+ raise InvalidRequirement("Invalid URL: {0}".format(req.url))
+ self.url = req.url
+ else:
+ self.url = None
+ self.extras = set(req.extras.asList() if req.extras else [])
+ self.specifier = SpecifierSet(req.specifier)
+ self.marker = req.marker if req.marker else None
+
+ def __str__(self):
+ parts = [self.name]
+
+ if self.extras:
+ parts.append("[{0}]".format(",".join(sorted(self.extras))))
+
+ if self.specifier:
+ parts.append(str(self.specifier))
+
+ if self.url:
+ parts.append("@ {0}".format(self.url))
+ if self.marker:
+ parts.append(" ")
+
+ if self.marker:
+ parts.append("; {0}".format(self.marker))
+
+ return "".join(parts)
+
+ def __repr__(self):
+ return "<Requirement({0!r})>".format(str(self))
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/specifiers.py b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/specifiers.py
new file mode 100644
index 0000000000000000000000000000000000000000..743576a080a0af8d0995f307ea6afc645b13ca61
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/specifiers.py
@@ -0,0 +1,749 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import abc
+import functools
+import itertools
+import re
+
+from ._compat import string_types, with_metaclass
+from .version import Version, LegacyVersion, parse
+
+
+class InvalidSpecifier(ValueError):
+ """
+ An invalid specifier was found, users should refer to PEP 440.
+ """
+
+
+class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
+ @abc.abstractmethod
+ def __str__(self):
+ """
+ Returns the str representation of this Specifier like object. This
+ should be representative of the Specifier itself.
+ """
+
+ @abc.abstractmethod
+ def __hash__(self):
+ """
+ Returns a hash value for this Specifier like object.
+ """
+
+ @abc.abstractmethod
+ def __eq__(self, other):
+ """
+ Returns a boolean representing whether or not the two Specifier like
+ objects are equal.
+ """
+
+ @abc.abstractmethod
+ def __ne__(self, other):
+ """
+ Returns a boolean representing whether or not the two Specifier like
+ objects are not equal.
+ """
+
+ @abc.abstractproperty
+ def prereleases(self):
+ """
+ Returns whether or not pre-releases as a whole are allowed by this
+ specifier.
+ """
+
+ @prereleases.setter
+ def prereleases(self, value):
+ """
+ Sets whether or not pre-releases as a whole are allowed by this
+ specifier.
+ """
+
+ @abc.abstractmethod
+ def contains(self, item, prereleases=None):
+ """
+ Determines if the given item is contained within this specifier.
+ """
+
+ @abc.abstractmethod
+ def filter(self, iterable, prereleases=None):
+ """
+ Takes an iterable of items and filters them so that only items which
+ are contained within this specifier are allowed in it.
+ """
+
+
+class _IndividualSpecifier(BaseSpecifier):
+
+ _operators = {}
+
+ def __init__(self, spec="", prereleases=None):
+ match = self._regex.search(spec)
+ if not match:
+ raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec))
+
+ self._spec = (match.group("operator").strip(), match.group("version").strip())
+
+ # Store whether or not this Specifier should accept prereleases
+ self._prereleases = prereleases
+
+ def __repr__(self):
+ pre = (
+ ", prereleases={0!r}".format(self.prereleases)
+ if self._prereleases is not None
+ else ""
+ )
+
+ return "<{0}({1!r}{2})>".format(self.__class__.__name__, str(self), pre)
+
+ def __str__(self):
+ return "{0}{1}".format(*self._spec)
+
+ def __hash__(self):
+ return hash(self._spec)
+
+ def __eq__(self, other):
+ if isinstance(other, string_types):
+ try:
+ other = self.__class__(other)
+ except InvalidSpecifier:
+ return NotImplemented
+ elif not isinstance(other, self.__class__):
+ return NotImplemented
+
+ return self._spec == other._spec
+
+ def __ne__(self, other):
+ if isinstance(other, string_types):
+ try:
+ other = self.__class__(other)
+ except InvalidSpecifier:
+ return NotImplemented
+ elif not isinstance(other, self.__class__):
+ return NotImplemented
+
+ return self._spec != other._spec
+
+ def _get_operator(self, op):
+ return getattr(self, "_compare_{0}".format(self._operators[op]))
+
+ def _coerce_version(self, version):
+ if not isinstance(version, (LegacyVersion, Version)):
+ version = parse(version)
+ return version
+
+ @property
+ def operator(self):
+ return self._spec[0]
+
+ @property
+ def version(self):
+ return self._spec[1]
+
+ @property
+ def prereleases(self):
+ return self._prereleases
+
+ @prereleases.setter
+ def prereleases(self, value):
+ self._prereleases = value
+
+ def __contains__(self, item):
+ return self.contains(item)
+
+ def contains(self, item, prereleases=None):
+ # Determine if prereleases are to be allowed or not.
+ if prereleases is None:
+ prereleases = self.prereleases
+
+ # Normalize item to a Version or LegacyVersion, this allows us to have
+ # a shortcut for ``"2.0" in Specifier(">=2")
+ item = self._coerce_version(item)
+
+ # Determine if we should be supporting prereleases in this specifier
+ # or not, if we do not support prereleases than we can short circuit
+ # logic if this version is a prereleases.
+ if item.is_prerelease and not prereleases:
+ return False
+
+ # Actually do the comparison to determine if this item is contained
+ # within this Specifier or not.
+ return self._get_operator(self.operator)(item, self.version)
+
+ def filter(self, iterable, prereleases=None):
+ yielded = False
+ found_prereleases = []
+
+ kw = {"prereleases": prereleases if prereleases is not None else True}
+
+ # Attempt to iterate over all the values in the iterable and if any of
+ # them match, yield them.
+ for version in iterable:
+ parsed_version = self._coerce_version(version)
+
+ if self.contains(parsed_version, **kw):
+ # If our version is a prerelease, and we were not set to allow
+ # prereleases, then we'll store it for later incase nothing
+ # else matches this specifier.
+ if parsed_version.is_prerelease and not (
+ prereleases or self.prereleases
+ ):
+ found_prereleases.append(version)
+ # Either this is not a prerelease, or we should have been
+ # accepting prereleases from the beginning.
+ else:
+ yielded = True
+ yield version
+
+ # Now that we've iterated over everything, determine if we've yielded
+ # any values, and if we have not and we have any prereleases stored up
+ # then we will go ahead and yield the prereleases.
+ if not yielded and found_prereleases:
+ for version in found_prereleases:
+ yield version
+
+
+class LegacySpecifier(_IndividualSpecifier):
+
+ _regex_str = r"""
+ (?P<operator>(==|!=|<=|>=|<|>))
+ \s*
+ (?P<version>
+ [^,;\s)]* # Since this is a "legacy" specifier, and the version
+ # string can be just about anything, we match everything
+ # except for whitespace, a semi-colon for marker support,
+ # a closing paren since versions can be enclosed in
+ # them, and a comma since it's a version separator.
+ )
+ """
+
+ _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
+
+ _operators = {
+ "==": "equal",
+ "!=": "not_equal",
+ "<=": "less_than_equal",
+ ">=": "greater_than_equal",
+ "<": "less_than",
+ ">": "greater_than",
+ }
+
+ def _coerce_version(self, version):
+ if not isinstance(version, LegacyVersion):
+ version = LegacyVersion(str(version))
+ return version
+
+ def _compare_equal(self, prospective, spec):
+ return prospective == self._coerce_version(spec)
+
+ def _compare_not_equal(self, prospective, spec):
+ return prospective != self._coerce_version(spec)
+
+ def _compare_less_than_equal(self, prospective, spec):
+ return prospective <= self._coerce_version(spec)
+
+ def _compare_greater_than_equal(self, prospective, spec):
+ return prospective >= self._coerce_version(spec)
+
+ def _compare_less_than(self, prospective, spec):
+ return prospective < self._coerce_version(spec)
+
+ def _compare_greater_than(self, prospective, spec):
+ return prospective > self._coerce_version(spec)
+
+
+def _require_version_compare(fn):
+ @functools.wraps(fn)
+ def wrapped(self, prospective, spec):
+ if not isinstance(prospective, Version):
+ return False
+ return fn(self, prospective, spec)
+
+ return wrapped
+
+
+class Specifier(_IndividualSpecifier):
+
+ _regex_str = r"""
+ (?P<operator>(~=|==|!=|<=|>=|<|>|===))
+ (?P<version>
+ (?:
+ # The identity operators allow for an escape hatch that will
+ # do an exact string match of the version you wish to install.
+ # This will not be parsed by PEP 440 and we cannot determine
+ # any semantic meaning from it. This operator is discouraged
+ # but included entirely as an escape hatch.
+ (?<====) # Only match for the identity operator
+ \s*
+ [^\s]* # We just match everything, except for whitespace
+ # since we are only testing for strict identity.
+ )
+ |
+ (?:
+ # The (non)equality operators allow for wild card and local
+ # versions to be specified so we have to define these two
+ # operators separately to enable that.
+ (?<===|!=) # Only match for equals and not equals
+
+ \s*
+ v?
+ (?:[0-9]+!)? # epoch
+ [0-9]+(?:\.[0-9]+)* # release
+ (?: # pre release
+ [-_\.]?
+ (a|b|c|rc|alpha|beta|pre|preview)
+ [-_\.]?
+ [0-9]*
+ )?
+ (?: # post release
+ (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+ )?
+
+ # You cannot use a wild card and a dev or local version
+ # together so group them with a | and make them optional.
+ (?:
+ (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
+ (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
+ |
+ \.\* # Wild card syntax of .*
+ )?
+ )
+ |
+ (?:
+ # The compatible operator requires at least two digits in the
+ # release segment.
+ (?<=~=) # Only match for the compatible operator
+
+ \s*
+ v?
+ (?:[0-9]+!)? # epoch
+ [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *)
+ (?: # pre release
+ [-_\.]?
+ (a|b|c|rc|alpha|beta|pre|preview)
+ [-_\.]?
+ [0-9]*
+ )?
+ (?: # post release
+ (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+ )?
+ (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
+ )
+ |
+ (?:
+ # All other operators only allow a sub set of what the
+ # (non)equality operators do. Specifically they do not allow
+ # local versions to be specified nor do they allow the prefix
+ # matching wild cards.
+ (?<!==|!=|~=) # We have special cases for these
+ # operators so we want to make sure they
+ # don't match here.
+
+ \s*
+ v?
+ (?:[0-9]+!)? # epoch
+ [0-9]+(?:\.[0-9]+)* # release
+ (?: # pre release
+ [-_\.]?
+ (a|b|c|rc|alpha|beta|pre|preview)
+ [-_\.]?
+ [0-9]*
+ )?
+ (?: # post release
+ (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+ )?
+ (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
+ )
+ )
+ """
+
+ _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
+
+ _operators = {
+ "~=": "compatible",
+ "==": "equal",
+ "!=": "not_equal",
+ "<=": "less_than_equal",
+ ">=": "greater_than_equal",
+ "<": "less_than",
+ ">": "greater_than",
+ "===": "arbitrary",
+ }
+
+ @_require_version_compare
+ def _compare_compatible(self, prospective, spec):
+ # Compatible releases have an equivalent combination of >= and ==. That
+ # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
+ # implement this in terms of the other specifiers instead of
+ # implementing it ourselves. The only thing we need to do is construct
+ # the other specifiers.
+
+ # We want everything but the last item in the version, but we want to
+ # ignore post and dev releases and we want to treat the pre-release as
+ # it's own separate segment.
+ prefix = ".".join(
+ list(
+ itertools.takewhile(
+ lambda x: (not x.startswith("post") and not x.startswith("dev")),
+ _version_split(spec),
+ )
+ )[:-1]
+ )
+
+ # Add the prefix notation to the end of our string
+ prefix += ".*"
+
+ return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
+ prospective, prefix
+ )
+
+ @_require_version_compare
+ def _compare_equal(self, prospective, spec):
+ # We need special logic to handle prefix matching
+ if spec.endswith(".*"):
+ # In the case of prefix matching we want to ignore local segment.
+ prospective = Version(prospective.public)
+ # Split the spec out by dots, and pretend that there is an implicit
+ # dot in between a release segment and a pre-release segment.
+ spec = _version_split(spec[:-2]) # Remove the trailing .*
+
+ # Split the prospective version out by dots, and pretend that there
+ # is an implicit dot in between a release segment and a pre-release
+ # segment.
+ prospective = _version_split(str(prospective))
+
+ # Shorten the prospective version to be the same length as the spec
+ # so that we can determine if the specifier is a prefix of the
+ # prospective version or not.
+ prospective = prospective[: len(spec)]
+
+ # Pad out our two sides with zeros so that they both equal the same
+ # length.
+ spec, prospective = _pad_version(spec, prospective)
+ else:
+ # Convert our spec string into a Version
+ spec = Version(spec)
+
+ # If the specifier does not have a local segment, then we want to
+ # act as if the prospective version also does not have a local
+ # segment.
+ if not spec.local:
+ prospective = Version(prospective.public)
+
+ return prospective == spec
+
+ @_require_version_compare
+ def _compare_not_equal(self, prospective, spec):
+ return not self._compare_equal(prospective, spec)
+
+ @_require_version_compare
+ def _compare_less_than_equal(self, prospective, spec):
+ return prospective <= Version(spec)
+
+ @_require_version_compare
+ def _compare_greater_than_equal(self, prospective, spec):
+ return prospective >= Version(spec)
+
+ @_require_version_compare
+ def _compare_less_than(self, prospective, spec):
+ # Convert our spec to a Version instance, since we'll want to work with
+ # it as a version.
+ spec = Version(spec)
+
+ # Check to see if the prospective version is less than the spec
+ # version. If it's not we can short circuit and just return False now
+ # instead of doing extra unneeded work.
+ if not prospective < spec:
+ return False
+
+ # This special case is here so that, unless the specifier itself
+ # includes is a pre-release version, that we do not accept pre-release
+ # versions for the version mentioned in the specifier (e.g. <3.1 should
+ # not match 3.1.dev0, but should match 3.0.dev0).
+ if not spec.is_prerelease and prospective.is_prerelease:
+ if Version(prospective.base_version) == Version(spec.base_version):
+ return False
+
+ # If we've gotten to here, it means that prospective version is both
+ # less than the spec version *and* it's not a pre-release of the same
+ # version in the spec.
+ return True
+
+ @_require_version_compare
+ def _compare_greater_than(self, prospective, spec):
+ # Convert our spec to a Version instance, since we'll want to work with
+ # it as a version.
+ spec = Version(spec)
+
+ # Check to see if the prospective version is greater than the spec
+ # version. If it's not we can short circuit and just return False now
+ # instead of doing extra unneeded work.
+ if not prospective > spec:
+ return False
+
+ # This special case is here so that, unless the specifier itself
+ # includes is a post-release version, that we do not accept
+ # post-release versions for the version mentioned in the specifier
+ # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
+ if not spec.is_postrelease and prospective.is_postrelease:
+ if Version(prospective.base_version) == Version(spec.base_version):
+ return False
+
+ # Ensure that we do not allow a local version of the version mentioned
+ # in the specifier, which is technically greater than, to match.
+ if prospective.local is not None:
+ if Version(prospective.base_version) == Version(spec.base_version):
+ return False
+
+ # If we've gotten to here, it means that prospective version is both
+ # greater than the spec version *and* it's not a pre-release of the
+ # same version in the spec.
+ return True
+
+ def _compare_arbitrary(self, prospective, spec):
+ return str(prospective).lower() == str(spec).lower()
+
+ @property
+ def prereleases(self):
+ # If there is an explicit prereleases set for this, then we'll just
+ # blindly use that.
+ if self._prereleases is not None:
+ return self._prereleases
+
+ # Look at all of our specifiers and determine if they are inclusive
+ # operators, and if they are if they are including an explicit
+ # prerelease.
+ operator, version = self._spec
+ if operator in ["==", ">=", "<=", "~=", "==="]:
+ # The == specifier can include a trailing .*, if it does we
+ # want to remove before parsing.
+ if operator == "==" and version.endswith(".*"):
+ version = version[:-2]
+
+ # Parse the version, and if it is a pre-release than this
+ # specifier allows pre-releases.
+ if parse(version).is_prerelease:
+ return True
+
+ return False
+
+ @prereleases.setter
+ def prereleases(self, value):
+ self._prereleases = value
+
+
+_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
+
+
+def _version_split(version):
+ result = []
+ for item in version.split("."):
+ match = _prefix_regex.search(item)
+ if match:
+ result.extend(match.groups())
+ else:
+ result.append(item)
+ return result
+
+
+def _pad_version(left, right):
+ left_split, right_split = [], []
+
+ # Get the release segment of our versions
+ left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
+ right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
+
+ # Get the rest of our versions
+ left_split.append(left[len(left_split[0]) :])
+ right_split.append(right[len(right_split[0]) :])
+
+ # Insert our padding
+ left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
+ right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
+
+ return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split)))
+
+
+class SpecifierSet(BaseSpecifier):
+ def __init__(self, specifiers="", prereleases=None):
+ # Split on , to break each indidivual specifier into it's own item, and
+ # strip each item to remove leading/trailing whitespace.
+ specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
+
+ # Parsed each individual specifier, attempting first to make it a
+ # Specifier and falling back to a LegacySpecifier.
+ parsed = set()
+ for specifier in specifiers:
+ try:
+ parsed.add(Specifier(specifier))
+ except InvalidSpecifier:
+ parsed.add(LegacySpecifier(specifier))
+
+ # Turn our parsed specifiers into a frozen set and save them for later.
+ self._specs = frozenset(parsed)
+
+ # Store our prereleases value so we can use it later to determine if
+ # we accept prereleases or not.
+ self._prereleases = prereleases
+
+ def __repr__(self):
+ pre = (
+ ", prereleases={0!r}".format(self.prereleases)
+ if self._prereleases is not None
+ else ""
+ )
+
+ return "<SpecifierSet({0!r}{1})>".format(str(self), pre)
+
+ def __str__(self):
+ return ",".join(sorted(str(s) for s in self._specs))
+
+ def __hash__(self):
+ return hash(self._specs)
+
+ def __and__(self, other):
+ if isinstance(other, string_types):
+ other = SpecifierSet(other)
+ elif not isinstance(other, SpecifierSet):
+ return NotImplemented
+
+ specifier = SpecifierSet()
+ specifier._specs = frozenset(self._specs | other._specs)
+
+ if self._prereleases is None and other._prereleases is not None:
+ specifier._prereleases = other._prereleases
+ elif self._prereleases is not None and other._prereleases is None:
+ specifier._prereleases = self._prereleases
+ elif self._prereleases == other._prereleases:
+ specifier._prereleases = self._prereleases
+ else:
+ raise ValueError(
+ "Cannot combine SpecifierSets with True and False prerelease "
+ "overrides."
+ )
+
+ return specifier
+
+ def __eq__(self, other):
+ if isinstance(other, string_types):
+ other = SpecifierSet(other)
+ elif isinstance(other, _IndividualSpecifier):
+ other = SpecifierSet(str(other))
+ elif not isinstance(other, SpecifierSet):
+ return NotImplemented
+
+ return self._specs == other._specs
+
+ def __ne__(self, other):
+ if isinstance(other, string_types):
+ other = SpecifierSet(other)
+ elif isinstance(other, _IndividualSpecifier):
+ other = SpecifierSet(str(other))
+ elif not isinstance(other, SpecifierSet):
+ return NotImplemented
+
+ return self._specs != other._specs
+
+ def __len__(self):
+ return len(self._specs)
+
+ def __iter__(self):
+ return iter(self._specs)
+
+ @property
+ def prereleases(self):
+ # If we have been given an explicit prerelease modifier, then we'll
+ # pass that through here.
+ if self._prereleases is not None:
+ return self._prereleases
+
+ # If we don't have any specifiers, and we don't have a forced value,
+ # then we'll just return None since we don't know if this should have
+ # pre-releases or not.
+ if not self._specs:
+ return None
+
+ # Otherwise we'll see if any of the given specifiers accept
+ # prereleases, if any of them do we'll return True, otherwise False.
+ return any(s.prereleases for s in self._specs)
+
+ @prereleases.setter
+ def prereleases(self, value):
+ self._prereleases = value
+
+ def __contains__(self, item):
+ return self.contains(item)
+
+ def contains(self, item, prereleases=None):
+ # Ensure that our item is a Version or LegacyVersion instance.
+ if not isinstance(item, (LegacyVersion, Version)):
+ item = parse(item)
+
+ # Determine if we're forcing a prerelease or not, if we're not forcing
+ # one for this particular filter call, then we'll use whatever the
+ # SpecifierSet thinks for whether or not we should support prereleases.
+ if prereleases is None:
+ prereleases = self.prereleases
+
+ # We can determine if we're going to allow pre-releases by looking to
+ # see if any of the underlying items supports them. If none of them do
+ # and this item is a pre-release then we do not allow it and we can
+ # short circuit that here.
+ # Note: This means that 1.0.dev1 would not be contained in something
+ # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0
+ if not prereleases and item.is_prerelease:
+ return False
+
+ # We simply dispatch to the underlying specs here to make sure that the
+ # given version is contained within all of them.
+ # Note: This use of all() here means that an empty set of specifiers
+ # will always return True, this is an explicit design decision.
+ return all(s.contains(item, prereleases=prereleases) for s in self._specs)
+
+ def filter(self, iterable, prereleases=None):
+ # Determine if we're forcing a prerelease or not, if we're not forcing
+ # one for this particular filter call, then we'll use whatever the
+ # SpecifierSet thinks for whether or not we should support prereleases.
+ if prereleases is None:
+ prereleases = self.prereleases
+
+ # If we have any specifiers, then we want to wrap our iterable in the
+ # filter method for each one, this will act as a logical AND amongst
+ # each specifier.
+ if self._specs:
+ for spec in self._specs:
+ iterable = spec.filter(iterable, prereleases=bool(prereleases))
+ return iterable
+ # If we do not have any specifiers, then we need to have a rough filter
+ # which will filter out any pre-releases, unless there are no final
+ # releases, and which will filter out LegacyVersion in general.
+ else:
+ filtered = []
+ found_prereleases = []
+
+ for item in iterable:
+ # Ensure that we some kind of Version class for this item.
+ if not isinstance(item, (LegacyVersion, Version)):
+ parsed_version = parse(item)
+ else:
+ parsed_version = item
+
+ # Filter out any item which is parsed as a LegacyVersion
+ if isinstance(parsed_version, LegacyVersion):
+ continue
+
+ # Store any item which is a pre-release for later unless we've
+ # already found a final version or we are accepting prereleases
+ if parsed_version.is_prerelease and not prereleases:
+ if not filtered:
+ found_prereleases.append(item)
+ else:
+ filtered.append(item)
+
+ # If we've found no items except for pre-releases, then we'll go
+ # ahead and use the pre-releases
+ if not filtered and found_prereleases and prereleases is None:
+ return found_prereleases
+
+ return filtered
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/tags.py b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/tags.py
new file mode 100644
index 0000000000000000000000000000000000000000..ec9942f0f6627f34554082a8c0909bc70bd2a260
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/tags.py
@@ -0,0 +1,404 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import
+
+import distutils.util
+
+try:
+ from importlib.machinery import EXTENSION_SUFFIXES
+except ImportError: # pragma: no cover
+ import imp
+
+ EXTENSION_SUFFIXES = [x[0] for x in imp.get_suffixes()]
+ del imp
+import platform
+import re
+import sys
+import sysconfig
+import warnings
+
+
+INTERPRETER_SHORT_NAMES = {
+ "python": "py", # Generic.
+ "cpython": "cp",
+ "pypy": "pp",
+ "ironpython": "ip",
+ "jython": "jy",
+}
+
+
+_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32
+
+
+class Tag(object):
+
+ __slots__ = ["_interpreter", "_abi", "_platform"]
+
+ def __init__(self, interpreter, abi, platform):
+ self._interpreter = interpreter.lower()
+ self._abi = abi.lower()
+ self._platform = platform.lower()
+
+ @property
+ def interpreter(self):
+ return self._interpreter
+
+ @property
+ def abi(self):
+ return self._abi
+
+ @property
+ def platform(self):
+ return self._platform
+
+ def __eq__(self, other):
+ return (
+ (self.platform == other.platform)
+ and (self.abi == other.abi)
+ and (self.interpreter == other.interpreter)
+ )
+
+ def __hash__(self):
+ return hash((self._interpreter, self._abi, self._platform))
+
+ def __str__(self):
+ return "{}-{}-{}".format(self._interpreter, self._abi, self._platform)
+
+ def __repr__(self):
+ return "<{self} @ {self_id}>".format(self=self, self_id=id(self))
+
+
+def parse_tag(tag):
+ tags = set()
+ interpreters, abis, platforms = tag.split("-")
+ for interpreter in interpreters.split("."):
+ for abi in abis.split("."):
+ for platform_ in platforms.split("."):
+ tags.add(Tag(interpreter, abi, platform_))
+ return frozenset(tags)
+
+
+def _normalize_string(string):
+ return string.replace(".", "_").replace("-", "_")
+
+
+def _cpython_interpreter(py_version):
+ # TODO: Is using py_version_nodot for interpreter version critical?
+ return "cp{major}{minor}".format(major=py_version[0], minor=py_version[1])
+
+
+def _cpython_abis(py_version):
+ abis = []
+ version = "{}{}".format(*py_version[:2])
+ debug = pymalloc = ucs4 = ""
+ with_debug = sysconfig.get_config_var("Py_DEBUG")
+ has_refcount = hasattr(sys, "gettotalrefcount")
+ # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
+ # extension modules is the best option.
+ # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
+ has_ext = "_d.pyd" in EXTENSION_SUFFIXES
+ if with_debug or (with_debug is None and (has_refcount or has_ext)):
+ debug = "d"
+ if py_version < (3, 8):
+ with_pymalloc = sysconfig.get_config_var("WITH_PYMALLOC")
+ if with_pymalloc or with_pymalloc is None:
+ pymalloc = "m"
+ if py_version < (3, 3):
+ unicode_size = sysconfig.get_config_var("Py_UNICODE_SIZE")
+ if unicode_size == 4 or (
+ unicode_size is None and sys.maxunicode == 0x10FFFF
+ ):
+ ucs4 = "u"
+ elif debug:
+ # Debug builds can also load "normal" extension modules.
+ # We can also assume no UCS-4 or pymalloc requirement.
+ abis.append("cp{version}".format(version=version))
+ abis.insert(
+ 0,
+ "cp{version}{debug}{pymalloc}{ucs4}".format(
+ version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4
+ ),
+ )
+ return abis
+
+
+def _cpython_tags(py_version, interpreter, abis, platforms):
+ for abi in abis:
+ for platform_ in platforms:
+ yield Tag(interpreter, abi, platform_)
+ for tag in (Tag(interpreter, "abi3", platform_) for platform_ in platforms):
+ yield tag
+ for tag in (Tag(interpreter, "none", platform_) for platform_ in platforms):
+ yield tag
+ # PEP 384 was first implemented in Python 3.2.
+ for minor_version in range(py_version[1] - 1, 1, -1):
+ for platform_ in platforms:
+ interpreter = "cp{major}{minor}".format(
+ major=py_version[0], minor=minor_version
+ )
+ yield Tag(interpreter, "abi3", platform_)
+
+
+def _pypy_interpreter():
+ return "pp{py_major}{pypy_major}{pypy_minor}".format(
+ py_major=sys.version_info[0],
+ pypy_major=sys.pypy_version_info.major,
+ pypy_minor=sys.pypy_version_info.minor,
+ )
+
+
+def _generic_abi():
+ abi = sysconfig.get_config_var("SOABI")
+ if abi:
+ return _normalize_string(abi)
+ else:
+ return "none"
+
+
+def _pypy_tags(py_version, interpreter, abi, platforms):
+ for tag in (Tag(interpreter, abi, platform) for platform in platforms):
+ yield tag
+ for tag in (Tag(interpreter, "none", platform) for platform in platforms):
+ yield tag
+
+
+def _generic_tags(interpreter, py_version, abi, platforms):
+ for tag in (Tag(interpreter, abi, platform) for platform in platforms):
+ yield tag
+ if abi != "none":
+ tags = (Tag(interpreter, "none", platform_) for platform_ in platforms)
+ for tag in tags:
+ yield tag
+
+
+def _py_interpreter_range(py_version):
+ """
+ Yield Python versions in descending order.
+
+ After the latest version, the major-only version will be yielded, and then
+ all following versions up to 'end'.
+ """
+ yield "py{major}{minor}".format(major=py_version[0], minor=py_version[1])
+ yield "py{major}".format(major=py_version[0])
+ for minor in range(py_version[1] - 1, -1, -1):
+ yield "py{major}{minor}".format(major=py_version[0], minor=minor)
+
+
+def _independent_tags(interpreter, py_version, platforms):
+ """
+ Return the sequence of tags that are consistent across implementations.
+
+ The tags consist of:
+ - py*-none-<platform>
+ - <interpreter>-none-any
+ - py*-none-any
+ """
+ for version in _py_interpreter_range(py_version):
+ for platform_ in platforms:
+ yield Tag(version, "none", platform_)
+ yield Tag(interpreter, "none", "any")
+ for version in _py_interpreter_range(py_version):
+ yield Tag(version, "none", "any")
+
+
+def _mac_arch(arch, is_32bit=_32_BIT_INTERPRETER):
+ if not is_32bit:
+ return arch
+
+ if arch.startswith("ppc"):
+ return "ppc"
+
+ return "i386"
+
+
+def _mac_binary_formats(version, cpu_arch):
+ formats = [cpu_arch]
+ if cpu_arch == "x86_64":
+ if version < (10, 4):
+ return []
+ formats.extend(["intel", "fat64", "fat32"])
+
+ elif cpu_arch == "i386":
+ if version < (10, 4):
+ return []
+ formats.extend(["intel", "fat32", "fat"])
+
+ elif cpu_arch == "ppc64":
+ # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
+ if version > (10, 5) or version < (10, 4):
+ return []
+ formats.append("fat64")
+
+ elif cpu_arch == "ppc":
+ if version > (10, 6):
+ return []
+ formats.extend(["fat32", "fat"])
+
+ formats.append("universal")
+ return formats
+
+
+def _mac_platforms(version=None, arch=None):
+ version_str, _, cpu_arch = platform.mac_ver()
+ if version is None:
+ version = tuple(map(int, version_str.split(".")[:2]))
+ if arch is None:
+ arch = _mac_arch(cpu_arch)
+ platforms = []
+ for minor_version in range(version[1], -1, -1):
+ compat_version = version[0], minor_version
+ binary_formats = _mac_binary_formats(compat_version, arch)
+ for binary_format in binary_formats:
+ platforms.append(
+ "macosx_{major}_{minor}_{binary_format}".format(
+ major=compat_version[0],
+ minor=compat_version[1],
+ binary_format=binary_format,
+ )
+ )
+ return platforms
+
+
+# From PEP 513.
+def _is_manylinux_compatible(name, glibc_version):
+ # Check for presence of _manylinux module.
+ try:
+ import _manylinux
+
+ return bool(getattr(_manylinux, name + "_compatible"))
+ except (ImportError, AttributeError):
+ # Fall through to heuristic check below.
+ pass
+
+ return _have_compatible_glibc(*glibc_version)
+
+
+def _glibc_version_string():
+ # Returns glibc version string, or None if not using glibc.
+ import ctypes
+
+ # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
+ # manpage says, "If filename is NULL, then the returned handle is for the
+ # main program". This way we can let the linker do the work to figure out
+ # which libc our process is actually using.
+ process_namespace = ctypes.CDLL(None)
+ try:
+ gnu_get_libc_version = process_namespace.gnu_get_libc_version
+ except AttributeError:
+ # Symbol doesn't exist -> therefore, we are not linked to
+ # glibc.
+ return None
+
+ # Call gnu_get_libc_version, which returns a string like "2.5"
+ gnu_get_libc_version.restype = ctypes.c_char_p
+ version_str = gnu_get_libc_version()
+ # py2 / py3 compatibility:
+ if not isinstance(version_str, str):
+ version_str = version_str.decode("ascii")
+
+ return version_str
+
+
+# Separated out from have_compatible_glibc for easier unit testing.
+def _check_glibc_version(version_str, required_major, minimum_minor):
+ # Parse string and check against requested version.
+ #
+ # We use a regexp instead of str.split because we want to discard any
+ # random junk that might come after the minor version -- this might happen
+ # in patched/forked versions of glibc (e.g. Linaro's version of glibc
+ # uses version strings like "2.20-2014.11"). See gh-3588.
+ m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
+ if not m:
+ warnings.warn(
+ "Expected glibc version with 2 components major.minor,"
+ " got: %s" % version_str,
+ RuntimeWarning,
+ )
+ return False
+ return (
+ int(m.group("major")) == required_major
+ and int(m.group("minor")) >= minimum_minor
+ )
+
+
+def _have_compatible_glibc(required_major, minimum_minor):
+ version_str = _glibc_version_string()
+ if version_str is None:
+ return False
+ return _check_glibc_version(version_str, required_major, minimum_minor)
+
+
+def _linux_platforms(is_32bit=_32_BIT_INTERPRETER):
+ linux = _normalize_string(distutils.util.get_platform())
+ if linux == "linux_x86_64" and is_32bit:
+ linux = "linux_i686"
+ manylinux_support = (
+ ("manylinux2014", (2, 17)), # CentOS 7 w/ glibc 2.17 (PEP 599)
+ ("manylinux2010", (2, 12)), # CentOS 6 w/ glibc 2.12 (PEP 571)
+ ("manylinux1", (2, 5)), # CentOS 5 w/ glibc 2.5 (PEP 513)
+ )
+ manylinux_support_iter = iter(manylinux_support)
+ for name, glibc_version in manylinux_support_iter:
+ if _is_manylinux_compatible(name, glibc_version):
+ platforms = [linux.replace("linux", name)]
+ break
+ else:
+ platforms = []
+ # Support for a later manylinux implies support for an earlier version.
+ platforms += [linux.replace("linux", name) for name, _ in manylinux_support_iter]
+ platforms.append(linux)
+ return platforms
+
+
+def _generic_platforms():
+ platform = _normalize_string(distutils.util.get_platform())
+ return [platform]
+
+
+def _interpreter_name():
+ name = platform.python_implementation().lower()
+ return INTERPRETER_SHORT_NAMES.get(name) or name
+
+
+def _generic_interpreter(name, py_version):
+ version = sysconfig.get_config_var("py_version_nodot")
+ if not version:
+ version = "".join(map(str, py_version[:2]))
+ return "{name}{version}".format(name=name, version=version)
+
+
+def sys_tags():
+ """
+ Returns the sequence of tag triples for the running interpreter.
+
+ The order of the sequence corresponds to priority order for the
+ interpreter, from most to least important.
+ """
+ py_version = sys.version_info[:2]
+ interpreter_name = _interpreter_name()
+ if platform.system() == "Darwin":
+ platforms = _mac_platforms()
+ elif platform.system() == "Linux":
+ platforms = _linux_platforms()
+ else:
+ platforms = _generic_platforms()
+
+ if interpreter_name == "cp":
+ interpreter = _cpython_interpreter(py_version)
+ abis = _cpython_abis(py_version)
+ for tag in _cpython_tags(py_version, interpreter, abis, platforms):
+ yield tag
+ elif interpreter_name == "pp":
+ interpreter = _pypy_interpreter()
+ abi = _generic_abi()
+ for tag in _pypy_tags(py_version, interpreter, abi, platforms):
+ yield tag
+ else:
+ interpreter = _generic_interpreter(interpreter_name, py_version)
+ abi = _generic_abi()
+ for tag in _generic_tags(interpreter, py_version, abi, platforms):
+ yield tag
+ for tag in _independent_tags(interpreter, py_version, platforms):
+ yield tag
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/utils.py b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..88418786933b8bc5f6179b8e191f60f79efd7074
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/utils.py
@@ -0,0 +1,57 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import re
+
+from .version import InvalidVersion, Version
+
+
+_canonicalize_regex = re.compile(r"[-_.]+")
+
+
+def canonicalize_name(name):
+ # This is taken from PEP 503.
+ return _canonicalize_regex.sub("-", name).lower()
+
+
+def canonicalize_version(version):
+ """
+    This is very similar to Version.__str__, but has one subtle difference
+    in the way it handles the release segment.
+ """
+
+ try:
+ version = Version(version)
+ except InvalidVersion:
+ # Legacy versions cannot be normalized
+ return version
+
+ parts = []
+
+ # Epoch
+ if version.epoch != 0:
+ parts.append("{0}!".format(version.epoch))
+
+ # Release segment
+ # NB: This strips trailing '.0's to normalize
+ parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in version.release)))
+
+ # Pre-release
+ if version.pre is not None:
+ parts.append("".join(str(x) for x in version.pre))
+
+ # Post-release
+ if version.post is not None:
+ parts.append(".post{0}".format(version.post))
+
+ # Development release
+ if version.dev is not None:
+ parts.append(".dev{0}".format(version.dev))
+
+ # Local version segment
+ if version.local is not None:
+ parts.append("+{0}".format(version.local))
+
+ return "".join(parts)
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/version.py b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/version.py
new file mode 100644
index 0000000000000000000000000000000000000000..95157a1f78c26829ffbe1bd2463f7735b636d16f
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/packaging/version.py
@@ -0,0 +1,420 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import collections
+import itertools
+import re
+
+from ._structures import Infinity
+
+
+__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"]
+
+
+_Version = collections.namedtuple(
+ "_Version", ["epoch", "release", "dev", "pre", "post", "local"]
+)
+
+
+def parse(version):
+ """
+ Parse the given version string and return either a :class:`Version` object
+ or a :class:`LegacyVersion` object depending on if the given version is
+ a valid PEP 440 version or a legacy version.
+ """
+ try:
+ return Version(version)
+ except InvalidVersion:
+ return LegacyVersion(version)
+
+
+class InvalidVersion(ValueError):
+ """
+ An invalid version was found, users should refer to PEP 440.
+ """
+
+
+class _BaseVersion(object):
+ def __hash__(self):
+ return hash(self._key)
+
+ def __lt__(self, other):
+ return self._compare(other, lambda s, o: s < o)
+
+ def __le__(self, other):
+ return self._compare(other, lambda s, o: s <= o)
+
+ def __eq__(self, other):
+ return self._compare(other, lambda s, o: s == o)
+
+ def __ge__(self, other):
+ return self._compare(other, lambda s, o: s >= o)
+
+ def __gt__(self, other):
+ return self._compare(other, lambda s, o: s > o)
+
+ def __ne__(self, other):
+ return self._compare(other, lambda s, o: s != o)
+
+ def _compare(self, other, method):
+ if not isinstance(other, _BaseVersion):
+ return NotImplemented
+
+ return method(self._key, other._key)
+
+
+class LegacyVersion(_BaseVersion):
+ def __init__(self, version):
+ self._version = str(version)
+ self._key = _legacy_cmpkey(self._version)
+
+ def __str__(self):
+ return self._version
+
+ def __repr__(self):
+ return "<LegacyVersion({0})>".format(repr(str(self)))
+
+ @property
+ def public(self):
+ return self._version
+
+ @property
+ def base_version(self):
+ return self._version
+
+ @property
+ def epoch(self):
+ return -1
+
+ @property
+ def release(self):
+ return None
+
+ @property
+ def pre(self):
+ return None
+
+ @property
+ def post(self):
+ return None
+
+ @property
+ def dev(self):
+ return None
+
+ @property
+ def local(self):
+ return None
+
+ @property
+ def is_prerelease(self):
+ return False
+
+ @property
+ def is_postrelease(self):
+ return False
+
+ @property
+ def is_devrelease(self):
+ return False
+
+
+_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE)
+
+_legacy_version_replacement_map = {
+ "pre": "c",
+ "preview": "c",
+ "-": "final-",
+ "rc": "c",
+ "dev": "@",
+}
+
+
+def _parse_version_parts(s):
+ for part in _legacy_version_component_re.split(s):
+ part = _legacy_version_replacement_map.get(part, part)
+
+ if not part or part == ".":
+ continue
+
+ if part[:1] in "0123456789":
+ # pad for numeric comparison
+ yield part.zfill(8)
+ else:
+ yield "*" + part
+
+ # ensure that alpha/beta/candidate are before final
+ yield "*final"
+
+
+def _legacy_cmpkey(version):
+    # We hardcode an epoch of -1 here. A PEP 440 version can only have an epoch
+ # greater than or equal to 0. This will effectively put the LegacyVersion,
+ # which uses the defacto standard originally implemented by setuptools,
+ # as before all PEP 440 versions.
+ epoch = -1
+
+ # This scheme is taken from pkg_resources.parse_version setuptools prior to
+    # its adoption of the packaging library.
+ parts = []
+ for part in _parse_version_parts(version.lower()):
+ if part.startswith("*"):
+ # remove "-" before a prerelease tag
+ if part < "*final":
+ while parts and parts[-1] == "*final-":
+ parts.pop()
+
+ # remove trailing zeros from each series of numeric parts
+ while parts and parts[-1] == "00000000":
+ parts.pop()
+
+ parts.append(part)
+ parts = tuple(parts)
+
+ return epoch, parts
+
+
+# Deliberately not anchored to the start and end of the string, to make it
+# easier for 3rd party code to reuse
+VERSION_PATTERN = r"""
+ v?
+ (?:
+ (?:(?P<epoch>[0-9]+)!)? # epoch
+ (?P<release>[0-9]+(?:\.[0-9]+)*) # release segment
+ (?P<pre> # pre-release
+ [-_\.]?
+ (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
+ [-_\.]?
+ (?P<pre_n>[0-9]+)?
+ )?
+ (?P<post> # post release
+ (?:-(?P<post_n1>[0-9]+))
+ |
+ (?:
+ [-_\.]?
+ (?P<post_l>post|rev|r)
+ [-_\.]?
+ (?P<post_n2>[0-9]+)?
+ )
+ )?
+ (?P<dev> # dev release
+ [-_\.]?
+ (?P<dev_l>dev)
+ [-_\.]?
+ (?P<dev_n>[0-9]+)?
+ )?
+ )
+ (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version
+"""
+
+
+class Version(_BaseVersion):
+
+ _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
+
+ def __init__(self, version):
+ # Validate the version and parse it into pieces
+ match = self._regex.search(version)
+ if not match:
+ raise InvalidVersion("Invalid version: '{0}'".format(version))
+
+ # Store the parsed out pieces of the version
+ self._version = _Version(
+ epoch=int(match.group("epoch")) if match.group("epoch") else 0,
+ release=tuple(int(i) for i in match.group("release").split(".")),
+ pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
+ post=_parse_letter_version(
+ match.group("post_l"), match.group("post_n1") or match.group("post_n2")
+ ),
+ dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
+ local=_parse_local_version(match.group("local")),
+ )
+
+ # Generate a key which will be used for sorting
+ self._key = _cmpkey(
+ self._version.epoch,
+ self._version.release,
+ self._version.pre,
+ self._version.post,
+ self._version.dev,
+ self._version.local,
+ )
+
+ def __repr__(self):
+ return "<Version({0})>".format(repr(str(self)))
+
+ def __str__(self):
+ parts = []
+
+ # Epoch
+ if self.epoch != 0:
+ parts.append("{0}!".format(self.epoch))
+
+ # Release segment
+ parts.append(".".join(str(x) for x in self.release))
+
+ # Pre-release
+ if self.pre is not None:
+ parts.append("".join(str(x) for x in self.pre))
+
+ # Post-release
+ if self.post is not None:
+ parts.append(".post{0}".format(self.post))
+
+ # Development release
+ if self.dev is not None:
+ parts.append(".dev{0}".format(self.dev))
+
+ # Local version segment
+ if self.local is not None:
+ parts.append("+{0}".format(self.local))
+
+ return "".join(parts)
+
+ @property
+ def epoch(self):
+ return self._version.epoch
+
+ @property
+ def release(self):
+ return self._version.release
+
+ @property
+ def pre(self):
+ return self._version.pre
+
+ @property
+ def post(self):
+ return self._version.post[1] if self._version.post else None
+
+ @property
+ def dev(self):
+ return self._version.dev[1] if self._version.dev else None
+
+ @property
+ def local(self):
+ if self._version.local:
+ return ".".join(str(x) for x in self._version.local)
+ else:
+ return None
+
+ @property
+ def public(self):
+ return str(self).split("+", 1)[0]
+
+ @property
+ def base_version(self):
+ parts = []
+
+ # Epoch
+ if self.epoch != 0:
+ parts.append("{0}!".format(self.epoch))
+
+ # Release segment
+ parts.append(".".join(str(x) for x in self.release))
+
+ return "".join(parts)
+
+ @property
+ def is_prerelease(self):
+ return self.dev is not None or self.pre is not None
+
+ @property
+ def is_postrelease(self):
+ return self.post is not None
+
+ @property
+ def is_devrelease(self):
+ return self.dev is not None
+
+
+def _parse_letter_version(letter, number):
+ if letter:
+ # We consider there to be an implicit 0 in a pre-release if there is
+ # not a numeral associated with it.
+ if number is None:
+ number = 0
+
+ # We normalize any letters to their lower case form
+ letter = letter.lower()
+
+ # We consider some words to be alternate spellings of other words and
+ # in those cases we want to normalize the spellings to our preferred
+ # spelling.
+ if letter == "alpha":
+ letter = "a"
+ elif letter == "beta":
+ letter = "b"
+ elif letter in ["c", "pre", "preview"]:
+ letter = "rc"
+ elif letter in ["rev", "r"]:
+ letter = "post"
+
+ return letter, int(number)
+ if not letter and number:
+ # We assume if we are given a number, but we are not given a letter
+ # then this is using the implicit post release syntax (e.g. 1.0-1)
+ letter = "post"
+
+ return letter, int(number)
+
+
+_local_version_separators = re.compile(r"[\._-]")
+
+
+def _parse_local_version(local):
+ """
+ Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
+ """
+ if local is not None:
+ return tuple(
+ part.lower() if not part.isdigit() else int(part)
+ for part in _local_version_separators.split(local)
+ )
+
+
+def _cmpkey(epoch, release, pre, post, dev, local):
+ # When we compare a release version, we want to compare it with all of the
+    # trailing zeros removed. So we'll reverse the list, drop all of the now-
+    # leading zeros until we come to something non-zero, then take the rest,
+    # re-reverse it back into the correct order, make it a tuple, and use
+    # that as our sorting key.
+ release = tuple(
+ reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
+ )
+
+ # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
+ # We'll do this by abusing the pre segment, but we _only_ want to do this
+ # if there is not a pre or a post segment. If we have one of those then
+ # the normal sorting rules will handle this case correctly.
+ if pre is None and post is None and dev is not None:
+ pre = -Infinity
+ # Versions without a pre-release (except as noted above) should sort after
+ # those with one.
+ elif pre is None:
+ pre = Infinity
+
+ # Versions without a post segment should sort before those with one.
+ if post is None:
+ post = -Infinity
+
+ # Versions without a development segment should sort after those with one.
+ if dev is None:
+ dev = Infinity
+
+ if local is None:
+ # Versions without a local segment should sort before those with one.
+ local = -Infinity
+ else:
+ # Versions with a local segment need that segment parsed to implement
+ # the sorting rules in PEP440.
+ # - Alpha numeric segments sort before numeric segments
+ # - Alpha numeric segments sort lexicographically
+ # - Numeric segments sort numerically
+ # - Shorter versions sort before longer versions when the prefixes
+ # match exactly
+ local = tuple((i, "") if isinstance(i, int) else (-Infinity, i) for i in local)
+
+ return epoch, release, pre, post, dev, local
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/pyparsing.py b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/pyparsing.py
new file mode 100644
index 0000000000000000000000000000000000000000..cf75e1e5fcbfe7eac41d2a9e446c5c980741087b
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/pyparsing.py
@@ -0,0 +1,5742 @@
+# module pyparsing.py
+#
+# Copyright (c) 2003-2018 Paul T. McGuire
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__doc__ = \
+"""
+pyparsing module - Classes and methods to define and execute parsing grammars
+=============================================================================
+
+The pyparsing module is an alternative approach to creating and executing simple grammars,
+vs. the traditional lex/yacc approach, or the use of regular expressions. With pyparsing, you
+don't need to learn a new syntax for defining grammars or matching expressions - the parsing module
+provides a library of classes that you use to construct the grammar directly in Python.
+
+Here is a program to parse "Hello, World!" (or any greeting of the form
+C{"<salutation>, <addressee>!"}), built up using L{Word}, L{Literal}, and L{And} elements
+(L{'+'<ParserElement.__add__>} operator gives L{And} expressions, strings are auto-converted to
+L{Literal} expressions)::
+
+ from pyparsing import Word, alphas
+
+ # define grammar of a greeting
+ greet = Word(alphas) + "," + Word(alphas) + "!"
+
+ hello = "Hello, World!"
+ print (hello, "->", greet.parseString(hello))
+
+The program outputs the following::
+
+ Hello, World! -> ['Hello', ',', 'World', '!']
+
+The Python representation of the grammar is quite readable, owing to the self-explanatory
+class names, and the use of '+', '|' and '^' operators.
+
+The L{ParseResults} object returned from L{ParserElement.parseString<ParserElement.parseString>} can be accessed as a nested list, a dictionary, or an
+object with named attributes.
+
+The pyparsing module handles some of the problems that are typically vexing when writing text parsers:
+ - extra or missing whitespace (the above program will also handle "Hello,World!", "Hello , World !", etc.)
+ - quoted strings
+ - embedded comments
+
+
+Getting Started -
+-----------------
+Visit the classes L{ParserElement} and L{ParseResults} to see the base classes that most other pyparsing
+classes inherit from. Use the docstrings for examples of how to:
+ - construct literal match expressions from L{Literal} and L{CaselessLiteral} classes
+ - construct character word-group expressions using the L{Word} class
+ - see how to create repetitive expressions using L{ZeroOrMore} and L{OneOrMore} classes
+ - use L{'+'<And>}, L{'|'<MatchFirst>}, L{'^'<Or>}, and L{'&'<Each>} operators to combine simple expressions into more complex ones
+ - associate names with your parsed results using L{ParserElement.setResultsName}
+ - find some helpful expression short-cuts like L{delimitedList} and L{oneOf}
+ - find more useful common expressions in the L{pyparsing_common} namespace class
+"""
+
+__version__ = "2.2.1"
+__versionTime__ = "18 Sep 2018 00:49 UTC"
+__author__ = "Paul McGuire <ptmcg@users.sourceforge.net>"
+
+import string
+from weakref import ref as wkref
+import copy
+import sys
+import warnings
+import re
+import sre_constants
+import collections
+import pprint
+import traceback
+import types
+from datetime import datetime
+
+try:
+ from _thread import RLock
+except ImportError:
+ from threading import RLock
+
+try:
+ # Python 3
+ from collections.abc import Iterable
+ from collections.abc import MutableMapping
+except ImportError:
+ # Python 2.7
+ from collections import Iterable
+ from collections import MutableMapping
+
+try:
+ from collections import OrderedDict as _OrderedDict
+except ImportError:
+ try:
+ from ordereddict import OrderedDict as _OrderedDict
+ except ImportError:
+ _OrderedDict = None
+
+#~ sys.stderr.write( "testing pyparsing module, version %s, %s\n" % (__version__,__versionTime__ ) )
+
+__all__ = [
+'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty',
+'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal',
+'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or',
+'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException',
+'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException',
+'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter',
+'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore',
+'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col',
+'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString',
+'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums',
+'htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno',
+'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral',
+'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables',
+'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity',
+'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd',
+'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute',
+'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation','locatedExpr', 'withClass',
+'CloseMatch', 'tokenMap', 'pyparsing_common',
+]
+
+system_version = tuple(sys.version_info)[:3]
+PY_3 = system_version[0] == 3
+if PY_3:
+ _MAX_INT = sys.maxsize
+ basestring = str
+ unichr = chr
+ _ustr = str
+
+ # build list of single arg builtins, that can be used as parse actions
+ singleArgBuiltins = [sum, len, sorted, reversed, list, tuple, set, any, all, min, max]
+
+else:
+ _MAX_INT = sys.maxint
+ range = xrange
+
+ def _ustr(obj):
+ """Drop-in replacement for str(obj) that tries to be Unicode friendly. It first tries
+ str(obj). If that fails with a UnicodeEncodeError, then it tries unicode(obj). It
+ then < returns the unicode object | encodes it with the default encoding | ... >.
+ """
+ if isinstance(obj,unicode):
+ return obj
+
+ try:
+ # If this works, then _ustr(obj) has the same behaviour as str(obj), so
+ # it won't break any existing code.
+ return str(obj)
+
+ except UnicodeEncodeError:
+ # Else encode it
+ ret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace')
+ xmlcharref = Regex(r'&#\d+;')
+ xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:])
+ return xmlcharref.transformString(ret)
+
+ # build list of single arg builtins, tolerant of Python version, that can be used as parse actions
+ singleArgBuiltins = []
+ import __builtin__
+ for fname in "sum len sorted reversed list tuple set any all min max".split():
+ try:
+ singleArgBuiltins.append(getattr(__builtin__,fname))
+ except AttributeError:
+ continue
+
+_generatorType = type((y for y in range(1)))
+
+def _xml_escape(data):
+ """Escape &, <, >, ", ', etc. in a string of data."""
+
+ # ampersand must be replaced first
+ from_symbols = '&><"\''
+ to_symbols = ('&'+s+';' for s in "amp gt lt quot apos".split())
+ for from_,to_ in zip(from_symbols, to_symbols):
+ data = data.replace(from_, to_)
+ return data
+
+class _Constants(object):
+ pass
+
+alphas = string.ascii_uppercase + string.ascii_lowercase
+nums = "0123456789"
+hexnums = nums + "ABCDEFabcdef"
+alphanums = alphas + nums
+_bslash = chr(92)
+printables = "".join(c for c in string.printable if c not in string.whitespace)
+
+class ParseBaseException(Exception):
+ """base exception class for all parsing runtime exceptions"""
+ # Performance tuning: we construct a *lot* of these, so keep this
+ # constructor as small and fast as possible
+ def __init__( self, pstr, loc=0, msg=None, elem=None ):
+ self.loc = loc
+ if msg is None:
+ self.msg = pstr
+ self.pstr = ""
+ else:
+ self.msg = msg
+ self.pstr = pstr
+ self.parserElement = elem
+ self.args = (pstr, loc, msg)
+
+ @classmethod
+ def _from_exception(cls, pe):
+ """
+ internal factory method to simplify creating one type of ParseException
+ from another - avoids having __init__ signature conflicts among subclasses
+ """
+ return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement)
+
+ def __getattr__( self, aname ):
+ """supported attributes by name are:
+ - lineno - returns the line number of the exception text
+ - col - returns the column number of the exception text
+ - line - returns the line containing the exception text
+ """
+ if( aname == "lineno" ):
+ return lineno( self.loc, self.pstr )
+ elif( aname in ("col", "column") ):
+ return col( self.loc, self.pstr )
+ elif( aname == "line" ):
+ return line( self.loc, self.pstr )
+ else:
+ raise AttributeError(aname)
+
+ def __str__( self ):
+ return "%s (at char %d), (line:%d, col:%d)" % \
+ ( self.msg, self.loc, self.lineno, self.column )
+ def __repr__( self ):
+ return _ustr(self)
+ def markInputline( self, markerString = ">!<" ):
+ """Extracts the exception line from the input string, and marks
+ the location of the exception with a special symbol.
+ """
+ line_str = self.line
+ line_column = self.column - 1
+ if markerString:
+ line_str = "".join((line_str[:line_column],
+ markerString, line_str[line_column:]))
+ return line_str.strip()
+ def __dir__(self):
+ return "lineno col line".split() + dir(type(self))
+
+class ParseException(ParseBaseException):
+ """
+ Exception thrown when parse expressions don't match class;
+ supported attributes by name are:
+ - lineno - returns the line number of the exception text
+ - col - returns the column number of the exception text
+ - line - returns the line containing the exception text
+
+ Example::
+ try:
+ Word(nums).setName("integer").parseString("ABC")
+ except ParseException as pe:
+ print(pe)
+ print("column: {}".format(pe.col))
+
+ prints::
+ Expected integer (at char 0), (line:1, col:1)
+ column: 1
+ """
+ pass
+
+class ParseFatalException(ParseBaseException):
+ """user-throwable exception thrown when inconsistent parse content
+ is found; stops all parsing immediately"""
+ pass
+
+class ParseSyntaxException(ParseFatalException):
+ """just like L{ParseFatalException}, but thrown internally when an
+ L{ErrorStop<And._ErrorStop>} ('-' operator) indicates that parsing is to stop
+ immediately because an unbacktrackable syntax error has been found"""
+ pass
+
+#~ class ReparseException(ParseBaseException):
+ #~ """Experimental class - parse actions can raise this exception to cause
+ #~ pyparsing to reparse the input string:
+ #~ - with a modified input string, and/or
+ #~ - with a modified start location
+ #~ Set the values of the ReparseException in the constructor, and raise the
+ #~ exception in a parse action to cause pyparsing to use the new string/location.
+ #~ Setting the values as None causes no change to be made.
+ #~ """
+ #~ def __init_( self, newstring, restartLoc ):
+ #~ self.newParseText = newstring
+ #~ self.reparseLoc = restartLoc
+
+class RecursiveGrammarException(Exception):
+ """exception thrown by L{ParserElement.validate} if the grammar could be improperly recursive"""
+ def __init__( self, parseElementList ):
+ self.parseElementTrace = parseElementList
+
+ def __str__( self ):
+ return "RecursiveGrammarException: %s" % self.parseElementTrace
+
+class _ParseResultsWithOffset(object):
+ def __init__(self,p1,p2):
+ self.tup = (p1,p2)
+ def __getitem__(self,i):
+ return self.tup[i]
+ def __repr__(self):
+ return repr(self.tup[0])
+ def setOffset(self,i):
+ self.tup = (self.tup[0],i)
+
+class ParseResults(object):
+ """
+ Structured parse results, to provide multiple means of access to the parsed data:
+ - as a list (C{len(results)})
+ - by list index (C{results[0], results[1]}, etc.)
+ - by attribute (C{results.<resultsName>} - see L{ParserElement.setResultsName})
+
+ Example::
+ integer = Word(nums)
+ date_str = (integer.setResultsName("year") + '/'
+ + integer.setResultsName("month") + '/'
+ + integer.setResultsName("day"))
+ # equivalent form:
+ # date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+ # parseString returns a ParseResults object
+ result = date_str.parseString("1999/12/31")
+
+ def test(s, fn=repr):
+ print("%s -> %s" % (s, fn(eval(s))))
+ test("list(result)")
+ test("result[0]")
+ test("result['month']")
+ test("result.day")
+ test("'month' in result")
+ test("'minutes' in result")
+ test("result.dump()", str)
+ prints::
+ list(result) -> ['1999', '/', '12', '/', '31']
+ result[0] -> '1999'
+ result['month'] -> '12'
+ result.day -> '31'
+ 'month' in result -> True
+ 'minutes' in result -> False
+ result.dump() -> ['1999', '/', '12', '/', '31']
+ - day: 31
+ - month: 12
+ - year: 1999
+ """
+ def __new__(cls, toklist=None, name=None, asList=True, modal=True ):
+ if isinstance(toklist, cls):
+ return toklist
+ retobj = object.__new__(cls)
+ retobj.__doinit = True
+ return retobj
+
+ # Performance tuning: we construct a *lot* of these, so keep this
+ # constructor as small and fast as possible
+ def __init__( self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance ):
+ if self.__doinit:
+ self.__doinit = False
+ self.__name = None
+ self.__parent = None
+ self.__accumNames = {}
+ self.__asList = asList
+ self.__modal = modal
+ if toklist is None:
+ toklist = []
+ if isinstance(toklist, list):
+ self.__toklist = toklist[:]
+ elif isinstance(toklist, _generatorType):
+ self.__toklist = list(toklist)
+ else:
+ self.__toklist = [toklist]
+ self.__tokdict = dict()
+
+ if name is not None and name:
+ if not modal:
+ self.__accumNames[name] = 0
+ if isinstance(name,int):
+ name = _ustr(name) # will always return a str, but use _ustr for consistency
+ self.__name = name
+ if not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None,'',[])):
+ if isinstance(toklist,basestring):
+ toklist = [ toklist ]
+ if asList:
+ if isinstance(toklist,ParseResults):
+ self[name] = _ParseResultsWithOffset(toklist.copy(),0)
+ else:
+ self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]),0)
+ self[name].__name = name
+ else:
+ try:
+ self[name] = toklist[0]
+ except (KeyError,TypeError,IndexError):
+ self[name] = toklist
+
+ def __getitem__( self, i ):
+ if isinstance( i, (int,slice) ):
+ return self.__toklist[i]
+ else:
+ if i not in self.__accumNames:
+ return self.__tokdict[i][-1][0]
+ else:
+ return ParseResults([ v[0] for v in self.__tokdict[i] ])
+
+ def __setitem__( self, k, v, isinstance=isinstance ):
+ if isinstance(v,_ParseResultsWithOffset):
+ self.__tokdict[k] = self.__tokdict.get(k,list()) + [v]
+ sub = v[0]
+ elif isinstance(k,(int,slice)):
+ self.__toklist[k] = v
+ sub = v
+ else:
+ self.__tokdict[k] = self.__tokdict.get(k,list()) + [_ParseResultsWithOffset(v,0)]
+ sub = v
+ if isinstance(sub,ParseResults):
+ sub.__parent = wkref(self)
+
+ def __delitem__( self, i ):
+ if isinstance(i,(int,slice)):
+ mylen = len( self.__toklist )
+ del self.__toklist[i]
+
+ # convert int to slice
+ if isinstance(i, int):
+ if i < 0:
+ i += mylen
+ i = slice(i, i+1)
+ # get removed indices
+ removed = list(range(*i.indices(mylen)))
+ removed.reverse()
+ # fixup indices in token dictionary
+ for name,occurrences in self.__tokdict.items():
+ for j in removed:
+ for k, (value, position) in enumerate(occurrences):
+ occurrences[k] = _ParseResultsWithOffset(value, position - (position > j))
+ else:
+ del self.__tokdict[i]
+
+ def __contains__( self, k ):
+ return k in self.__tokdict
+
+ def __len__( self ): return len( self.__toklist )
+ def __bool__(self): return ( not not self.__toklist )
+ __nonzero__ = __bool__
+ def __iter__( self ): return iter( self.__toklist )
+ def __reversed__( self ): return iter( self.__toklist[::-1] )
+ def _iterkeys( self ):
+ if hasattr(self.__tokdict, "iterkeys"):
+ return self.__tokdict.iterkeys()
+ else:
+ return iter(self.__tokdict)
+
+ def _itervalues( self ):
+ return (self[k] for k in self._iterkeys())
+
+ def _iteritems( self ):
+ return ((k, self[k]) for k in self._iterkeys())
+
+ if PY_3:
+ keys = _iterkeys
+ """Returns an iterator of all named result keys (Python 3.x only)."""
+
+ values = _itervalues
+ """Returns an iterator of all named result values (Python 3.x only)."""
+
+ items = _iteritems
+ """Returns an iterator of all named result key-value tuples (Python 3.x only)."""
+
+ else:
+ iterkeys = _iterkeys
+ """Returns an iterator of all named result keys (Python 2.x only)."""
+
+ itervalues = _itervalues
+ """Returns an iterator of all named result values (Python 2.x only)."""
+
+ iteritems = _iteritems
+ """Returns an iterator of all named result key-value tuples (Python 2.x only)."""
+
+ def keys( self ):
+ """Returns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x)."""
+ return list(self.iterkeys())
+
+ def values( self ):
+ """Returns all named result values (as a list in Python 2.x, as an iterator in Python 3.x)."""
+ return list(self.itervalues())
+
+ def items( self ):
+ """Returns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x)."""
+ return list(self.iteritems())
+
+ def haskeys( self ):
+ """Since keys() returns an iterator, this method is helpful in bypassing
+ code that looks for the existence of any defined results names."""
+ return bool(self.__tokdict)
+
    def pop( self, *args, **kwargs):
        """
        Removes and returns item at specified index (default=C{last}).
        Supports both C{list} and C{dict} semantics for C{pop()}. If passed no
        argument or an integer argument, it will use C{list} semantics
        and pop tokens from the list of parsed tokens. If passed a
        non-integer argument (most likely a string), it will use C{dict}
        semantics and pop the corresponding value from any defined
        results names. A second default return value argument is
        supported, just as in C{dict.pop()}.

        Example::
            def remove_first(tokens):
                tokens.pop(0)
            print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
            print(OneOrMore(Word(nums)).addParseAction(remove_first).parseString("0 123 321")) # -> ['123', '321']

            label = Word(alphas)
            patt = label("LABEL") + OneOrMore(Word(nums))
            print(patt.parseString("AAB 123 321").dump())

            # Use pop() in a parse action to remove named result (note that corresponding value is not
            # removed from list form of results)
            def remove_LABEL(tokens):
                tokens.pop("LABEL")
                return tokens
            patt.addParseAction(remove_LABEL)
            print(patt.parseString("AAB 123 321").dump())
        prints::
            ['AAB', '123', '321']
            - LABEL: AAB

            ['AAB', '123', '321']
        """
        # no positional args -> list semantics, pop the last token
        if not args:
            args = [-1]
        # only 'default=' is accepted, mirroring dict.pop(key, default)
        for k,v in kwargs.items():
            if k == 'default':
                args = (args[0], v)
            else:
                raise TypeError("pop() got an unexpected keyword argument '%s'" % k)
        # int -> list semantics; a single non-int arg, or a key present in the
        # results names, -> dict semantics via __getitem__/__delitem__
        if (isinstance(args[0], int) or
                        len(args) == 1 or
                        args[0] in self):
            index = args[0]
            ret = self[index]
            del self[index]
            return ret
        else:
            # key absent and a default was supplied -> return the default
            defaultvalue = args[1]
            return defaultvalue
+
+ def get(self, key, defaultValue=None):
+ """
+ Returns named result matching the given key, or if there is no
+ such name, then returns the given C{defaultValue} or C{None} if no
+ C{defaultValue} is specified.
+
+ Similar to C{dict.get()}.
+
+ Example::
+ integer = Word(nums)
+ date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+ result = date_str.parseString("1999/12/31")
+ print(result.get("year")) # -> '1999'
+ print(result.get("hour", "not specified")) # -> 'not specified'
+ print(result.get("hour")) # -> None
+ """
+ if key in self:
+ return self[key]
+ else:
+ return defaultValue
+
+ def insert( self, index, insStr ):
+ """
+ Inserts new element at location index in the list of parsed tokens.
+
+ Similar to C{list.insert()}.
+
+ Example::
+ print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
+
+ # use a parse action to insert the parse location in the front of the parsed results
+ def insert_locn(locn, tokens):
+ tokens.insert(0, locn)
+ print(OneOrMore(Word(nums)).addParseAction(insert_locn).parseString("0 123 321")) # -> [0, '0', '123', '321']
+ """
+ self.__toklist.insert(index, insStr)
+ # fixup indices in token dictionary
+ for name,occurrences in self.__tokdict.items():
+ for k, (value, position) in enumerate(occurrences):
+ occurrences[k] = _ParseResultsWithOffset(value, position + (position > index))
+
+ def append( self, item ):
+ """
+ Add single element to end of ParseResults list of elements.
+
+ Example::
+ print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321']
+
+ # use a parse action to compute the sum of the parsed integers, and add it to the end
+ def append_sum(tokens):
+ tokens.append(sum(map(int, tokens)))
+ print(OneOrMore(Word(nums)).addParseAction(append_sum).parseString("0 123 321")) # -> ['0', '123', '321', 444]
+ """
+ self.__toklist.append(item)
+
+ def extend( self, itemseq ):
+ """
+ Add sequence of elements to end of ParseResults list of elements.
+
+ Example::
+ patt = OneOrMore(Word(alphas))
+
+ # use a parse action to append the reverse of the matched strings, to make a palindrome
+ def make_palindrome(tokens):
+ tokens.extend(reversed([t[::-1] for t in tokens]))
+ return ''.join(tokens)
+ print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl'
+ """
+ if isinstance(itemseq, ParseResults):
+ self += itemseq
+ else:
+ self.__toklist.extend(itemseq)
+
+ def clear( self ):
+ """
+ Clear all elements and results names.
+ """
+ del self.__toklist[:]
+ self.__tokdict.clear()
+
+ def __getattr__( self, name ):
+ try:
+ return self[name]
+ except KeyError:
+ return ""
+
+ if name in self.__tokdict:
+ if name not in self.__accumNames:
+ return self.__tokdict[name][-1][0]
+ else:
+ return ParseResults([ v[0] for v in self.__tokdict[name] ])
+ else:
+ return ""
+
+ def __add__( self, other ):
+ ret = self.copy()
+ ret += other
+ return ret
+
    def __iadd__( self, other ):
        # In-place merge: append other's tokens and re-home its named results
        # at offsets shifted past our current length.
        if other.__tokdict:
            offset = len(self.__toklist)
            # negative offsets mean "from the end"; rebase them to our new end
            addoffset = lambda a: offset if a<0 else a+offset
            otheritems = other.__tokdict.items()
            otherdictitems = [(k, _ParseResultsWithOffset(v[0],addoffset(v[1])) )
                                for (k,vlist) in otheritems for v in vlist]
            for k,v in otherdictitems:
                self[k] = v
                # re-parent nested results so getName() can walk back to us
                if isinstance(v[0],ParseResults):
                    v[0].__parent = wkref(self)

        self.__toklist += other.__toklist
        self.__accumNames.update( other.__accumNames )
        return self
+
+ def __radd__(self, other):
+ if isinstance(other,int) and other == 0:
+ # useful for merging many ParseResults using sum() builtin
+ return self.copy()
+ else:
+ # this may raise a TypeError - so be it
+ return other + self
+
+ def __repr__( self ):
+ return "(%s, %s)" % ( repr( self.__toklist ), repr( self.__tokdict ) )
+
+ def __str__( self ):
+ return '[' + ', '.join(_ustr(i) if isinstance(i, ParseResults) else repr(i) for i in self.__toklist) + ']'
+
+ def _asStringList( self, sep='' ):
+ out = []
+ for item in self.__toklist:
+ if out and sep:
+ out.append(sep)
+ if isinstance( item, ParseResults ):
+ out += item._asStringList()
+ else:
+ out.append( _ustr(item) )
+ return out
+
+ def asList( self ):
+ """
+ Returns the parse results as a nested list of matching tokens, all converted to strings.
+
+ Example::
+ patt = OneOrMore(Word(alphas))
+ result = patt.parseString("sldkj lsdkj sldkj")
+ # even though the result prints in string-like form, it is actually a pyparsing ParseResults
+ print(type(result), result) # -> <class 'pyparsing.ParseResults'> ['sldkj', 'lsdkj', 'sldkj']
+
+ # Use asList() to create an actual list
+ result_list = result.asList()
+ print(type(result_list), result_list) # -> <class 'list'> ['sldkj', 'lsdkj', 'sldkj']
+ """
+ return [res.asList() if isinstance(res,ParseResults) else res for res in self.__toklist]
+
+ def asDict( self ):
+ """
+ Returns the named parse results as a nested dictionary.
+
+ Example::
+ integer = Word(nums)
+ date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+ result = date_str.parseString('12/31/1999')
+ print(type(result), repr(result)) # -> <class 'pyparsing.ParseResults'> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]})
+
+ result_dict = result.asDict()
+ print(type(result_dict), repr(result_dict)) # -> <class 'dict'> {'day': '1999', 'year': '12', 'month': '31'}
+
+ # even though a ParseResults supports dict-like access, sometime you just need to have a dict
+ import json
+ print(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializable
+ print(json.dumps(result.asDict())) # -> {"month": "31", "day": "1999", "year": "12"}
+ """
+ if PY_3:
+ item_fn = self.items
+ else:
+ item_fn = self.iteritems
+
+ def toItem(obj):
+ if isinstance(obj, ParseResults):
+ if obj.haskeys():
+ return obj.asDict()
+ else:
+ return [toItem(v) for v in obj]
+ else:
+ return obj
+
+ return dict((k,toItem(v)) for k,v in item_fn())
+
+ def copy( self ):
+ """
+ Returns a new copy of a C{ParseResults} object.
+ """
+ ret = ParseResults( self.__toklist )
+ ret.__tokdict = self.__tokdict.copy()
+ ret.__parent = self.__parent
+ ret.__accumNames.update( self.__accumNames )
+ ret.__name = self.__name
+ return ret
+
    def asXML( self, doctag=None, namedItemsOnly=False, indent="", formatted=True ):
        """
        (Deprecated) Returns the parse results as XML. Tags are created for tokens and lists that have defined results names.

        - doctag: explicit tag for this element; defaults to this result's name
        - namedItemsOnly: when True, skip tokens that have no results name
        - indent/formatted: pretty-printing controls; formatted=False collapses
          all whitespace
        """
        nl = "\n"
        out = []
        # map token offset -> results name, so named tokens get their own tags
        namedItems = dict((v[1],k) for (k,vlist) in self.__tokdict.items()
                                                            for v in vlist)
        nextLevelIndent = indent + "  "

        # collapse out indents if formatting is not desired
        if not formatted:
            indent = ""
            nextLevelIndent = ""
            nl = ""

        selfTag = None
        if doctag is not None:
            selfTag = doctag
        else:
            if self.__name:
                selfTag = self.__name

        if not selfTag:
            if namedItemsOnly:
                # anonymous element and only named items requested -> nothing to emit
                return ""
            else:
                selfTag = "ITEM"

        out += [ nl, indent, "<", selfTag, ">" ]

        for i,res in enumerate(self.__toklist):
            if isinstance(res,ParseResults):
                # nested results recurse with their own (possibly named) tag
                if i in namedItems:
                    out += [ res.asXML(namedItems[i],
                                        namedItemsOnly and doctag is None,
                                        nextLevelIndent,
                                        formatted)]
                else:
                    out += [ res.asXML(None,
                                        namedItemsOnly and doctag is None,
                                        nextLevelIndent,
                                        formatted)]
            else:
                # individual token, see if there is a name for it
                resTag = None
                if i in namedItems:
                    resTag = namedItems[i]
                if not resTag:
                    if namedItemsOnly:
                        continue
                    else:
                        resTag = "ITEM"
                # escape XML-special characters in the token text
                xmlBodyText = _xml_escape(_ustr(res))
                out += [ nl, nextLevelIndent, "<", resTag, ">",
                                                xmlBodyText,
                                                "</", resTag, ">" ]

        out += [ nl, indent, "</", selfTag, ">" ]
        return "".join(out)
+
+ def __lookup(self,sub):
+ for k,vlist in self.__tokdict.items():
+ for v,loc in vlist:
+ if sub is v:
+ return k
+ return None
+
+ def getName(self):
+ r"""
+ Returns the results name for this token expression. Useful when several
+ different expressions might match at a particular location.
+
+ Example::
+ integer = Word(nums)
+ ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d")
+ house_number_expr = Suppress('#') + Word(nums, alphanums)
+ user_data = (Group(house_number_expr)("house_number")
+ | Group(ssn_expr)("ssn")
+ | Group(integer)("age"))
+ user_info = OneOrMore(user_data)
+
+ result = user_info.parseString("22 111-22-3333 #221B")
+ for item in result:
+ print(item.getName(), ':', item[0])
+ prints::
+ age : 22
+ ssn : 111-22-3333
+ house_number : 221B
+ """
+ if self.__name:
+ return self.__name
+ elif self.__parent:
+ par = self.__parent()
+ if par:
+ return par.__lookup(self)
+ else:
+ return None
+ elif (len(self) == 1 and
+ len(self.__tokdict) == 1 and
+ next(iter(self.__tokdict.values()))[0][1] in (0,-1)):
+ return next(iter(self.__tokdict.keys()))
+ else:
+ return None
+
    def dump(self, indent='', depth=0, full=True):
        """
        Diagnostic method for listing out the contents of a C{ParseResults}.
        Accepts an optional C{indent} argument so that this string can be embedded
        in a nested display of other data.

        - depth: current nesting level, controls per-line indentation
        - full: when False, only the flat list form is shown (no named results
          or nested sub-results)

        Example::
            integer = Word(nums)
            date_str = integer("year") + '/' + integer("month") + '/' + integer("day")

            result = date_str.parseString('12/31/1999')
            print(result.dump())
        prints::
            ['12', '/', '31', '/', '1999']
            - day: 1999
            - month: 31
            - year: 12
        """
        out = []
        NL = '\n'
        # always start with the flat list rendering
        out.append( indent+_ustr(self.asList()) )
        if full:
            if self.haskeys():
                # named results: one "- key: value" line each, sorted by key
                items = sorted((str(k), v) for k,v in self.items())
                for k,v in items:
                    if out:
                        out.append(NL)
                    out.append( "%s%s- %s: " % (indent,('  '*depth), k) )
                    if isinstance(v,ParseResults):
                        if v:
                            # non-empty nested results recurse one level deeper
                            out.append( v.dump(indent,depth+1) )
                        else:
                            out.append(_ustr(v))
                    else:
                        out.append(repr(v))
            elif any(isinstance(vv,ParseResults) for vv in self):
                # no names, but nested results: show each element as "[i]:"
                v = self
                for i,vv in enumerate(v):
                    if isinstance(vv,ParseResults):
                        out.append("\n%s%s[%d]:\n%s%s%s" % (indent,('  '*(depth)),i,indent,('  '*(depth+1)),vv.dump(indent,depth+1) ))
                    else:
                        out.append("\n%s%s[%d]:\n%s%s%s" % (indent,('  '*(depth)),i,indent,('  '*(depth+1)),_ustr(vv)))

        return "".join(out)
+
+ def pprint(self, *args, **kwargs):
+ """
+ Pretty-printer for parsed results as a list, using the C{pprint} module.
+ Accepts additional positional or keyword args as defined for the
+ C{pprint.pprint} method. (U{http://docs.python.org/3/library/pprint.html#pprint.pprint})
+
+ Example::
+ ident = Word(alphas, alphanums)
+ num = Word(nums)
+ func = Forward()
+ term = ident | num | Group('(' + func + ')')
+ func <<= ident + Group(Optional(delimitedList(term)))
+ result = func.parseString("fna a,b,(fnb c,d,200),100")
+ result.pprint(width=40)
+ prints::
+ ['fna',
+ ['a',
+ 'b',
+ ['(', 'fnb', ['c', 'd', '200'], ')'],
+ '100']]
+ """
+ pprint.pprint(self.asList(), *args, **kwargs)
+
    # add support for pickle protocol
    def __getstate__(self):
        # The parent weakref cannot be pickled: dereference it here (yields the
        # parent object, or None if unset/dead) and let __setstate__ re-wrap it.
        return ( self.__toklist,
                 ( self.__tokdict.copy(),
                   self.__parent is not None and self.__parent() or None,
                   self.__accumNames,
                   self.__name ) )
+
+ def __setstate__(self,state):
+ self.__toklist = state[0]
+ (self.__tokdict,
+ par,
+ inAccumNames,
+ self.__name) = state[1]
+ self.__accumNames = {}
+ self.__accumNames.update(inAccumNames)
+ if par is not None:
+ self.__parent = wkref(par)
+ else:
+ self.__parent = None
+
    def __getnewargs__(self):
        # arguments replayed to __new__ when unpickling; __asList/__modal are
        # presumably consumed by __new__ (defined above this view) - TODO confirm
        return self.__toklist, self.__name, self.__asList, self.__modal
+
+ def __dir__(self):
+ return (dir(type(self)) + list(self.keys()))
+
+MutableMapping.register(ParseResults)
+
def col (loc,strg):
    """Returns the current column within a string, counting newlines as line
    separators.  The first column is number 1.

    Note: by default pyparsing expands tabs in the input string before
    parsing; see L{I{ParserElement.parseString}<ParserElement.parseString>}
    for keeping the string, parse locations and line/column numbers
    consistent when the input contains C{<TAB>}s.
    """
    # a loc sitting just past a newline is column 1 of the next line
    if 0 < loc < len(strg) and strg[loc-1] == '\n':
        return 1
    # rfind returns -1 when there is no preceding newline, giving loc+1
    return loc - strg.rfind("\n", 0, loc)
+
def lineno(loc,strg):
    """Returns the current line number within a string, counting newlines as
    line separators.  The first line is number 1.

    Note: by default pyparsing expands tabs in the input string before
    parsing; see L{I{ParserElement.parseString}<ParserElement.parseString>}
    for keeping the string, parse locations and line/column numbers
    consistent when the input contains C{<TAB>}s.
    """
    # 1 + number of newlines strictly before loc
    return 1 + strg.count("\n", 0, loc)
+
def line( loc, strg ):
    """Return the full line of text containing position C{loc} within C{strg}
    (newlines are the line separators; the returned line excludes them)."""
    start = strg.rfind("\n", 0, loc) + 1   # 0 when loc is on the first line
    end = strg.find("\n", loc)
    return strg[start:end] if end >= 0 else strg[start:]
+
def _defaultStartDebugAction( instring, loc, expr ):
    """Default debug hook, called before an expression match is attempted."""
    print("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % (lineno(loc, instring), col(loc, instring)))
+
def _defaultSuccessDebugAction( instring, startloc, endloc, expr, toks ):
    """Default debug hook, called after an expression matches successfully."""
    print("Matched " + _ustr(expr) + " -> " + str(toks.asList()))
+
def _defaultExceptionDebugAction( instring, loc, expr, exc ):
    """Default debug hook, called when an expression match raises."""
    print("Exception raised:" + _ustr(exc))
+
def nullDebugAction(*args):
    """'Do-nothing' debug action, to suppress debugging output during parsing."""
    return None
+
+# Only works on Python 3.x - nonlocal is toxic to Python 2 installs
+#~ 'decorator to trim function calls to match the arity of the target'
+#~ def _trim_arity(func, maxargs=3):
+ #~ if func in singleArgBuiltins:
+ #~ return lambda s,l,t: func(t)
+ #~ limit = 0
+ #~ foundArity = False
+ #~ def wrapper(*args):
+ #~ nonlocal limit,foundArity
+ #~ while 1:
+ #~ try:
+ #~ ret = func(*args[limit:])
+ #~ foundArity = True
+ #~ return ret
+ #~ except TypeError:
+ #~ if limit == maxargs or foundArity:
+ #~ raise
+ #~ limit += 1
+ #~ continue
+ #~ return wrapper
+
# this version is Python 2.x-3.x cross-compatible
'decorator to trim function calls to match the arity of the target'
def _trim_arity(func, maxargs=2):
    """Wrap a parse-action callable so it may accept any prefix-trimmed subset
    of the (s, loc, toks) arguments; leading arguments are dropped one at a
    time until a call succeeds.  The discovered arity is cached in 'limit'."""
    if func in singleArgBuiltins:
        # builtins like int/float can't be introspected; always pass toks only
        return lambda s,l,t: func(t)
    # one-element lists act as mutable cells so wrapper() can update them
    # without 'nonlocal' (which would break Python 2)
    limit = [0]
    foundArity = [False]

    # traceback return data structure changed in Py3.5 - normalize back to plain tuples
    if system_version[:2] >= (3,5):
        def extract_stack(limit=0):
            # special handling for Python 3.5.0 - extra deep call stack by 1
            offset = -3 if system_version == (3,5,0) else -2
            frame_summary = traceback.extract_stack(limit=-offset+limit-1)[offset]
            return [frame_summary[:2]]
        def extract_tb(tb, limit=0):
            frames = traceback.extract_tb(tb, limit=limit)
            frame_summary = frames[-1]
            return [frame_summary[:2]]
    else:
        extract_stack = traceback.extract_stack
        extract_tb = traceback.extract_tb

    # synthesize what would be returned by traceback.extract_stack at the call to
    # user's parse action 'func', so that we don't incur call penalty at parse time

    LINE_DIFF = 6
    # IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND
    # THE CALL TO FUNC INSIDE WRAPPER, LINE_DIFF MUST BE MODIFIED!!!!
    this_line = extract_stack(limit=2)[-1]
    pa_call_line_synth = (this_line[0], this_line[1]+LINE_DIFF)

    def wrapper(*args):
        while 1:
            try:
                ret = func(*args[limit[0]:])
                foundArity[0] = True
                return ret
            except TypeError:
                # re-raise TypeErrors if they did not come from our arity testing
                if foundArity[0]:
                    raise
                else:
                    try:
                        tb = sys.exc_info()[-1]
                        # only swallow TypeErrors raised at the exact synthesized
                        # call site above, i.e. by our own func(...) invocation
                        if not extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth:
                            raise
                    finally:
                        # drop the traceback reference to avoid a frame cycle
                        del tb

                if limit[0] <= maxargs:
                    # try again with one more leading argument trimmed
                    limit[0] += 1
                    continue
                raise

    # copy func name to wrapper for sensible debug output
    func_name = "<parse action>"
    try:
        func_name = getattr(func, '__name__',
                            getattr(func, '__class__').__name__)
    except Exception:
        func_name = str(func)
    wrapper.__name__ = func_name

    return wrapper
+
class ParserElement(object):
    """Abstract base level parser element class."""
    # whitespace characters skipped between tokens by default;
    # override per-instance via setWhitespaceChars / setDefaultWhitespaceChars
    DEFAULT_WHITE_CHARS = " \n\t\r"
    # when True, parse exceptions retain full stack traces (debugging aid)
    verbose_stacktrace = False
+
+ @staticmethod
+ def setDefaultWhitespaceChars( chars ):
+ r"""
+ Overrides the default whitespace chars
+
+ Example::
+ # default whitespace chars are space, <TAB> and newline
+ OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def', 'ghi', 'jkl']
+
+ # change to just treat newline as significant
+ ParserElement.setDefaultWhitespaceChars(" \t")
+ OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def']
+ """
+ ParserElement.DEFAULT_WHITE_CHARS = chars
+
+ @staticmethod
+ def inlineLiteralsUsing(cls):
+ """
+ Set class to be used for inclusion of string literals into a parser.
+
+ Example::
+ # default literal class used is Literal
+ integer = Word(nums)
+ date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+ date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31']
+
+
+ # change to Suppress
+ ParserElement.inlineLiteralsUsing(Suppress)
+ date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+
+ date_str.parseString("1999/12/31") # -> ['1999', '12', '31']
+ """
+ ParserElement._literalStringClass = cls
+
    def __init__( self, savelist=False ):
        # savelist: when True, results are kept as a (grouped) list
        self.parseAction = list()
        self.failAction = None
        #~ self.name = "<unknown>"  # don't define self.name, let subclasses try/except upcall
        self.strRepr = None          # cached string representation
        self.resultsName = None      # name assigned via setResultsName/__call__
        self.saveAsList = savelist
        self.skipWhitespace = True
        self.whiteChars = ParserElement.DEFAULT_WHITE_CHARS
        self.copyDefaultWhiteChars = True
        self.mayReturnEmpty = False # used when checking for left-recursion
        self.keepTabs = False
        self.ignoreExprs = list()    # expressions to skip over (e.g. comments)
        self.debug = False
        self.streamlined = False
        self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index
        self.errmsg = ""
        self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all)
        self.debugActions = ( None, None, None ) #custom debug actions
        self.re = None               # compiled regex, set by subclasses that use one
        self.callPreparse = True # used to avoid redundant calls to preParse
        self.callDuringTry = False   # run parse actions even during lookaheads
+
+ def copy( self ):
+ """
+ Make a copy of this C{ParserElement}. Useful for defining different parse actions
+ for the same parsing pattern, using copies of the original parse element.
+
+ Example::
+ integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
+ integerK = integer.copy().addParseAction(lambda toks: toks[0]*1024) + Suppress("K")
+ integerM = integer.copy().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M")
+
+ print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M"))
+ prints::
+ [5120, 100, 655360, 268435456]
+ Equivalent form of C{expr.copy()} is just C{expr()}::
+ integerM = integer().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M")
+ """
+ cpy = copy.copy( self )
+ cpy.parseAction = self.parseAction[:]
+ cpy.ignoreExprs = self.ignoreExprs[:]
+ if self.copyDefaultWhiteChars:
+ cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS
+ return cpy
+
+ def setName( self, name ):
+ """
+ Define name for this expression, makes debugging and exception messages clearer.
+
+ Example::
+ Word(nums).parseString("ABC") # -> Exception: Expected W:(0123...) (at char 0), (line:1, col:1)
+ Word(nums).setName("integer").parseString("ABC") # -> Exception: Expected integer (at char 0), (line:1, col:1)
+ """
+ self.name = name
+ self.errmsg = "Expected " + self.name
+ if hasattr(self,"exception"):
+ self.exception.msg = self.errmsg
+ return self
+
+ def setResultsName( self, name, listAllMatches=False ):
+ """
+ Define name for referencing matching tokens as a nested attribute
+ of the returned parse results.
+ NOTE: this returns a *copy* of the original C{ParserElement} object;
+ this is so that the client can define a basic element, such as an
+ integer, and reference it in multiple places with different names.
+
+ You can also set results names using the abbreviated syntax,
+ C{expr("name")} in place of C{expr.setResultsName("name")} -
+ see L{I{__call__}<__call__>}.
+
+ Example::
+ date_str = (integer.setResultsName("year") + '/'
+ + integer.setResultsName("month") + '/'
+ + integer.setResultsName("day"))
+
+ # equivalent form:
+ date_str = integer("year") + '/' + integer("month") + '/' + integer("day")
+ """
+ newself = self.copy()
+ if name.endswith("*"):
+ name = name[:-1]
+ listAllMatches=True
+ newself.resultsName = name
+ newself.modalResults = not listAllMatches
+ return newself
+
+ def setBreak(self,breakFlag = True):
+ """Method to invoke the Python pdb debugger when this element is
+ about to be parsed. Set C{breakFlag} to True to enable, False to
+ disable.
+ """
+ if breakFlag:
+ _parseMethod = self._parse
+ def breaker(instring, loc, doActions=True, callPreParse=True):
+ import pdb
+ pdb.set_trace()
+ return _parseMethod( instring, loc, doActions, callPreParse )
+ breaker._originalParseMethod = _parseMethod
+ self._parse = breaker
+ else:
+ if hasattr(self._parse,"_originalParseMethod"):
+ self._parse = self._parse._originalParseMethod
+ return self
+
+ def setParseAction( self, *fns, **kwargs ):
+ """
+ Define one or more actions to perform when successfully matching parse element definition.
+ Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)},
+ C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where:
+ - s = the original string being parsed (see note below)
+ - loc = the location of the matching substring
+ - toks = a list of the matched tokens, packaged as a C{L{ParseResults}} object
+ If the functions in fns modify the tokens, they can return them as the return
+ value from fn, and the modified list of tokens will replace the original.
+ Otherwise, fn does not need to return any value.
+
+ Optional keyword arguments:
+ - callDuringTry = (default=C{False}) indicate if parse action should be run during lookaheads and alternate testing
+
+ Note: the default parsing behavior is to expand tabs in the input string
+ before starting the parsing process. See L{I{parseString}<parseString>} for more information
+ on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
+ consistent view of the parsed string, the parse location, and line and column
+ positions within the parsed string.
+
+ Example::
+ integer = Word(nums)
+ date_str = integer + '/' + integer + '/' + integer
+
+ date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31']
+
+ # use parse action to convert to ints at parse time
+ integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
+ date_str = integer + '/' + integer + '/' + integer
+
+ # note that integer fields are now ints, not strings
+ date_str.parseString("1999/12/31") # -> [1999, '/', 12, '/', 31]
+ """
+ self.parseAction = list(map(_trim_arity, list(fns)))
+ self.callDuringTry = kwargs.get("callDuringTry", False)
+ return self
+
+ def addParseAction( self, *fns, **kwargs ):
+ """
+ Add one or more parse actions to expression's list of parse actions. See L{I{setParseAction}<setParseAction>}.
+
+ See examples in L{I{copy}<copy>}.
+ """
+ self.parseAction += list(map(_trim_arity, list(fns)))
+ self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
+ return self
+
+ def addCondition(self, *fns, **kwargs):
+ """Add a boolean predicate function to expression's list of parse actions. See
+ L{I{setParseAction}<setParseAction>} for function call signatures. Unlike C{setParseAction},
+ functions passed to C{addCondition} need to return boolean success/fail of the condition.
+
+ Optional keyword arguments:
+ - message = define a custom message to be used in the raised exception
+ - fatal = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException
+
+ Example::
+ integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
+ year_int = integer.copy()
+ year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later")
+ date_str = year_int + '/' + integer + '/' + integer
+
+ result = date_str.parseString("1999/12/31") # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1)
+ """
+ msg = kwargs.get("message", "failed user-defined condition")
+ exc_type = ParseFatalException if kwargs.get("fatal", False) else ParseException
+ for fn in fns:
+ def pa(s,l,t):
+ if not bool(_trim_arity(fn)(s,l,t)):
+ raise exc_type(s,l,msg)
+ self.parseAction.append(pa)
+ self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
+ return self
+
+ def setFailAction( self, fn ):
+ """Define action to perform if parsing fails at this expression.
+ Fail acton fn is a callable function that takes the arguments
+ C{fn(s,loc,expr,err)} where:
+ - s = string being parsed
+ - loc = location where expression match was attempted and failed
+ - expr = the parse expression that failed
+ - err = the exception thrown
+ The function returns no value. It may throw C{L{ParseFatalException}}
+ if it is desired to stop parsing immediately."""
+ self.failAction = fn
+ return self
+
+ def _skipIgnorables( self, instring, loc ):
+ exprsFound = True
+ while exprsFound:
+ exprsFound = False
+ for e in self.ignoreExprs:
+ try:
+ while 1:
+ loc,dummy = e._parse( instring, loc )
+ exprsFound = True
+ except ParseException:
+ pass
+ return loc
+
+ def preParse( self, instring, loc ):
+ if self.ignoreExprs:
+ loc = self._skipIgnorables( instring, loc )
+
+ if self.skipWhitespace:
+ wt = self.whiteChars
+ instrlen = len(instring)
+ while loc < instrlen and instring[loc] in wt:
+ loc += 1
+
+ return loc
+
    def parseImpl( self, instring, loc, doActions=True ):
        # default implementation: match nothing, consume nothing; subclasses override
        return loc, []
+
    def postParse( self, instring, loc, tokenlist ):
        # hook for subclasses to transform matched tokens; default is identity
        return tokenlist
+
    #~ @profile
    def _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ):
        # Core match driver: preParse -> parseImpl -> postParse -> parse actions.
        # Returns (new location, ParseResults); raises ParseException on failure.
        debugging = ( self.debug ) #and doActions )

        if debugging or self.failAction:
            # slow path: wrap parseImpl so failure hooks/debug actions can fire
            #~ print ("Match",self,"at loc",loc,"(%d,%d)" % ( lineno(loc,instring), col(loc,instring) ))
            if (self.debugActions[0] ):
                self.debugActions[0]( instring, loc, self )
            if callPreParse and self.callPreparse:
                preloc = self.preParse( instring, loc )
            else:
                preloc = loc
            tokensStart = preloc
            try:
                try:
                    loc,tokens = self.parseImpl( instring, preloc, doActions )
                except IndexError:
                    # ran off the end of input: report as a normal parse failure
                    raise ParseException( instring, len(instring), self.errmsg, self )
            except ParseBaseException as err:
                #~ print ("Exception raised:", err)
                if self.debugActions[2]:
                    self.debugActions[2]( instring, tokensStart, self, err )
                if self.failAction:
                    self.failAction( instring, tokensStart, self, err )
                raise
        else:
            # fast path: no debug hooks, minimal overhead
            if callPreParse and self.callPreparse:
                preloc = self.preParse( instring, loc )
            else:
                preloc = loc
            tokensStart = preloc
            if self.mayIndexError or preloc >= len(instring):
                try:
                    loc,tokens = self.parseImpl( instring, preloc, doActions )
                except IndexError:
                    raise ParseException( instring, len(instring), self.errmsg, self )
            else:
                # subclass guarantees no IndexError; skip the try/except cost
                loc,tokens = self.parseImpl( instring, preloc, doActions )

        tokens = self.postParse( instring, loc, tokens )

        retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults )
        if self.parseAction and (doActions or self.callDuringTry):
            if debugging:
                try:
                    for fn in self.parseAction:
                        tokens = fn( instring, tokensStart, retTokens )
                        if tokens is not None:
                            # action returned replacement tokens; rewrap them
                            retTokens = ParseResults( tokens,
                                                      self.resultsName,
                                                      asList=self.saveAsList and isinstance(tokens,(ParseResults,list)),
                                                      modal=self.modalResults )
                except ParseBaseException as err:
                    #~ print "Exception raised in user parse action:", err
                    if (self.debugActions[2] ):
                        self.debugActions[2]( instring, tokensStart, self, err )
                    raise
            else:
                for fn in self.parseAction:
                    tokens = fn( instring, tokensStart, retTokens )
                    if tokens is not None:
                        retTokens = ParseResults( tokens,
                                                  self.resultsName,
                                                  asList=self.saveAsList and isinstance(tokens,(ParseResults,list)),
                                                  modal=self.modalResults )
        if debugging:
            #~ print ("Matched",self,"->",retTokens.asList())
            if (self.debugActions[1] ):
                self.debugActions[1]( instring, tokensStart, loc, self, retTokens )

        return loc, retTokens
+
+ def tryParse( self, instring, loc ):
+ try:
+ return self._parse( instring, loc, doActions=False )[0]
+ except ParseFatalException:
+ raise ParseException( instring, loc, self.errmsg, self)
+
+ def canParseNext(self, instring, loc):
+ try:
+ self.tryParse(instring, loc)
+ except (ParseException, IndexError):
+ return False
+ else:
+ return True
+
+ class _UnboundedCache(object):
+ def __init__(self):
+ cache = {}
+ self.not_in_cache = not_in_cache = object()
+
+ def get(self, key):
+ return cache.get(key, not_in_cache)
+
+ def set(self, key, value):
+ cache[key] = value
+
+ def clear(self):
+ cache.clear()
+
+ def cache_len(self):
+ return len(cache)
+
+ self.get = types.MethodType(get, self)
+ self.set = types.MethodType(set, self)
+ self.clear = types.MethodType(clear, self)
+ self.__len__ = types.MethodType(cache_len, self)
+
    # Bounded FIFO memo cache; two implementations depending on whether
    # collections.OrderedDict is available (it preserves insertion order,
    # so popitem(False) evicts the oldest entry directly).
    if _OrderedDict is not None:
        class _FifoCache(object):
            def __init__(self, size):
                # unique sentinel: distinguishes "miss" from a stored None/False
                self.not_in_cache = not_in_cache = object()

                cache = _OrderedDict()

                def get(self, key):
                    return cache.get(key, not_in_cache)

                def set(self, key, value):
                    cache[key] = value
                    # evict oldest entries until within the size bound
                    while len(cache) > size:
                        try:
                            cache.popitem(False)
                        except KeyError:
                            pass

                def clear(self):
                    cache.clear()

                def cache_len(self):
                    return len(cache)

                self.get = types.MethodType(get, self)
                self.set = types.MethodType(set, self)
                self.clear = types.MethodType(clear, self)
                self.__len__ = types.MethodType(cache_len, self)

    else:
        class _FifoCache(object):
            def __init__(self, size):
                self.not_in_cache = not_in_cache = object()

                # plain dict plus a bounded deque tracking insertion order
                cache = {}
                key_fifo = collections.deque([], size)

                def get(self, key):
                    return cache.get(key, not_in_cache)

                def set(self, key, value):
                    cache[key] = value
                    # drop oldest keys when the FIFO overflows
                    while len(key_fifo) > size:
                        cache.pop(key_fifo.popleft(), None)
                    key_fifo.append(key)

                def clear(self):
                    cache.clear()
                    key_fifo.clear()

                def cache_len(self):
                    return len(cache)

                self.get = types.MethodType(get, self)
                self.set = types.MethodType(set, self)
                self.clear = types.MethodType(clear, self)
                self.__len__ = types.MethodType(cache_len, self)
+
    # argument cache for optimizing repeated calls when backtracking through recursive expressions
    packrat_cache = {} # this is set later by enabledPackrat(); this is here so that resetCache() doesn't fail
    packrat_cache_lock = RLock()
    # [hits, misses]; indices match HIT/MISS in _parseCache
    packrat_cache_stats = [0, 0]
+
    # this method gets repeatedly called during backtracking with the same arguments -
    # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression
    def _parseCache( self, instring, loc, doActions=True, callPreParse=True ):
        HIT, MISS = 0, 1
        # everything that affects the outcome participates in the cache key
        lookup = (self, instring, loc, callPreParse, doActions)
        with ParserElement.packrat_cache_lock:
            cache = ParserElement.packrat_cache
            value = cache.get(lookup)
            if value is cache.not_in_cache:
                ParserElement.packrat_cache_stats[MISS] += 1
                try:
                    value = self._parseNoCache(instring, loc, doActions, callPreParse)
                except ParseBaseException as pe:
                    # cache a copy of the exception, without the traceback
                    cache.set(lookup, pe.__class__(*pe.args))
                    raise
                else:
                    # cache a copy of the results so later mutation by parse
                    # actions cannot corrupt the cached value
                    cache.set(lookup, (value[0], value[1].copy()))
                    return value
            else:
                ParserElement.packrat_cache_stats[HIT] += 1
                if isinstance(value, Exception):
                    # replay a previously cached failure
                    raise value
                return (value[0], value[1].copy())
+
+ _parse = _parseNoCache
+
+ @staticmethod
+ def resetCache():
+ ParserElement.packrat_cache.clear()
+ ParserElement.packrat_cache_stats[:] = [0] * len(ParserElement.packrat_cache_stats)
+
    _packratEnabled = False   # one-shot guard; a second enablePackrat() call is a no-op
    @staticmethod
    def enablePackrat(cache_size_limit=128):
        """Enables "packrat" parsing, which adds memoizing to the parsing logic.
           Repeated parse attempts at the same string location (which happens
           often in many complex grammars) can immediately return a cached value,
           instead of re-executing parsing/validating code.  Memoizing is done of
           both valid results and parsing exceptions.

           Parameters:
            - cache_size_limit - (default=C{128}) - if an integer value is provided
              will limit the size of the packrat cache; if None is passed, then
              the cache size will be unbounded; if 0 is passed, the cache will
              be effectively disabled.

           This speedup may break existing programs that use parse actions that
           have side-effects.  For this reason, packrat parsing is disabled when
           you first import pyparsing.  To activate the packrat feature, your
           program must call the class method C{ParserElement.enablePackrat()}.  If
           your program uses C{psyco} to "compile as you go", you must call
           C{enablePackrat} before calling C{psyco.full()}.  If you do not do this,
           Python will crash.  For best results, call C{enablePackrat()} immediately
           after importing pyparsing.

           Example::
               import pyparsing
               pyparsing.ParserElement.enablePackrat()
        """
        if not ParserElement._packratEnabled:
            ParserElement._packratEnabled = True
            if cache_size_limit is None:
                # unbounded memoization
                ParserElement.packrat_cache = ParserElement._UnboundedCache()
            else:
                # bounded FIFO cache; a limit of 0 effectively disables caching
                ParserElement.packrat_cache = ParserElement._FifoCache(cache_size_limit)
            # rebind the parse entry point to the memoizing version
            ParserElement._parse = ParserElement._parseCache
+
    def parseString( self, instring, parseAll=False ):
        """
        Execute the parse expression with the given string.
        This is the main interface to the client code, once the complete
        expression has been built.

        If you want the grammar to require that the entire input string be
        successfully parsed, then set C{parseAll} to True (equivalent to ending
        the grammar with C{L{StringEnd()}}).

        Note: C{parseString} implicitly calls C{expandtabs()} on the input string,
        in order to report proper column numbers in parse actions.
        If the input string contains tabs and
        the grammar uses parse actions that use the C{loc} argument to index into the
        string being parsed, you can ensure you have a consistent view of the input
        string by:
         - calling C{parseWithTabs} on your grammar before calling C{parseString}
           (see L{I{parseWithTabs}<parseWithTabs>})
         - define your parse action using the full C{(s,loc,toks)} signature, and
           reference the input string using the parse action's C{s} argument
         - explicitly expand the tabs in your input string before calling
           C{parseString}

        Example::
            Word('a').parseString('aaaaabaaa')  # -> ['aaaaa']
            Word('a').parseString('aaaaabaaa', parseAll=True)  # -> Exception: Expected end of text
        """
        ParserElement.resetCache()
        if not self.streamlined:
            self.streamline()
            #~ self.saveAsList = True
        for e in self.ignoreExprs:
            e.streamline()
        if not self.keepTabs:
            # expand tabs so parse-action column numbers line up with the input
            instring = instring.expandtabs()
        try:
            loc, tokens = self._parse( instring, 0 )
            if parseAll:
                # require that only whitespace/ignorables remain after the match
                loc = self.preParse( instring, loc )
                se = Empty() + StringEnd()
                se._parse( instring, loc )
        except ParseBaseException as exc:
            if ParserElement.verbose_stacktrace:
                raise
            else:
                # catch and re-raise exception from here, clears out pyparsing internal stack trace
                raise exc
        else:
            return tokens
+
    def scanString( self, instring, maxMatches=_MAX_INT, overlap=False ):
        """
        Scan the input string for expression matches.  Each match will return the
        matching tokens, start location, and end location.  May be called with optional
        C{maxMatches} argument, to clip scanning after 'n' matches are found.  If
        C{overlap} is specified, then overlapping matches will be reported.

        Note that the start and end locations are reported relative to the string
        being parsed.  See L{I{parseString}<parseString>} for more information on parsing
        strings with embedded tabs.

        Example::
            source = "sldjf123lsdjjkf345sldkjf879lkjsfd987"
            print(source)
            for tokens,start,end in Word(alphas).scanString(source):
                print(' '*start + '^'*(end-start))
                print(' '*start + tokens[0])

        prints::

            sldjf123lsdjjkf345sldkjf879lkjsfd987
            ^^^^^
            sldjf
                    ^^^^^^^
                    lsdjjkf
                              ^^^^^^
                              sldkjf
                                       ^^^^^^
                                       lkjsfd
        """
        if not self.streamlined:
            self.streamline()
        for e in self.ignoreExprs:
            e.streamline()

        if not self.keepTabs:
            instring = _ustr(instring).expandtabs()
        instrlen = len(instring)
        loc = 0
        # hoist bound methods out of the scanning loop
        preparseFn = self.preParse
        parseFn = self._parse
        ParserElement.resetCache()
        matches = 0
        try:
            while loc <= instrlen and matches < maxMatches:
                try:
                    preloc = preparseFn( instring, loc )
                    nextLoc,tokens = parseFn( instring, preloc, callPreParse=False )
                except ParseException:
                    # no match here - advance one character past the preparsed position
                    loc = preloc+1
                else:
                    if nextLoc > loc:
                        matches += 1
                        yield tokens, preloc, nextLoc
                        if overlap:
                            # NOTE(review): 'nextloc' (preparse from current loc) and
                            # 'nextLoc' (end of this match) are distinct and easy to
                            # confuse; confirm against upstream before changing.
                            nextloc = preparseFn( instring, loc )
                            if nextloc > loc:
                                loc = nextLoc
                            else:
                                loc += 1
                        else:
                            loc = nextLoc
                    else:
                        # zero-width match - step forward to avoid an infinite loop
                        loc = preloc+1
        except ParseBaseException as exc:
            if ParserElement.verbose_stacktrace:
                raise
            else:
                # catch and re-raise exception from here, clears out pyparsing internal stack trace
                raise exc
+
    def transformString( self, instring ):
        """
        Extension to C{L{scanString}}, to modify matching text with modified tokens that may
        be returned from a parse action.  To use C{transformString}, define a grammar and
        attach a parse action to it that modifies the returned token list.
        Invoking C{transformString()} on a target string will then scan for matches,
        and replace the matched text patterns according to the logic in the parse
        action.  C{transformString()} returns the resulting transformed string.

        Example::
            wd = Word(alphas)
            wd.setParseAction(lambda toks: toks[0].title())

            print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york."))
        Prints::
            Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York.
        """
        out = []
        lastE = 0
        # force preservation of <TAB>s, to minimize unwanted transformation of string, and to
        # keep string locs straight between transformString and scanString
        self.keepTabs = True
        try:
            for t,s,e in self.scanString( instring ):
                # emit the unmatched text, then the (possibly modified) tokens
                out.append( instring[lastE:s] )
                if t:
                    if isinstance(t,ParseResults):
                        out += t.asList()
                    elif isinstance(t,list):
                        out += t
                    else:
                        out.append(t)
                lastE = e
            out.append(instring[lastE:])
            # drop empty fragments before joining
            out = [o for o in out if o]
            return "".join(map(_ustr,_flatten(out)))
        except ParseBaseException as exc:
            if ParserElement.verbose_stacktrace:
                raise
            else:
                # catch and re-raise exception from here, clears out pyparsing internal stack trace
                raise exc
+
+ def searchString( self, instring, maxMatches=_MAX_INT ):
+ """
+ Another extension to C{L{scanString}}, simplifying the access to the tokens found
+ to match the given parse expression. May be called with optional
+ C{maxMatches} argument, to clip searching after 'n' matches are found.
+
+ Example::
+ # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters
+ cap_word = Word(alphas.upper(), alphas.lower())
+
+ print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity"))
+
+ # the sum() builtin can be used to merge results into a single ParseResults object
+ print(sum(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity")))
+ prints::
+ [['More'], ['Iron'], ['Lead'], ['Gold'], ['I'], ['Electricity']]
+ ['More', 'Iron', 'Lead', 'Gold', 'I', 'Electricity']
+ """
+ try:
+ return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ])
+ except ParseBaseException as exc:
+ if ParserElement.verbose_stacktrace:
+ raise
+ else:
+ # catch and re-raise exception from here, clears out pyparsing internal stack trace
+ raise exc
+
+ def split(self, instring, maxsplit=_MAX_INT, includeSeparators=False):
+ """
+ Generator method to split a string using the given expression as a separator.
+ May be called with optional C{maxsplit} argument, to limit the number of splits;
+ and the optional C{includeSeparators} argument (default=C{False}), if the separating
+ matching text should be included in the split results.
+
+ Example::
+ punc = oneOf(list(".,;:/-!?"))
+ print(list(punc.split("This, this?, this sentence, is badly punctuated!")))
+ prints::
+ ['This', ' this', '', ' this sentence', ' is badly punctuated', '']
+ """
+ splits = 0
+ last = 0
+ for t,s,e in self.scanString(instring, maxMatches=maxsplit):
+ yield instring[last:s]
+ if includeSeparators:
+ yield t[0]
+ last = e
+ yield instring[last:]
+
+ def __add__(self, other ):
+ """
+ Implementation of + operator - returns C{L{And}}. Adding strings to a ParserElement
+ converts them to L{Literal}s by default.
+
+ Example::
+ greet = Word(alphas) + "," + Word(alphas) + "!"
+ hello = "Hello, World!"
+ print (hello, "->", greet.parseString(hello))
+ Prints::
+ Hello, World! -> ['Hello', ',', 'World', '!']
+ """
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ if not isinstance( other, ParserElement ):
+ warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+ SyntaxWarning, stacklevel=2)
+ return None
+ return And( [ self, other ] )
+
+ def __radd__(self, other ):
+ """
+ Implementation of + operator when left operand is not a C{L{ParserElement}}
+ """
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ if not isinstance( other, ParserElement ):
+ warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+ SyntaxWarning, stacklevel=2)
+ return None
+ return other + self
+
+ def __sub__(self, other):
+ """
+ Implementation of - operator, returns C{L{And}} with error stop
+ """
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ if not isinstance( other, ParserElement ):
+ warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+ SyntaxWarning, stacklevel=2)
+ return None
+ return self + And._ErrorStop() + other
+
+ def __rsub__(self, other ):
+ """
+ Implementation of - operator when left operand is not a C{L{ParserElement}}
+ """
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ if not isinstance( other, ParserElement ):
+ warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+ SyntaxWarning, stacklevel=2)
+ return None
+ return other - self
+
+ def __mul__(self,other):
+ """
+ Implementation of * operator, allows use of C{expr * 3} in place of
+ C{expr + expr + expr}. Expressions may also me multiplied by a 2-integer
+ tuple, similar to C{{min,max}} multipliers in regular expressions. Tuples
+ may also include C{None} as in:
+ - C{expr*(n,None)} or C{expr*(n,)} is equivalent
+ to C{expr*n + L{ZeroOrMore}(expr)}
+ (read as "at least n instances of C{expr}")
+ - C{expr*(None,n)} is equivalent to C{expr*(0,n)}
+ (read as "0 to n instances of C{expr}")
+ - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)}
+ - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)}
+
+ Note that C{expr*(None,n)} does not raise an exception if
+ more than n exprs exist in the input stream; that is,
+ C{expr*(None,n)} does not enforce a maximum number of expr
+ occurrences. If this behavior is desired, then write
+ C{expr*(None,n) + ~expr}
+ """
+ if isinstance(other,int):
+ minElements, optElements = other,0
+ elif isinstance(other,tuple):
+ other = (other + (None, None))[:2]
+ if other[0] is None:
+ other = (0, other[1])
+ if isinstance(other[0],int) and other[1] is None:
+ if other[0] == 0:
+ return ZeroOrMore(self)
+ if other[0] == 1:
+ return OneOrMore(self)
+ else:
+ return self*other[0] + ZeroOrMore(self)
+ elif isinstance(other[0],int) and isinstance(other[1],int):
+ minElements, optElements = other
+ optElements -= minElements
+ else:
+ raise TypeError("cannot multiply 'ParserElement' and ('%s','%s') objects", type(other[0]),type(other[1]))
+ else:
+ raise TypeError("cannot multiply 'ParserElement' and '%s' objects", type(other))
+
+ if minElements < 0:
+ raise ValueError("cannot multiply ParserElement by negative value")
+ if optElements < 0:
+ raise ValueError("second tuple value must be greater or equal to first tuple value")
+ if minElements == optElements == 0:
+ raise ValueError("cannot multiply ParserElement by 0 or (0,0)")
+
+ if (optElements):
+ def makeOptionalList(n):
+ if n>1:
+ return Optional(self + makeOptionalList(n-1))
+ else:
+ return Optional(self)
+ if minElements:
+ if minElements == 1:
+ ret = self + makeOptionalList(optElements)
+ else:
+ ret = And([self]*minElements) + makeOptionalList(optElements)
+ else:
+ ret = makeOptionalList(optElements)
+ else:
+ if minElements == 1:
+ ret = self
+ else:
+ ret = And([self]*minElements)
+ return ret
+
    def __rmul__(self, other):
        # n * expr delegates to expr * n (repetition is symmetric)
        return self.__mul__(other)
+
+ def __or__(self, other ):
+ """
+ Implementation of | operator - returns C{L{MatchFirst}}
+ """
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ if not isinstance( other, ParserElement ):
+ warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+ SyntaxWarning, stacklevel=2)
+ return None
+ return MatchFirst( [ self, other ] )
+
+ def __ror__(self, other ):
+ """
+ Implementation of | operator when left operand is not a C{L{ParserElement}}
+ """
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ if not isinstance( other, ParserElement ):
+ warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+ SyntaxWarning, stacklevel=2)
+ return None
+ return other | self
+
+ def __xor__(self, other ):
+ """
+ Implementation of ^ operator - returns C{L{Or}}
+ """
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ if not isinstance( other, ParserElement ):
+ warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+ SyntaxWarning, stacklevel=2)
+ return None
+ return Or( [ self, other ] )
+
+ def __rxor__(self, other ):
+ """
+ Implementation of ^ operator when left operand is not a C{L{ParserElement}}
+ """
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ if not isinstance( other, ParserElement ):
+ warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+ SyntaxWarning, stacklevel=2)
+ return None
+ return other ^ self
+
+ def __and__(self, other ):
+ """
+ Implementation of & operator - returns C{L{Each}}
+ """
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ if not isinstance( other, ParserElement ):
+ warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+ SyntaxWarning, stacklevel=2)
+ return None
+ return Each( [ self, other ] )
+
+ def __rand__(self, other ):
+ """
+ Implementation of & operator when left operand is not a C{L{ParserElement}}
+ """
+ if isinstance( other, basestring ):
+ other = ParserElement._literalStringClass( other )
+ if not isinstance( other, ParserElement ):
+ warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+ SyntaxWarning, stacklevel=2)
+ return None
+ return other & self
+
    def __invert__( self ):
        """
        Implementation of ~ operator - returns C{L{NotAny}} (negative lookahead).
        """
        return NotAny( self )
+
+ def __call__(self, name=None):
+ """
+ Shortcut for C{L{setResultsName}}, with C{listAllMatches=False}.
+
+ If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will be
+ passed as C{True}.
+
+ If C{name} is omitted, same as calling C{L{copy}}.
+
+ Example::
+ # these are equivalent
+ userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno")
+ userdata = Word(alphas)("name") + Word(nums+"-")("socsecno")
+ """
+ if name is not None:
+ return self.setResultsName(name)
+ else:
+ return self.copy()
+
    def suppress( self ):
        """
        Suppresses the output of this C{ParserElement}; useful to keep punctuation from
        cluttering up returned output.  Returns a new C{L{Suppress}} wrapper.
        """
        return Suppress( self )
+
    def leaveWhitespace( self ):
        """
        Disables the skipping of whitespace before matching the characters in the
        C{ParserElement}'s defined pattern.  This is normally only used internally by
        the pyparsing module, but may be needed in some whitespace-sensitive grammars.
        Returns self to allow chaining.
        """
        self.skipWhitespace = False
        return self
+
    def setWhitespaceChars( self, chars ):
        """
        Overrides the default whitespace chars.  Returns self to allow chaining.
        """
        self.skipWhitespace = True
        self.whiteChars = chars
        # don't inherit later changes to the class-level default whitespace set
        self.copyDefaultWhiteChars = False
        return self
+
    def parseWithTabs( self ):
        """
        Overrides default behavior to expand C{<TAB>}s to spaces before parsing the input string.
        Must be called before C{parseString} when the input grammar contains elements that
        match C{<TAB>} characters.  Returns self to allow chaining.
        """
        self.keepTabs = True
        return self
+
+ def ignore( self, other ):
+ """
+ Define expression to be ignored (e.g., comments) while doing pattern
+ matching; may be called repeatedly, to define multiple comment or other
+ ignorable patterns.
+
+ Example::
+ patt = OneOrMore(Word(alphas))
+ patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj']
+
+ patt.ignore(cStyleComment)
+ patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd']
+ """
+ if isinstance(other, basestring):
+ other = Suppress(other)
+
+ if isinstance( other, Suppress ):
+ if other not in self.ignoreExprs:
+ self.ignoreExprs.append(other)
+ else:
+ self.ignoreExprs.append( Suppress( other.copy() ) )
+ return self
+
+ def setDebugActions( self, startAction, successAction, exceptionAction ):
+ """
+ Enable display of debugging messages while doing pattern matching.
+ """
+ self.debugActions = (startAction or _defaultStartDebugAction,
+ successAction or _defaultSuccessDebugAction,
+ exceptionAction or _defaultExceptionDebugAction)
+ self.debug = True
+ return self
+
    def setDebug( self, flag=True ):
        """
        Enable display of debugging messages while doing pattern matching.
        Set C{flag} to True to enable, False to disable.  Returns self.

        Example::
            wd = Word(alphas).setName("alphaword")
            integer = Word(nums).setName("numword")
            term = wd | integer

            # turn on debugging for wd
            wd.setDebug()

            OneOrMore(term).parseString("abc 123 xyz 890")

        prints::
            Match alphaword at loc 0(1,1)
            Matched alphaword -> ['abc']
            Match alphaword at loc 3(1,4)
            Exception raised:Expected alphaword (at char 4), (line:1, col:5)
            Match alphaword at loc 7(1,8)
            Matched alphaword -> ['xyz']
            Match alphaword at loc 11(1,12)
            Exception raised:Expected alphaword (at char 12), (line:1, col:13)
            Match alphaword at loc 15(1,16)
            Exception raised:Expected alphaword (at char 15), (line:1, col:16)

        The output shown is that produced by the default debug actions - custom debug actions can be
        specified using L{setDebugActions}.  Prior to attempting
        to match the C{wd} expression, the debugging message C{"Match <exprname> at loc <n>(<line>,<col>)"}
        is shown.  Then if the parse succeeds, a C{"Matched"} message is shown, or an C{"Exception raised"}
        message is shown.  Also note the use of L{setName} to assign a human-readable name to the expression,
        which makes debugging and exception messages easier to understand - for instance, the default
        name created for the C{Word} expression without calling C{setName} is C{"W:(ABCD...)"}.
        """
        if flag:
            # install the three default debug callbacks
            self.setDebugActions( _defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction )
        else:
            self.debug = False
        return self
+
    def __str__( self ):
        # the expression's display name (set by setName or auto-generated)
        return self.name
+
    def __repr__( self ):
        # repr matches str, unicode-safe via _ustr
        return _ustr(self)
+
    def streamline( self ):
        # mark as streamlined and invalidate any cached string representation;
        # subclasses override to flatten nested expressions
        self.streamlined = True
        self.strRepr = None
        return self
+
    def checkRecursion( self, parseElementList ):
        # base ParserElement has no sub-expressions, so there is nothing to check;
        # compound subclasses override this
        pass
+
    def validate( self, validateTrace=[] ):
        """
        Check defined expressions for valid structure, check for infinite recursive definitions.
        """
        self.checkRecursion( [] )
+
+ def parseFile( self, file_or_filename, parseAll=False ):
+ """
+ Execute the parse expression on the given file or filename.
+ If a filename is specified (instead of a file object),
+ the entire file is opened, read, and closed before parsing.
+ """
+ try:
+ file_contents = file_or_filename.read()
+ except AttributeError:
+ with open(file_or_filename, "r") as f:
+ file_contents = f.read()
+ try:
+ return self.parseString(file_contents, parseAll)
+ except ParseBaseException as exc:
+ if ParserElement.verbose_stacktrace:
+ raise
+ else:
+ # catch and re-raise exception from here, clears out pyparsing internal stack trace
+ raise exc
+
    def __eq__(self,other):
        if isinstance(other, ParserElement):
            # identical object, or field-for-field equal
            return self is other or vars(self) == vars(other)
        elif isinstance(other, basestring):
            # comparing against a string asks "does this expression match it?"
            return self.matches(other)
        else:
            # NOTE(review): this compares the super() proxy object itself with
            # 'other' rather than calling super().__eq__(other); confirm intent
            # before changing.
            return super(ParserElement,self)==other
+
    def __ne__(self,other):
        # defined in terms of __eq__ for py2/py3 consistency
        return not (self == other)
+
    def __hash__(self):
        # hash by identity; __eq__ is looser, so equal elements may hash differently
        return hash(id(self))
+
    def __req__(self,other):
        # reflected equality helper; delegates to __eq__
        return self == other
+
    def __rne__(self,other):
        # reflected inequality helper; delegates to __eq__
        return not (self == other)
+
+ def matches(self, testString, parseAll=True):
+ """
+ Method for quick testing of a parser against a test string. Good for simple
+ inline microtests of sub expressions while building up larger parser.
+
+ Parameters:
+ - testString - to test against this expression for a match
+ - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests
+
+ Example::
+ expr = Word(nums)
+ assert expr.matches("100")
+ """
+ try:
+ self.parseString(_ustr(testString), parseAll=parseAll)
+ return True
+ except ParseBaseException:
+ return False
+
    def runTests(self, tests, parseAll=True, comment='#', fullDump=True, printResults=True, failureTests=False):
        """
        Execute the parse expression on a series of test strings, showing each
        test, the parsed results or where the parse failed. Quick and easy way to
        run a parse expression against a list of sample strings.

        Parameters:
         - tests - a list of separate test strings, or a multiline string of test strings
         - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests
         - comment - (default=C{'#'}) - expression for indicating embedded comments in the test
              string; pass None to disable comment filtering
         - fullDump - (default=C{True}) - dump results as list followed by results names in nested outline;
              if False, only dump nested list
         - printResults - (default=C{True}) prints test output to stdout
         - failureTests - (default=C{False}) indicates if these tests are expected to fail parsing

        Returns: a (success, results) tuple, where success indicates that all tests succeeded
        (or failed if C{failureTests} is True), and the results contain a list of lines of each
        test's output

        Example::
            number_expr = pyparsing_common.number.copy()

            result = number_expr.runTests('''
                # unsigned integer
                100
                # negative integer
                -100
                # float with scientific notation
                6.02e23
                # integer with scientific notation
                1e-12
                ''')
            print("Success" if result[0] else "Failed!")

            result = number_expr.runTests('''
                # stray character
                100Z
                # missing leading digit before '.'
                -.100
                # too many '.'
                3.14.159
                ''', failureTests=True)
            print("Success" if result[0] else "Failed!")
        prints::
            # unsigned integer
            100
            [100]

            # negative integer
            -100
            [-100]

            # float with scientific notation
            6.02e23
            [6.02e+23]

            # integer with scientific notation
            1e-12
            [1e-12]

            Success

            # stray character
            100Z
               ^
            FAIL: Expected end of text (at char 3), (line:1, col:4)

            # missing leading digit before '.'
            -.100
            ^
            FAIL: Expected {real number with scientific notation | real number | signed integer} (at char 0), (line:1, col:1)

            # too many '.'
            3.14.159
                ^
            FAIL: Expected end of text (at char 4), (line:1, col:5)

            Success

        Each test string must be on a single line. If you want to test a string that spans multiple
        lines, create a test like this::

            expr.runTests(r"this is a test\\n of strings that spans \\n 3 lines")

        (Note that this is a raw string literal, you must include the leading 'r'.)
        """
        if isinstance(tests, basestring):
            # split a multiline string into individual stripped test lines
            tests = list(map(str.strip, tests.rstrip().splitlines()))
        if isinstance(comment, basestring):
            comment = Literal(comment)
        allResults = []
        comments = []
        success = True
        for t in tests:
            # collect comment lines (and blanks following them) to print with the next test
            if comment is not None and comment.matches(t, False) or comments and not t:
                comments.append(t)
                continue
            if not t:
                continue
            out = ['\n'.join(comments), t]
            comments = []
            try:
                # allow embedded '\n' escape sequences within single-line tests
                t = t.replace(r'\n','\n')
                result = self.parseString(t, parseAll=parseAll)
                out.append(result.dump(full=fullDump))
                success = success and not failureTests
            except ParseBaseException as pe:
                fatal = "(FATAL)" if isinstance(pe, ParseFatalException) else ""
                if '\n' in t:
                    # multi-line test: show the failing line with a caret under the error column
                    out.append(line(pe.loc, t))
                    out.append(' '*(col(pe.loc,t)-1) + '^' + fatal)
                else:
                    out.append(' '*pe.loc + '^' + fatal)
                out.append("FAIL: " + str(pe))
                success = success and failureTests
                result = pe
            except Exception as exc:
                # non-parse exception (e.g. raised by a parse action)
                out.append("FAIL-EXCEPTION: " + str(exc))
                success = success and failureTests
                result = exc

            if printResults:
                if fullDump:
                    out.append('')
                print('\n'.join(out))

            allResults.append((t, result))

        return success, allResults
+
+
class Token(ParserElement):
    """
    Abstract C{ParserElement} subclass, for defining atomic matching patterns.
    """
    def __init__( self ):
        # atomic tokens never accumulate sub-results, hence savelist=False
        super(Token,self).__init__( savelist=False )
+
+
class Empty(Token):
    """
    An empty token, will always match.
    """
    def __init__( self ):
        super(Empty,self).__init__()
        self.name = "Empty"
        self.mayReturnEmpty = True   # matches zero characters
        self.mayIndexError = False   # never indexes into the input string
+
+
class NoMatch(Token):
    """
    A token that will never match.
    """
    def __init__( self ):
        super(NoMatch,self).__init__()
        self.name = "NoMatch"
        self.mayReturnEmpty = True
        self.mayIndexError = False
        self.errmsg = "Unmatchable token"

    def parseImpl( self, instring, loc, doActions=True ):
        # unconditionally fail at any location
        raise ParseException(instring, loc, self.errmsg, self)
+
+
class Literal(Token):
    """
    Token to exactly match a specified string.

    Example::
        Literal('blah').parseString('blah')  # -> ['blah']
        Literal('blah').parseString('blahfooblah')  # -> ['blah']
        Literal('blah').parseString('bla')  # -> Exception: Expected "blah"

    For case-insensitive matching, use L{CaselessLiteral}.

    For keyword matching (force word break before and after the matched string),
    use L{Keyword} or L{CaselessKeyword}.
    """
    def __init__( self, matchString ):
        super(Literal,self).__init__()
        self.match = matchString
        self.matchLen = len(matchString)
        try:
            # cached for the fast-path comparison in parseImpl
            self.firstMatchChar = matchString[0]
        except IndexError:
            warnings.warn("null string passed to Literal; use Empty() instead",
                            SyntaxWarning, stacklevel=2)
            # degrade an empty literal into an always-matching Empty token
            self.__class__ = Empty
        self.name = '"%s"' % _ustr(self.match)
        self.errmsg = "Expected " + self.name
        self.mayReturnEmpty = False
        self.mayIndexError = False

    # Performance tuning: this routine gets called a *lot*
    # if this is a single character match string and the first character matches,
    # short-circuit as quickly as possible, and avoid calling startswith
    #~ @profile
    def parseImpl( self, instring, loc, doActions=True ):
        if (instring[loc] == self.firstMatchChar and
            (self.matchLen==1 or instring.startswith(self.match,loc)) ):
            return loc+self.matchLen, self.match
        raise ParseException(instring, loc, self.errmsg, self)
_L = Literal   # legacy shorthand alias
# class used to promote bare strings combined with ParserElements (see __add__ etc.)
ParserElement._literalStringClass = Literal
+
class Keyword(Token):
    """
    Token to exactly match a specified string as a keyword, that is, it must be
    immediately followed by a non-keyword character.  Compare with C{L{Literal}}:
     - C{Literal("if")} will match the leading C{'if'} in C{'ifAndOnlyIf'}.
     - C{Keyword("if")} will not; it will only match the leading C{'if'} in C{'if x=1'}, or C{'if(y==2)'}
    Accepts two optional constructor arguments in addition to the keyword string:
     - C{identChars} is a string of characters that would be valid identifier characters,
          defaulting to all alphanumerics + "_" and "$"
     - C{caseless} allows case-insensitive matching, default is C{False}.

    Example::
        Keyword("start").parseString("start")  # -> ['start']
        Keyword("start").parseString("starting")  # -> Exception

    For case-insensitive matching, use L{CaselessKeyword}.
    """
    # default set of characters treated as identifier characters around a keyword
    DEFAULT_KEYWORD_CHARS = alphanums+"_$"

    def __init__( self, matchString, identChars=None, caseless=False ):
        super(Keyword,self).__init__()
        if identChars is None:
            identChars = Keyword.DEFAULT_KEYWORD_CHARS
        self.match = matchString
        self.matchLen = len(matchString)
        try:
            self.firstMatchChar = matchString[0]
        except IndexError:
            warnings.warn("null string passed to Keyword; use Empty() instead",
                            SyntaxWarning, stacklevel=2)
        self.name = '"%s"' % self.match
        self.errmsg = "Expected " + self.name
        self.mayReturnEmpty = False
        self.mayIndexError = False
        self.caseless = caseless
        if caseless:
            # pre-uppercase the keyword and identChars so parseImpl only
            # uppercases input slices
            self.caselessmatch = matchString.upper()
            identChars = identChars.upper()
        self.identChars = set(identChars)

    def parseImpl( self, instring, loc, doActions=True ):
        # match only when the text matches AND both neighboring characters
        # (if any) are not identifier characters
        if self.caseless:
            if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and
                 (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) and
                 (loc == 0 or instring[loc-1].upper() not in self.identChars) ):
                return loc+self.matchLen, self.match
        else:
            if (instring[loc] == self.firstMatchChar and
                (self.matchLen==1 or instring.startswith(self.match,loc)) and
                (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen] not in self.identChars) and
                (loc == 0 or instring[loc-1] not in self.identChars) ):
                return loc+self.matchLen, self.match
        raise ParseException(instring, loc, self.errmsg, self)

    def copy(self):
        # NOTE(review): copy() resets identChars to the class default, discarding
        # any custom identChars passed to __init__ - confirm this is intended.
        c = super(Keyword,self).copy()
        c.identChars = Keyword.DEFAULT_KEYWORD_CHARS
        return c

    @staticmethod
    def setDefaultKeywordChars( chars ):
        """Overrides the default Keyword chars
        """
        Keyword.DEFAULT_KEYWORD_CHARS = chars
+
class CaselessLiteral(Literal):
    """
    Token to match a specified string, ignoring case of letters.
    Note: the matched results will always be in the case of the given
    match string, NOT the case of the input text.

    Example::
        OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD', 'CMD']

    (Contrast with example for L{CaselessKeyword}.)
    """
    def __init__( self, matchString ):
        # match against the uppercased form...
        super(CaselessLiteral,self).__init__( matchString.upper() )
        # ...but return the literal exactly as it was defined
        self.returnString = matchString
        self.name = "'%s'" % self.returnString
        self.errmsg = "Expected " + self.name

    def parseImpl( self, instring, loc, doActions=True ):
        candidate = instring[loc:loc + self.matchLen]
        if candidate.upper() == self.match:
            return loc + self.matchLen, self.returnString
        raise ParseException(instring, loc, self.errmsg, self)
+
class CaselessKeyword(Keyword):
    """
    Caseless version of L{Keyword}.

    Example::
        OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD']

    (Contrast with example for L{CaselessLiteral}.)
    """
    def __init__( self, matchString, identChars=None ):
        super(CaselessKeyword,self).__init__( matchString, identChars, caseless=True )

    def parseImpl( self, instring, loc, doActions=True ):
        # BUGFIX: also require that the preceding character (if any) is not an
        # identifier character, mirroring the caseless branch of Keyword.parseImpl;
        # without this check the keyword incorrectly matched when glued to the
        # end of a longer identifier.
        if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and
             (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) and
             (loc == 0 or instring[loc-1].upper() not in self.identChars) ):
            return loc+self.matchLen, self.match
        raise ParseException(instring, loc, self.errmsg, self)
+
class CloseMatch(Token):
    """
    A variation on L{Literal} which matches "close" matches, that is,
    strings with at most 'n' mismatching characters. C{CloseMatch} takes parameters:
     - C{match_string} - string to be matched
     - C{maxMismatches} - (C{default=1}) maximum number of mismatches allowed to count as a match

    The results from a successful parse will contain the matched text from the input string and the following named results:
     - C{mismatches} - a list of the positions within the match_string where mismatches were found
     - C{original} - the original match_string used to compare against the input string

    If C{mismatches} is an empty list, then the match was an exact match.

    Example::
        patt = CloseMatch("ATCATCGAATGGA")
        patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']})
        patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1)

        # exact match
        patt.parseString("ATCATCGAATGGA") # -> (['ATCATCGAATGGA'], {'mismatches': [[]], 'original': ['ATCATCGAATGGA']})

        # close match allowing up to 2 mismatches
        patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2)
        patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']})
    """
    def __init__(self, match_string, maxMismatches=1):
        super(CloseMatch,self).__init__()
        self.name = match_string
        self.match_string = match_string
        self.maxMismatches = maxMismatches
        self.errmsg = "Expected %r (with up to %d mismatches)" % (self.match_string, self.maxMismatches)
        # fixed-length comparison below never indexes past what it checks
        self.mayIndexError = False
        self.mayReturnEmpty = False

    def parseImpl( self, instring, loc, doActions=True ):
        start = loc
        instrlen = len(instring)
        maxloc = start + len(self.match_string)

        # only attempt a match if enough input remains for the full string
        if maxloc <= instrlen:
            match_string = self.match_string
            match_stringloc = 0
            mismatches = []
            maxMismatches = self.maxMismatches

            # compare character-by-character, bailing out via 'break' as
            # soon as the mismatch budget is exceeded; the for-else branch
            # runs only when the whole match_string was scanned in budget
            for match_stringloc,s_m in enumerate(zip(instring[loc:maxloc], self.match_string)):
                src,mat = s_m
                if src != mat:
                    mismatches.append(match_stringloc)
                    if len(mismatches) > maxMismatches:
                        break
            else:
                # successful close match: return matched slice plus the
                # 'original' and 'mismatches' named results
                loc = match_stringloc + 1
                results = ParseResults([instring[start:loc]])
                results['original'] = self.match_string
                results['mismatches'] = mismatches
                return loc, results

        raise ParseException(instring, loc, self.errmsg, self)
+
+
class Word(Token):
    """
    Token for matching words composed of allowed character sets.
    Defined with string containing all allowed initial characters,
    an optional string containing allowed body characters (if omitted,
    defaults to the initial character set), and an optional minimum,
    maximum, and/or exact length. The default value for C{min} is 1 (a
    minimum value < 1 is not valid); the default values for C{max} and C{exact}
    are 0, meaning no maximum or exact length restriction. An optional
    C{excludeChars} parameter can list characters that might be found in
    the input C{bodyChars} string; useful to define a word of all printables
    except for one or two characters, for instance.

    L{srange} is useful for defining custom character set strings for defining
    C{Word} expressions, using range notation from regular expression character sets.

    A common mistake is to use C{Word} to match a specific literal string, as in
    C{Word("Address")}. Remember that C{Word} uses the string argument to define
    I{sets} of matchable characters. This expression would match "Add", "AAA",
    "dAred", or any other word made up of the characters 'A', 'd', 'r', 'e', and 's'.
    To match an exact literal string, use L{Literal} or L{Keyword}.

    pyparsing includes helper strings for building Words:
     - L{alphas}
     - L{nums}
     - L{alphanums}
     - L{hexnums}
     - L{alphas8bit} (alphabetic characters in ASCII range 128-255 - accented, tilded, umlauted, etc.)
     - L{punc8bit} (non-alphabetic characters in ASCII range 128-255 - currency, symbols, superscripts, diacriticals, etc.)
     - L{printables} (any non-whitespace character)

    Example::
        # a word composed of digits
        integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9"))

        # a word with a leading capital, and zero or more lowercase
        capital_word = Word(alphas.upper(), alphas.lower())

        # hostnames are alphanumeric, with leading alpha, and '-'
        hostname = Word(alphas, alphanums+'-')

        # roman numeral (not a strict parser, accepts invalid mix of characters)
        roman = Word("IVXLCDM")

        # any string of non-whitespace characters, except for ','
        csv_value = Word(printables, excludeChars=",")
    """
    def __init__( self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None ):
        super(Word,self).__init__()
        # strip excluded characters from both character sets up front
        if excludeChars:
            initChars = ''.join(c for c in initChars if c not in excludeChars)
            if bodyChars:
                bodyChars = ''.join(c for c in bodyChars if c not in excludeChars)
        self.initCharsOrig = initChars
        self.initChars = set(initChars)
        if bodyChars :
            self.bodyCharsOrig = bodyChars
            self.bodyChars = set(bodyChars)
        else:
            # body characters default to the initial character set
            self.bodyCharsOrig = initChars
            self.bodyChars = set(initChars)

        self.maxSpecified = max > 0

        if min < 1:
            raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted")

        self.minLen = min

        if max > 0:
            self.maxLen = max
        else:
            self.maxLen = _MAX_INT

        # exact length overrides both min and max
        if exact > 0:
            self.maxLen = exact
            self.minLen = exact

        self.name = _ustr(self)
        self.errmsg = "Expected " + self.name
        self.mayIndexError = False
        self.asKeyword = asKeyword

        # For the common unbounded case (min=1, no max/exact), precompile
        # an equivalent regular expression as a fast path for parseImpl.
        if ' ' not in self.initCharsOrig+self.bodyCharsOrig and (min==1 and max==0 and exact==0):
            if self.bodyCharsOrig == self.initCharsOrig:
                self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig)
            elif len(self.initCharsOrig) == 1:
                self.reString = "%s[%s]*" % \
                                      (re.escape(self.initCharsOrig),
                                      _escapeRegexRangeChars(self.bodyCharsOrig),)
            else:
                self.reString = "[%s][%s]*" % \
                                      (_escapeRegexRangeChars(self.initCharsOrig),
                                      _escapeRegexRangeChars(self.bodyCharsOrig),)
            if self.asKeyword:
                self.reString = r"\b"+self.reString+r"\b"
            try:
                self.re = re.compile( self.reString )
            except Exception:
                # on any compile failure, fall back to char-set scanning
                self.re = None

    def parseImpl( self, instring, loc, doActions=True ):
        # fast path: use the precompiled regex when one was built
        if self.re:
            result = self.re.match(instring,loc)
            if not result:
                raise ParseException(instring, loc, self.errmsg, self)

            loc = result.end()
            return loc, result.group()

        if not(instring[ loc ] in self.initChars):
            raise ParseException(instring, loc, self.errmsg, self)

        # scan forward through allowed body characters, up to maxLen
        start = loc
        loc += 1
        instrlen = len(instring)
        bodychars = self.bodyChars
        maxloc = start + self.maxLen
        maxloc = min( maxloc, instrlen )
        while loc < maxloc and instring[loc] in bodychars:
            loc += 1

        throwException = False
        if loc - start < self.minLen:
            throwException = True
        # with an explicit max, the word must actually end at the cutoff
        if self.maxSpecified and loc < instrlen and instring[loc] in bodychars:
            throwException = True
        if self.asKeyword:
            # keyword mode: must not abut other word characters on either side
            if (start>0 and instring[start-1] in bodychars) or (loc<instrlen and instring[loc] in bodychars):
                throwException = True

        if throwException:
            raise ParseException(instring, loc, self.errmsg, self)

        return loc, instring[start:loc]

    def __str__( self ):
        try:
            return super(Word,self).__str__()
        except Exception:
            pass

        if self.strRepr is None:

            def charsAsStr(s):
                # abbreviate long character sets in the repr
                if len(s)>4:
                    return s[:4]+"..."
                else:
                    return s

            if ( self.initCharsOrig != self.bodyCharsOrig ):
                self.strRepr = "W:(%s,%s)" % ( charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig) )
            else:
                self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig)

        return self.strRepr
+
+
class Regex(Token):
    r"""
    Token for matching strings that match a given regular expression.
    Defined with string specifying the regular expression in a form recognized by the inbuilt Python re module.
    If the given regex contains named groups (defined using C{(?P<name>...)}), these will be preserved as
    named parse results.

    Example::
        realnum = Regex(r"[+-]?\d+\.\d*")
        date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)')
        # ref: http://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression
        roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})")
    """
    # the type of a compiled re object, used to recognize precompiled patterns
    compiledREtype = type(re.compile("[A-Z]"))

    def __init__(self, pattern, flags=0):
        """The parameters C{pattern} and C{flags} are passed to the C{re.compile()} function as-is. See the Python C{re} module for an explanation of the acceptable patterns and flags."""
        super(Regex, self).__init__()

        if isinstance(pattern, basestring):
            if not pattern:
                warnings.warn("null string passed to Regex; use Empty() instead",
                              SyntaxWarning, stacklevel=2)

            self.pattern = pattern
            self.flags = flags
            try:
                self.re = re.compile(self.pattern, self.flags)
                self.reString = self.pattern
            except sre_constants.error:
                warnings.warn("invalid pattern (%s) passed to Regex" % pattern,
                              SyntaxWarning, stacklevel=2)
                raise
        elif isinstance(pattern, Regex.compiledREtype):
            # a pre-compiled expression is accepted as-is
            self.re = pattern
            self.pattern = self.reString = str(pattern)
            self.flags = flags
        else:
            raise ValueError("Regex may only be constructed with a string or a compiled RE object")

        self.name = _ustr(self)
        self.errmsg = "Expected " + self.name
        self.mayIndexError = False
        self.mayReturnEmpty = True

    def parseImpl(self, instring, loc, doActions=True):
        match = self.re.match(instring, loc)
        if match is None:
            raise ParseException(instring, loc, self.errmsg, self)

        # promote any named groups to named parse results
        tokens = ParseResults(match.group())
        for group_name, group_value in match.groupdict().items():
            tokens[group_name] = group_value
        return match.end(), tokens

    def __str__(self):
        try:
            return super(Regex, self).__str__()
        except Exception:
            pass

        if self.strRepr is None:
            self.strRepr = "Re:(%s)" % repr(self.pattern)

        return self.strRepr
+
+
class QuotedString(Token):
    r"""
    Token for matching strings that are delimited by quoting characters.

    Defined with the following parameters:
        - quoteChar - string of one or more characters defining the quote delimiting string
        - escChar - character to escape quotes, typically backslash (default=C{None})
        - escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=C{None})
        - multiline - boolean indicating whether quotes can span multiple lines (default=C{False})
        - unquoteResults - boolean indicating whether the matched text should be unquoted (default=C{True})
        - endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=C{None} => same as quoteChar)
        - convertWhitespaceEscapes - convert escaped whitespace (C{'\t'}, C{'\n'}, etc.) to actual whitespace (default=C{True})

    Example::
        qs = QuotedString('"')
        print(qs.searchString('lsjdf "This is the quote" sldjf'))
        complex_qs = QuotedString('{{', endQuoteChar='}}')
        print(complex_qs.searchString('lsjdf {{This is the "quote"}} sldjf'))
        sql_qs = QuotedString('"', escQuote='""')
        print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf'))
    prints::
        [['This is the quote']]
        [['This is the "quote"']]
        [['This is the quote with "embedded" quotes']]
    """
    def __init__( self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None, convertWhitespaceEscapes=True):
        super(QuotedString,self).__init__()

        # remove white space from quote chars - wont work anyway
        quoteChar = quoteChar.strip()
        if not quoteChar:
            warnings.warn("quoteChar cannot be the empty string",SyntaxWarning,stacklevel=2)
            raise SyntaxError()

        if endQuoteChar is None:
            endQuoteChar = quoteChar
        else:
            endQuoteChar = endQuoteChar.strip()
            if not endQuoteChar:
                warnings.warn("endQuoteChar cannot be the empty string",SyntaxWarning,stacklevel=2)
                raise SyntaxError()

        self.quoteChar = quoteChar
        self.quoteCharLen = len(quoteChar)
        # first character is used as a cheap pre-test before running the regex
        self.firstQuoteChar = quoteChar[0]
        self.endQuoteChar = endQuoteChar
        self.endQuoteCharLen = len(endQuoteChar)
        self.escChar = escChar
        self.escQuote = escQuote
        self.unquoteResults = unquoteResults
        self.convertWhitespaceEscapes = convertWhitespaceEscapes

        # Build the matching regex piecewise:
        #   open-quote (?: any char not end-quote/escape [/newline] ...
        if multiline:
            self.flags = re.MULTILINE | re.DOTALL
            self.pattern = r'%s(?:[^%s%s]' % \
                ( re.escape(self.quoteChar),
                  _escapeRegexRangeChars(self.endQuoteChar[0]),
                  (escChar is not None and _escapeRegexRangeChars(escChar) or '') )
        else:
            self.flags = 0
            self.pattern = r'%s(?:[^%s\n\r%s]' % \
                ( re.escape(self.quoteChar),
                  _escapeRegexRangeChars(self.endQuoteChar[0]),
                  (escChar is not None and _escapeRegexRangeChars(escChar) or '') )
        # multi-char end quote: allow partial prefixes of it inside the body
        if len(self.endQuoteChar) > 1:
            self.pattern += (
                '|(?:' + ')|(?:'.join("%s[^%s]" % (re.escape(self.endQuoteChar[:i]),
                                               _escapeRegexRangeChars(self.endQuoteChar[i]))
                                    for i in range(len(self.endQuoteChar)-1,0,-1)) + ')'
                )
        # alternatives for escaped-quote sequence and escaped characters
        if escQuote:
            self.pattern += (r'|(?:%s)' % re.escape(escQuote))
        if escChar:
            self.pattern += (r'|(?:%s.)' % re.escape(escChar))
            self.escCharReplacePattern = re.escape(self.escChar)+"(.)"
        # close the body group and require the end quote
        self.pattern += (r')*%s' % re.escape(self.endQuoteChar))

        try:
            self.re = re.compile(self.pattern, self.flags)
            self.reString = self.pattern
        except sre_constants.error:
            warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern,
                SyntaxWarning, stacklevel=2)
            raise

        self.name = _ustr(self)
        self.errmsg = "Expected " + self.name
        self.mayIndexError = False
        self.mayReturnEmpty = True

    def parseImpl( self, instring, loc, doActions=True ):
        # cheap first-character test before attempting the full regex match
        result = instring[loc] == self.firstQuoteChar and self.re.match(instring,loc) or None
        if not result:
            raise ParseException(instring, loc, self.errmsg, self)

        loc = result.end()
        ret = result.group()

        if self.unquoteResults:

            # strip off quotes
            ret = ret[self.quoteCharLen:-self.endQuoteCharLen]

            if isinstance(ret,basestring):
                # replace escaped whitespace
                if '\\' in ret and self.convertWhitespaceEscapes:
                    ws_map = {
                        r'\t' : '\t',
                        r'\n' : '\n',
                        r'\f' : '\f',
                        r'\r' : '\r',
                    }
                    for wslit,wschar in ws_map.items():
                        ret = ret.replace(wslit, wschar)

                # replace escaped characters
                if self.escChar:
                    ret = re.sub(self.escCharReplacePattern, r"\g<1>", ret)

                # replace escaped quotes
                if self.escQuote:
                    ret = ret.replace(self.escQuote, self.endQuoteChar)

        return loc, ret

    def __str__( self ):
        try:
            return super(QuotedString,self).__str__()
        except Exception:
            pass

        if self.strRepr is None:
            self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar)

        return self.strRepr
+
+
class CharsNotIn(Token):
    """
    Token for matching words composed of characters I{not} in a given set (will
    include whitespace in matched characters if not listed in the provided exclusion set - see example).
    Defined with string containing all disallowed characters, and an optional
    minimum, maximum, and/or exact length. The default value for C{min} is 1 (a
    minimum value < 1 is not valid); the default values for C{max} and C{exact}
    are 0, meaning no maximum or exact length restriction.

    Example::
        # define a comma-separated-value as anything that is not a ','
        csv_value = CharsNotIn(',')
        print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213"))
    prints::
        ['dkls', 'lsdkjf', 's12 34', '@!#', '213']
    """
    def __init__(self, notChars, min=1, max=0, exact=0):
        super(CharsNotIn, self).__init__()
        # whitespace is significant here unless explicitly excluded
        self.skipWhitespace = False
        self.notChars = notChars

        if min < 1:
            raise ValueError("cannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permitted")

        self.minLen = min
        self.maxLen = max if max > 0 else _MAX_INT
        if exact > 0:
            # an exact length pins both bounds
            self.minLen = exact
            self.maxLen = exact

        self.name = _ustr(self)
        self.errmsg = "Expected " + self.name
        self.mayReturnEmpty = (self.minLen == 0)
        self.mayIndexError = False

    def parseImpl(self, instring, loc, doActions=True):
        if instring[loc] in self.notChars:
            raise ParseException(instring, loc, self.errmsg, self)

        begin = loc
        loc += 1
        excluded = self.notChars
        limit = min(begin + self.maxLen, len(instring))
        while loc < limit and instring[loc] not in excluded:
            loc += 1

        if loc - begin < self.minLen:
            raise ParseException(instring, loc, self.errmsg, self)

        return loc, instring[begin:loc]

    def __str__(self):
        try:
            return super(CharsNotIn, self).__str__()
        except Exception:
            pass

        if self.strRepr is None:
            # abbreviate long exclusion sets in the repr
            if len(self.notChars) > 4:
                self.strRepr = "!W:(%s...)" % self.notChars[:4]
            else:
                self.strRepr = "!W:(%s)" % self.notChars

        return self.strRepr
+
class White(Token):
    """
    Special matching class for matching whitespace. Normally, whitespace is ignored
    by pyparsing grammars. This class is included when some whitespace structures
    are significant. Define with a string containing the whitespace characters to be
    matched; default is C{" \\t\\r\\n"}. Also takes optional C{min}, C{max}, and C{exact} arguments,
    as defined for the C{L{Word}} class.
    """
    # display names for each recognized whitespace character
    whiteStrs = {
        " " : "<SPC>",
        "\t": "<TAB>",
        "\n": "<LF>",
        "\r": "<CR>",
        "\f": "<FF>",
        }

    def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0):
        super(White, self).__init__()
        self.matchWhite = ws
        # characters we are matching must not also be skipped as whitespace
        self.setWhitespaceChars("".join(c for c in self.whiteChars if c not in self.matchWhite))
        self.name = "".join(White.whiteStrs[c] for c in self.matchWhite)
        self.mayReturnEmpty = True
        self.errmsg = "Expected " + self.name

        self.minLen = min
        self.maxLen = max if max > 0 else _MAX_INT
        if exact > 0:
            # exact length pins both bounds
            self.minLen = exact
            self.maxLen = exact

    def parseImpl(self, instring, loc, doActions=True):
        if instring[loc] not in self.matchWhite:
            raise ParseException(instring, loc, self.errmsg, self)
        begin = loc
        loc += 1
        limit = min(begin + self.maxLen, len(instring))
        while loc < limit and instring[loc] in self.matchWhite:
            loc += 1

        if loc - begin < self.minLen:
            raise ParseException(instring, loc, self.errmsg, self)

        return loc, instring[begin:loc]
+
+
class _PositionToken(Token):
    """Abstract base for zero-width tokens that assert a position in the
    input rather than consuming text."""
    def __init__(self):
        super(_PositionToken, self).__init__()
        self.name = self.__class__.__name__
        self.mayReturnEmpty = True
        self.mayIndexError = False
+
class GoToColumn(_PositionToken):
    """
    Token to advance to a specific column of input text; useful for tabular report scraping.
    """
    def __init__(self, colno):
        super(GoToColumn, self).__init__()
        self.col = colno

    def preParse(self, instring, loc):
        # Skip whitespace (and any ignorable expressions) until the target
        # column is reached, or non-whitespace/end-of-input stops us.
        if col(loc, instring) != self.col:
            instrlen = len(instring)
            if self.ignoreExprs:
                loc = self._skipIgnorables(instring, loc)
            while (loc < instrlen
                   and instring[loc].isspace()
                   and col(loc, instring) != self.col):
                loc += 1
        return loc

    def parseImpl(self, instring, loc, doActions=True):
        thiscol = col(loc, instring)
        if thiscol > self.col:
            raise ParseException(instring, loc, "Text not in expected column", self)
        newloc = loc + self.col - thiscol
        return newloc, instring[loc:newloc]
+
+
class LineStart(_PositionToken):
    """
    Matches if current position is at the beginning of a line within the parse string

    Example::

        test = '''\
        AAA this line
        AAA and this line
         AAA but not this one
        B AAA and definitely not this one
        '''

        for t in (LineStart() + 'AAA' + restOfLine).searchString(test):
            print(t)

    Prints::
        ['AAA', ' this line']
        ['AAA', ' and this line']

    """
    def __init__(self):
        super(LineStart, self).__init__()
        self.errmsg = "Expected start of line"

    def parseImpl(self, instring, loc, doActions=True):
        # col() is 1-based; column 1 means loc sits at a line start
        if col(loc, instring) != 1:
            raise ParseException(instring, loc, self.errmsg, self)
        return loc, []
+
class LineEnd(_PositionToken):
    """
    Matches if current position is at the end of a line within the parse string
    """
    def __init__(self):
        super(LineEnd, self).__init__()
        # the newline itself must not be skipped as ordinary whitespace
        self.setWhitespaceChars(ParserElement.DEFAULT_WHITE_CHARS.replace("\n", ""))
        self.errmsg = "Expected end of line"

    def parseImpl(self, instring, loc, doActions=True):
        # end of the whole string also counts as end of a line
        if loc == len(instring):
            return loc + 1, []
        if loc < len(instring) and instring[loc] == "\n":
            # consume the newline and return it as the matched token
            return loc + 1, "\n"
        raise ParseException(instring, loc, self.errmsg, self)
+
class StringStart(_PositionToken):
    """
    Matches if current position is at the beginning of the parse string
    """
    def __init__(self):
        super(StringStart, self).__init__()
        self.errmsg = "Expected start of text"

    def parseImpl(self, instring, loc, doActions=True):
        # accept loc 0, or any loc reachable from 0 by skipping only
        # whitespace and ignorable expressions
        if loc != 0 and loc != self.preParse(instring, 0):
            raise ParseException(instring, loc, self.errmsg, self)
        return loc, []
+
class StringEnd(_PositionToken):
    """
    Matches if current position is at the end of the parse string
    """
    def __init__( self ):
        super(StringEnd,self).__init__()
        self.errmsg = "Expected end of text"

    def parseImpl( self, instring, loc, doActions=True ):
        # Succeed at (or beyond) the end of the string; fail inside it.
        # The original code had a fourth, unreachable 'else: raise' branch —
        # loc <, ==, and > len(instring) already exhaust all cases.
        if loc < len(instring):
            raise ParseException(instring, loc, self.errmsg, self)
        elif loc == len(instring):
            # advance past the end so repeated matches cannot loop in place
            return loc+1, []
        else:
            # loc > len(instring): already past the end
            return loc, []
+
class WordStart(_PositionToken):
    """
    Matches if the current position is at the beginning of a Word, and
    is not preceded by any character in a given set of C{wordChars}
    (default=C{printables}). To emulate the regex word-boundary behavior,
    use C{WordStart(alphanums)}. C{WordStart} will also match at the beginning of
    the string being parsed, or at the beginning of a line.
    """
    def __init__(self, wordChars = printables):
        super(WordStart, self).__init__()
        self.wordChars = set(wordChars)
        self.errmsg = "Not at the start of a word"

    def parseImpl(self, instring, loc, doActions=True):
        # position 0 always qualifies as a word start
        if loc != 0:
            preceded_by_word = instring[loc - 1] in self.wordChars
            at_word_char = instring[loc] in self.wordChars
            if preceded_by_word or not at_word_char:
                raise ParseException(instring, loc, self.errmsg, self)
        return loc, []
+
class WordEnd(_PositionToken):
    """
    Matches if the current position is at the end of a Word, and
    is not followed by any character in a given set of C{wordChars}
    (default=C{printables}). To emulate the regex word-boundary behavior,
    use C{WordEnd(alphanums)}. C{WordEnd} will also match at the end of
    the string being parsed, or at the end of a line.
    """
    def __init__(self, wordChars = printables):
        super(WordEnd, self).__init__()
        self.wordChars = set(wordChars)
        # whitespace after the word is significant for the boundary test
        self.skipWhitespace = False
        self.errmsg = "Not at the end of a word"

    def parseImpl(self, instring, loc, doActions=True):
        instrlen = len(instring)
        # end of string always qualifies as a word end
        if instrlen > 0 and loc < instrlen:
            at_word_char = instring[loc] in self.wordChars
            preceded_by_word = instring[loc - 1] in self.wordChars
            if at_word_char or not preceded_by_word:
                raise ParseException(instring, loc, self.errmsg, self)
        return loc, []
+
+
class ParseExpression(ParserElement):
    """
    Abstract subclass of ParserElement, for combining and post-processing parsed tokens.
    """
    def __init__( self, exprs, savelist = False ):
        super(ParseExpression,self).__init__(savelist)
        # a generator must be materialized before the type checks below
        if isinstance( exprs, _generatorType ):
            exprs = list(exprs)

        if isinstance( exprs, basestring ):
            self.exprs = [ ParserElement._literalStringClass( exprs ) ]
        elif isinstance( exprs, Iterable ):
            exprs = list(exprs)
            # if sequence of strings provided, wrap with Literal
            if all(isinstance(expr, basestring) for expr in exprs):
                exprs = map(ParserElement._literalStringClass, exprs)
            self.exprs = list(exprs)
        else:
            try:
                self.exprs = list( exprs )
            except TypeError:
                self.exprs = [ exprs ]
        self.callPreparse = False

    def __getitem__( self, i ):
        return self.exprs[i]

    def append( self, other ):
        # invalidate the cached string representation
        self.exprs.append( other )
        self.strRepr = None
        return self

    def leaveWhitespace( self ):
        """Extends C{leaveWhitespace} defined in base class, and also invokes C{leaveWhitespace} on
           all contained expressions."""
        self.skipWhitespace = False
        # copy first so callers sharing the sub-expressions are unaffected
        self.exprs = [ e.copy() for e in self.exprs ]
        for e in self.exprs:
            e.leaveWhitespace()
        return self

    def ignore( self, other ):
        # register the ignorable on self, then propagate the (possibly
        # wrapped) last-registered ignorable down to each sub-expression
        if isinstance( other, Suppress ):
            if other not in self.ignoreExprs:
                super( ParseExpression, self).ignore( other )
                for e in self.exprs:
                    e.ignore( self.ignoreExprs[-1] )
        else:
            super( ParseExpression, self).ignore( other )
            for e in self.exprs:
                e.ignore( self.ignoreExprs[-1] )
        return self

    def __str__( self ):
        try:
            return super(ParseExpression,self).__str__()
        except Exception:
            pass

        if self.strRepr is None:
            self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.exprs) )
        return self.strRepr

    def streamline( self ):
        super(ParseExpression,self).streamline()

        for e in self.exprs:
            e.streamline()

        # collapse nested And's of the form And( And( And( a,b), c), d) to And( a,b,c,d )
        # but only if there are no parse actions or resultsNames on the nested And's
        # (likewise for Or's and MatchFirst's)
        if ( len(self.exprs) == 2 ):
            other = self.exprs[0]
            if ( isinstance( other, self.__class__ ) and
                  not(other.parseAction) and
                  other.resultsName is None and
                  not other.debug ):
                self.exprs = other.exprs[:] + [ self.exprs[1] ]
                self.strRepr = None
                self.mayReturnEmpty |= other.mayReturnEmpty
                self.mayIndexError  |= other.mayIndexError

            other = self.exprs[-1]
            if ( isinstance( other, self.__class__ ) and
                  not(other.parseAction) and
                  other.resultsName is None and
                  not other.debug ):
                self.exprs = self.exprs[:-1] + other.exprs[:]
                self.strRepr = None
                self.mayReturnEmpty |= other.mayReturnEmpty
                self.mayIndexError  |= other.mayIndexError

        self.errmsg = "Expected " + _ustr(self)

        return self

    def setResultsName( self, name, listAllMatches=False ):
        ret = super(ParseExpression,self).setResultsName(name,listAllMatches)
        return ret

    def validate( self, validateTrace=[] ):
        # recursively validate sub-expressions, then check self for
        # left-recursion
        tmp = validateTrace[:]+[self]
        for e in self.exprs:
            e.validate(tmp)
        self.checkRecursion( [] )

    def copy(self):
        # deep-copy the contained expressions so the copy is independent
        ret = super(ParseExpression,self).copy()
        ret.exprs = [e.copy() for e in self.exprs]
        return ret
+
class And(ParseExpression):
    """
    Requires all given C{ParseExpression}s to be found in the given order.
    Expressions may be separated by whitespace.
    May be constructed using the C{'+'} operator.
    May also be constructed using the C{'-'} operator, which will suppress backtracking.

    Example::
        integer = Word(nums)
        name_expr = OneOrMore(Word(alphas))

        expr = And([integer("id"),name_expr("name"),integer("age")])
        # more easily written as:
        expr = integer("id") + name_expr("name") + integer("age")
    """

    # sentinel inserted by the '-' operator; once seen, later parse
    # failures are escalated to ParseSyntaxException (no backtracking)
    class _ErrorStop(Empty):
        def __init__(self, *args, **kwargs):
            super(And._ErrorStop,self).__init__(*args, **kwargs)
            self.name = '-'
            self.leaveWhitespace()

    def __init__( self, exprs, savelist = True ):
        super(And,self).__init__(exprs, savelist)
        # And can only match empty if every contained expression can
        self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs)
        # inherit leading-whitespace behavior from the first expression
        self.setWhitespaceChars( self.exprs[0].whiteChars )
        self.skipWhitespace = self.exprs[0].skipWhitespace
        self.callPreparse = True

    def parseImpl( self, instring, loc, doActions=True ):
        # pass False as last arg to _parse for first element, since we already
        # pre-parsed the string as part of our And pre-parsing
        loc, resultlist = self.exprs[0]._parse( instring, loc, doActions, callPreParse=False )
        errorStop = False
        for e in self.exprs[1:]:
            if isinstance(e, And._ErrorStop):
                errorStop = True
                continue
            if errorStop:
                # past an _ErrorStop: convert ordinary parse failures into
                # non-backtracking ParseSyntaxExceptions
                try:
                    loc, exprtokens = e._parse( instring, loc, doActions )
                except ParseSyntaxException:
                    raise
                except ParseBaseException as pe:
                    pe.__traceback__ = None
                    raise ParseSyntaxException._from_exception(pe)
                except IndexError:
                    raise ParseSyntaxException(instring, len(instring), self.errmsg, self)
            else:
                loc, exprtokens = e._parse( instring, loc, doActions )
            if exprtokens or exprtokens.haskeys():
                resultlist += exprtokens
        return loc, resultlist

    def __iadd__(self, other ):
        if isinstance( other, basestring ):
            other = ParserElement._literalStringClass( other )
        return self.append( other ) #And( [ self, other ] )

    def checkRecursion( self, parseElementList ):
        subRecCheckList = parseElementList[:] + [ self ]
        for e in self.exprs:
            e.checkRecursion( subRecCheckList )
            # once an expression must consume input, later ones cannot
            # participate in left-recursion from this position
            if not e.mayReturnEmpty:
                break

    def __str__( self ):
        if hasattr(self,"name"):
            return self.name

        if self.strRepr is None:
            self.strRepr = "{" + " ".join(_ustr(e) for e in self.exprs) + "}"

        return self.strRepr
+
+
class Or(ParseExpression):
    """
    Requires that at least one C{ParseExpression} is found.
    If two expressions match, the expression that matches the longest string will be used.
    May be constructed using the C{'^'} operator.

    Example::
        # construct Or using '^' operator

        number = Word(nums) ^ Combine(Word(nums) + '.' + Word(nums))
        print(number.searchString("123 3.1416 789"))
    prints::
        [['123'], ['3.1416'], ['789']]
    """
    def __init__( self, exprs, savelist = False ):
        super(Or,self).__init__(exprs, savelist)
        if self.exprs:
            self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs)
        else:
            self.mayReturnEmpty = True

    def parseImpl( self, instring, loc, doActions=True ):
        maxExcLoc = -1
        maxException = None
        matches = []
        # first pass: tryParse (no actions) every alternative, recording
        # how far each one can match and tracking the furthest failure
        for e in self.exprs:
            try:
                loc2 = e.tryParse( instring, loc )
            except ParseException as err:
                err.__traceback__ = None
                if err.loc > maxExcLoc:
                    maxException = err
                    maxExcLoc = err.loc
            except IndexError:
                if len(instring) > maxExcLoc:
                    maxException = ParseException(instring,len(instring),e.errmsg,self)
                    maxExcLoc = len(instring)
            else:
                # save match among all matches, to retry longest to shortest
                matches.append((loc2, e))

        if matches:
            # second pass: re-parse with actions, longest match first;
            # parse actions may change results, so a later retry can still
            # fail and fall through to the next-longest alternative
            matches.sort(key=lambda x: -x[0])
            for _,e in matches:
                try:
                    return e._parse( instring, loc, doActions )
                except ParseException as err:
                    err.__traceback__ = None
                    if err.loc > maxExcLoc:
                        maxException = err
                        maxExcLoc = err.loc

        if maxException is not None:
            maxException.msg = self.errmsg
            raise maxException
        else:
            raise ParseException(instring, loc, "no defined alternatives to match", self)


    def __ixor__(self, other ):
        if isinstance( other, basestring ):
            other = ParserElement._literalStringClass( other )
        return self.append( other ) #Or( [ self, other ] )

    def __str__( self ):
        if hasattr(self,"name"):
            return self.name

        if self.strRepr is None:
            self.strRepr = "{" + " ^ ".join(_ustr(e) for e in self.exprs) + "}"

        return self.strRepr

    def checkRecursion( self, parseElementList ):
        subRecCheckList = parseElementList[:] + [ self ]
        for e in self.exprs:
            e.checkRecursion( subRecCheckList )
+
+
class MatchFirst(ParseExpression):
    """
    Requires that at least one C{ParseExpression} is found.
    If two expressions match, the first one listed is the one that will match.
    May be constructed using the C{'|'} operator.

    Example::
        # construct MatchFirst using '|' operator

        # watch the order of expressions to match
        number = Word(nums) | Combine(Word(nums) + '.' + Word(nums))
        print(number.searchString("123 3.1416 789")) #  Fail! -> [['123'], ['3'], ['1416'], ['789']]

        # put more selective expression first
        number = Combine(Word(nums) + '.' + Word(nums)) | Word(nums)
        print(number.searchString("123 3.1416 789")) #  Better -> [['123'], ['3.1416'], ['789']]
    """
    def __init__( self, exprs, savelist = False ):
        super(MatchFirst,self).__init__(exprs, savelist)
        if self.exprs:
            self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs)
        else:
            self.mayReturnEmpty = True

    def parseImpl( self, instring, loc, doActions=True ):
        maxExcLoc = -1
        maxException = None
        # try each alternative in order, returning the first success;
        # track the failure that made it furthest into the input
        for e in self.exprs:
            try:
                ret = e._parse( instring, loc, doActions )
                return ret
            except ParseException as err:
                if err.loc > maxExcLoc:
                    maxException = err
                    maxExcLoc = err.loc
            except IndexError:
                if len(instring) > maxExcLoc:
                    maxException = ParseException(instring,len(instring),e.errmsg,self)
                    maxExcLoc = len(instring)

        # only got here if no expression matched, raise exception for match that made it the furthest
        # (note: for-else — the loop body contains no break, so this always
        # runs after an unsuccessful scan of all alternatives)
        else:
            if maxException is not None:
                maxException.msg = self.errmsg
                raise maxException
            else:
                raise ParseException(instring, loc, "no defined alternatives to match", self)

    def __ior__(self, other ):
        if isinstance( other, basestring ):
            other = ParserElement._literalStringClass( other )
        return self.append( other ) #MatchFirst( [ self, other ] )

    def __str__( self ):
        if hasattr(self,"name"):
            return self.name

        if self.strRepr is None:
            self.strRepr = "{" + " | ".join(_ustr(e) for e in self.exprs) + "}"

        return self.strRepr

    def checkRecursion( self, parseElementList ):
        subRecCheckList = parseElementList[:] + [ self ]
        for e in self.exprs:
            e.checkRecursion( subRecCheckList )
+
+
+class Each(ParseExpression):
+    """
+    Requires all given C{ParseExpression}s to be found, but in any order.
+    Expressions may be separated by whitespace.
+    May be constructed using the C{'&'} operator.
+
+    Example::
+        color = oneOf("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN")
+        shape_type = oneOf("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON")
+        integer = Word(nums)
+        shape_attr = "shape:" + shape_type("shape")
+        posn_attr = "posn:" + Group(integer("x") + ',' + integer("y"))("posn")
+        color_attr = "color:" + color("color")
+        size_attr = "size:" + integer("size")
+
+        # use Each (using operator '&') to accept attributes in any order
+        # (shape and posn are required, color and size are optional)
+        shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr)
+
+        shape_spec.runTests('''
+            shape: SQUARE color: BLACK posn: 100, 120
+            shape: CIRCLE size: 50 color: BLUE posn: 50,80
+            color:GREEN size:20 shape:TRIANGLE posn:20,40
+            '''
+            )
+    prints::
+        shape: SQUARE color: BLACK posn: 100, 120
+        ['shape:', 'SQUARE', 'color:', 'BLACK', 'posn:', ['100', ',', '120']]
+        - color: BLACK
+        - posn: ['100', ',', '120']
+          - x: 100
+          - y: 120
+        - shape: SQUARE
+
+
+        shape: CIRCLE size: 50 color: BLUE posn: 50,80
+        ['shape:', 'CIRCLE', 'size:', '50', 'color:', 'BLUE', 'posn:', ['50', ',', '80']]
+        - color: BLUE
+        - posn: ['50', ',', '80']
+          - x: 50
+          - y: 80
+        - shape: CIRCLE
+        - size: 50
+
+
+        color: GREEN size: 20 shape: TRIANGLE posn: 20,40
+        ['color:', 'GREEN', 'size:', '20', 'shape:', 'TRIANGLE', 'posn:', ['20', ',', '40']]
+        - color: GREEN
+        - posn: ['20', ',', '40']
+          - x: 20
+          - y: 40
+        - shape: TRIANGLE
+        - size: 20
+    """
+    def __init__( self, exprs, savelist = True ):
+        super(Each,self).__init__(exprs, savelist)
+        # Each matches empty only if every sub-expression can
+        self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs)
+        self.skipWhitespace = True
+        # defer partitioning of sub-expressions until the first parse
+        self.initExprGroups = True
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if self.initExprGroups:
+            # Lazily partition sub-expressions by repetition class:
+            # - opt1map: maps id(inner expr) back to its Optional wrapper
+            # - optionals: Optional-wrapped exprs plus any expr that may match empty
+            # - multioptionals/multirequired: ZeroOrMore / OneOrMore inner exprs
+            # - required: everything that must appear at least once
+            self.opt1map = dict((id(e.expr),e) for e in self.exprs if isinstance(e,Optional))
+            opt1 = [ e.expr for e in self.exprs if isinstance(e,Optional) ]
+            opt2 = [ e for e in self.exprs if e.mayReturnEmpty and not isinstance(e,Optional)]
+            self.optionals = opt1 + opt2
+            self.multioptionals = [ e.expr for e in self.exprs if isinstance(e,ZeroOrMore) ]
+            self.multirequired = [ e.expr for e in self.exprs if isinstance(e,OneOrMore) ]
+            self.required = [ e for e in self.exprs if not isinstance(e,(Optional,ZeroOrMore,OneOrMore)) ]
+            self.required += self.multirequired
+            self.initExprGroups = False
+        tmpLoc = loc
+        tmpReqd = self.required[:]
+        tmpOpt  = self.optionals[:]
+        matchOrder = []
+
+        # Phase 1: probe with tryParse (no parse actions) until a full pass
+        # over the remaining expressions makes no progress; record the order
+        # in which expressions matched.
+        keepMatching = True
+        while keepMatching:
+            tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequired
+            failed = []
+            for e in tmpExprs:
+                try:
+                    tmpLoc = e.tryParse( instring, tmpLoc )
+                except ParseException:
+                    failed.append(e)
+                else:
+                    # map an inner expr back to its Optional wrapper if needed
+                    matchOrder.append(self.opt1map.get(id(e),e))
+                    if e in tmpReqd:
+                        tmpReqd.remove(e)
+                    elif e in tmpOpt:
+                        tmpOpt.remove(e)
+            if len(failed) == len(tmpExprs):
+                keepMatching = False
+
+        if tmpReqd:
+            missing = ", ".join(_ustr(e) for e in tmpReqd)
+            raise ParseException(instring,loc,"Missing one or more required elements (%s)" % missing )
+
+        # add any unmatched Optionals, in case they have default values defined
+        matchOrder += [e for e in self.exprs if isinstance(e,Optional) and e.expr in tmpOpt]
+
+        # Phase 2: re-parse in the discovered order, this time running parse
+        # actions and collecting results.
+        resultlist = []
+        for e in matchOrder:
+            loc,results = e._parse(instring,loc,doActions)
+            resultlist.append(results)
+
+        finalResults = sum(resultlist, ParseResults([]))
+        return loc, finalResults
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "{" + " & ".join(_ustr(e) for e in self.exprs) + "}"
+
+        return self.strRepr
+
+    def checkRecursion( self, parseElementList ):
+        subRecCheckList = parseElementList[:] + [ self ]
+        for e in self.exprs:
+            e.checkRecursion( subRecCheckList )
+
+
+class ParseElementEnhance(ParserElement):
+ """
+ Abstract subclass of C{ParserElement}, for combining and post-processing parsed tokens.
+ """
+ def __init__( self, expr, savelist=False ):
+ super(ParseElementEnhance,self).__init__(savelist)
+ if isinstance( expr, basestring ):
+ if issubclass(ParserElement._literalStringClass, Token):
+ expr = ParserElement._literalStringClass(expr)
+ else:
+ expr = ParserElement._literalStringClass(Literal(expr))
+ self.expr = expr
+ self.strRepr = None
+ if expr is not None:
+ self.mayIndexError = expr.mayIndexError
+ self.mayReturnEmpty = expr.mayReturnEmpty
+ self.setWhitespaceChars( expr.whiteChars )
+ self.skipWhitespace = expr.skipWhitespace
+ self.saveAsList = expr.saveAsList
+ self.callPreparse = expr.callPreparse
+ self.ignoreExprs.extend(expr.ignoreExprs)
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ if self.expr is not None:
+ return self.expr._parse( instring, loc, doActions, callPreParse=False )
+ else:
+ raise ParseException("",loc,self.errmsg,self)
+
+ def leaveWhitespace( self ):
+ self.skipWhitespace = False
+ self.expr = self.expr.copy()
+ if self.expr is not None:
+ self.expr.leaveWhitespace()
+ return self
+
+ def ignore( self, other ):
+ if isinstance( other, Suppress ):
+ if other not in self.ignoreExprs:
+ super( ParseElementEnhance, self).ignore( other )
+ if self.expr is not None:
+ self.expr.ignore( self.ignoreExprs[-1] )
+ else:
+ super( ParseElementEnhance, self).ignore( other )
+ if self.expr is not None:
+ self.expr.ignore( self.ignoreExprs[-1] )
+ return self
+
+ def streamline( self ):
+ super(ParseElementEnhance,self).streamline()
+ if self.expr is not None:
+ self.expr.streamline()
+ return self
+
+ def checkRecursion( self, parseElementList ):
+ if self in parseElementList:
+ raise RecursiveGrammarException( parseElementList+[self] )
+ subRecCheckList = parseElementList[:] + [ self ]
+ if self.expr is not None:
+ self.expr.checkRecursion( subRecCheckList )
+
+ def validate( self, validateTrace=[] ):
+ tmp = validateTrace[:]+[self]
+ if self.expr is not None:
+ self.expr.validate(tmp)
+ self.checkRecursion( [] )
+
+ def __str__( self ):
+ try:
+ return super(ParseElementEnhance,self).__str__()
+ except Exception:
+ pass
+
+ if self.strRepr is None and self.expr is not None:
+ self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.expr) )
+ return self.strRepr
+
+
+class FollowedBy(ParseElementEnhance):
+ """
+ Lookahead matching of the given parse expression. C{FollowedBy}
+ does I{not} advance the parsing position within the input string, it only
+ verifies that the specified parse expression matches at the current
+ position. C{FollowedBy} always returns a null token list.
+
+ Example::
+ # use FollowedBy to match a label only if it is followed by a ':'
+ data_word = Word(alphas)
+ label = data_word + FollowedBy(':')
+ attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+
+ OneOrMore(attr_expr).parseString("shape: SQUARE color: BLACK posn: upper left").pprint()
+ prints::
+ [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']]
+ """
+ def __init__( self, expr ):
+ super(FollowedBy,self).__init__(expr)
+ self.mayReturnEmpty = True
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ self.expr.tryParse( instring, loc )
+ return loc, []
+
+
+class NotAny(ParseElementEnhance):
+ """
+ Lookahead to disallow matching with the given parse expression. C{NotAny}
+ does I{not} advance the parsing position within the input string, it only
+ verifies that the specified parse expression does I{not} match at the current
+ position. Also, C{NotAny} does I{not} skip over leading whitespace. C{NotAny}
+ always returns a null token list. May be constructed using the '~' operator.
+
+ Example::
+
+ """
+ def __init__( self, expr ):
+ super(NotAny,self).__init__(expr)
+ #~ self.leaveWhitespace()
+ self.skipWhitespace = False # do NOT use self.leaveWhitespace(), don't want to propagate to exprs
+ self.mayReturnEmpty = True
+ self.errmsg = "Found unwanted token, "+_ustr(self.expr)
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ if self.expr.canParseNext(instring, loc):
+ raise ParseException(instring, loc, self.errmsg, self)
+ return loc, []
+
+ def __str__( self ):
+ if hasattr(self,"name"):
+ return self.name
+
+ if self.strRepr is None:
+ self.strRepr = "~{" + _ustr(self.expr) + "}"
+
+ return self.strRepr
+
+class _MultipleMatch(ParseElementEnhance):
+    # Abstract base for OneOrMore/ZeroOrMore: repeatedly matches the
+    # contained expression, optionally stopping when a 'stopOn' sentinel
+    # expression would match next.
+    def __init__( self, expr, stopOn=None):
+        super(_MultipleMatch, self).__init__(expr)
+        self.saveAsList = True
+        ender = stopOn
+        if isinstance(ender, basestring):
+            ender = ParserElement._literalStringClass(ender)
+        # negative lookahead for the sentinel; None when no sentinel given
+        self.not_ender = ~ender if ender is not None else None
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        # cache bound methods in locals - this loop is a parsing hot path
+        self_expr_parse = self.expr._parse
+        self_skip_ignorables = self._skipIgnorables
+        check_ender = self.not_ender is not None
+        if check_ender:
+            try_not_ender = self.not_ender.tryParse
+
+        # must be at least one (but first see if we are the stopOn sentinel;
+        # if so, fail)
+        if check_ender:
+            try_not_ender(instring, loc)
+        loc, tokens = self_expr_parse( instring, loc, doActions, callPreParse=False )
+        try:
+            hasIgnoreExprs = (not not self.ignoreExprs)
+            while 1:
+                if check_ender:
+                    try_not_ender(instring, loc)
+                if hasIgnoreExprs:
+                    preloc = self_skip_ignorables( instring, loc )
+                else:
+                    preloc = loc
+                loc, tmptokens = self_expr_parse( instring, preloc, doActions )
+                # only accumulate non-empty (or keyed) results
+                if tmptokens or tmptokens.haskeys():
+                    tokens += tmptokens
+        except (ParseException,IndexError):
+            # the first failure after >=1 match simply ends the repetition
+            pass
+
+        return loc, tokens
+
+class OneOrMore(_MultipleMatch):
+ """
+ Repetition of one or more of the given expression.
+
+ Parameters:
+ - expr - expression that must match one or more times
+ - stopOn - (default=C{None}) - expression for a terminating sentinel
+ (only required if the sentinel would ordinarily match the repetition
+ expression)
+
+ Example::
+ data_word = Word(alphas)
+ label = data_word + FollowedBy(':')
+ attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join))
+
+ text = "shape: SQUARE posn: upper left color: BLACK"
+ OneOrMore(attr_expr).parseString(text).pprint() # Fail! read 'color' as data instead of next label -> [['shape', 'SQUARE color']]
+
+ # use stopOn attribute for OneOrMore to avoid reading label string as part of the data
+ attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+ OneOrMore(attr_expr).parseString(text).pprint() # Better -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']]
+
+ # could also be written as
+ (attr_expr * (1,)).parseString(text).pprint()
+ """
+
+ def __str__( self ):
+ if hasattr(self,"name"):
+ return self.name
+
+ if self.strRepr is None:
+ self.strRepr = "{" + _ustr(self.expr) + "}..."
+
+ return self.strRepr
+
+class ZeroOrMore(_MultipleMatch):
+ """
+ Optional repetition of zero or more of the given expression.
+
+ Parameters:
+ - expr - expression that must match zero or more times
+ - stopOn - (default=C{None}) - expression for a terminating sentinel
+ (only required if the sentinel would ordinarily match the repetition
+ expression)
+
+ Example: similar to L{OneOrMore}
+ """
+ def __init__( self, expr, stopOn=None):
+ super(ZeroOrMore,self).__init__(expr, stopOn=stopOn)
+ self.mayReturnEmpty = True
+
+ def parseImpl( self, instring, loc, doActions=True ):
+ try:
+ return super(ZeroOrMore, self).parseImpl(instring, loc, doActions)
+ except (ParseException,IndexError):
+ return loc, []
+
+ def __str__( self ):
+ if hasattr(self,"name"):
+ return self.name
+
+ if self.strRepr is None:
+ self.strRepr = "[" + _ustr(self.expr) + "]..."
+
+ return self.strRepr
+
+class _NullToken(object):
+    # Always-falsy placeholder object; see _optionalNotMatched below.
+    def __bool__(self):
+        return False
+    __nonzero__ = __bool__  # Python 2 truthiness hook
+    def __str__(self):
+        return ""
+
+# Module-level sentinel: lets Optional distinguish "no default supplied"
+# from an explicit default of None/""/0.
+_optionalNotMatched = _NullToken()
+class Optional(ParseElementEnhance):
+    """
+    Optional matching of the given expression.
+
+    Parameters:
+     - expr - expression that must match zero or more times
+     - default (optional) - value to be returned if the optional expression is not found.
+
+    Example::
+        # US postal code can be a 5-digit zip, plus optional 4-digit qualifier
+        zip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4)))
+        zip.runTests('''
+            # traditional ZIP code
+            12345
+
+            # ZIP+4 form
+            12101-0001
+
+            # invalid ZIP
+            98765-
+            ''')
+    prints::
+        # traditional ZIP code
+        12345
+        ['12345']
+
+        # ZIP+4 form
+        12101-0001
+        ['12101-0001']
+
+        # invalid ZIP
+        98765-
+             ^
+        FAIL: Expected end of text (at char 5), (line:1, col:6)
+    """
+    def __init__( self, expr, default=_optionalNotMatched ):
+        super(Optional,self).__init__( expr, savelist=False )
+        self.saveAsList = self.expr.saveAsList
+        self.defaultValue = default
+        self.mayReturnEmpty = True
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        try:
+            loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False )
+        except (ParseException,IndexError):
+            # no match: substitute the default value if one was supplied
+            # (the _optionalNotMatched sentinel means "no default given")
+            if self.defaultValue is not _optionalNotMatched:
+                if self.expr.resultsName:
+                    # also register the default under the expression's results name
+                    tokens = ParseResults([ self.defaultValue ])
+                    tokens[self.expr.resultsName] = self.defaultValue
+                else:
+                    tokens = [ self.defaultValue ]
+            else:
+                tokens = []
+        return loc, tokens
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "[" + _ustr(self.expr) + "]"
+
+        return self.strRepr
+
+class SkipTo(ParseElementEnhance):
+    """
+    Token for skipping over all undefined text until the matched expression is found.
+
+    Parameters:
+     - expr - target expression marking the end of the data to be skipped
+     - include - (default=C{False}) if True, the target expression is also parsed
+          (the skipped text and target expression are returned as a 2-element list).
+     - ignore - (default=C{None}) used to define grammars (typically quoted strings and
+          comments) that might contain false matches to the target expression
+     - failOn - (default=C{None}) define expressions that are not allowed to be
+          included in the skipped test; if found before the target expression is found,
+          the SkipTo is not a match
+
+    Example::
+        report = '''
+            Outstanding Issues Report - 1 Jan 2000
+
+               # | Severity | Description                               | Days Open
+            -----+----------+-------------------------------------------+-----------
+             101 | Critical | Intermittent system crash                 |         6
+              94 | Cosmetic | Spelling error on Login ('log|n')         |        14
+              79 | Minor    | System slow when running too many reports |        47
+            '''
+        integer = Word(nums)
+        SEP = Suppress('|')
+        # use SkipTo to simply match everything up until the next SEP
+        # - ignore quoted strings, so that a '|' character inside a quoted string does not match
+        # - parse action will call token.strip() for each matched token, i.e., the description body
+        string_data = SkipTo(SEP, ignore=quotedString)
+        string_data.setParseAction(tokenMap(str.strip))
+        ticket_expr = (integer("issue_num") + SEP
+                      + string_data("sev") + SEP
+                      + string_data("desc") + SEP
+                      + integer("days_open"))
+
+        for tkt in ticket_expr.searchString(report):
+            print tkt.dump()
+    prints::
+        ['101', 'Critical', 'Intermittent system crash', '6']
+        - days_open: 6
+        - desc: Intermittent system crash
+        - issue_num: 101
+        - sev: Critical
+        ['94', 'Cosmetic', "Spelling error on Login ('log|n')", '14']
+        - days_open: 14
+        - desc: Spelling error on Login ('log|n')
+        - issue_num: 94
+        - sev: Cosmetic
+        ['79', 'Minor', 'System slow when running too many reports', '47']
+        - days_open: 47
+        - desc: System slow when running too many reports
+        - issue_num: 79
+        - sev: Minor
+    """
+    def __init__( self, other, include=False, ignore=None, failOn=None ):
+        super( SkipTo, self ).__init__( other )
+        self.ignoreExpr = ignore
+        self.mayReturnEmpty = True
+        self.mayIndexError = False
+        self.includeMatch = include
+        self.asList = False
+        # promote a bare-string failOn to the current literal class
+        if isinstance(failOn, basestring):
+            self.failOn = ParserElement._literalStringClass(failOn)
+        else:
+            self.failOn = failOn
+        self.errmsg = "No match found for "+_ustr(self.expr)
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        startloc = loc
+        instrlen = len(instring)
+        expr = self.expr
+        expr_parse = self.expr._parse
+        # pre-bind optional hooks outside the scan loop (hot path)
+        self_failOn_canParseNext = self.failOn.canParseNext if self.failOn is not None else None
+        self_ignoreExpr_tryParse = self.ignoreExpr.tryParse if self.ignoreExpr is not None else None
+
+        # scan forward one character at a time until the target matches
+        tmploc = loc
+        while tmploc <= instrlen:
+            if self_failOn_canParseNext is not None:
+                # break if failOn expression matches
+                if self_failOn_canParseNext(instring, tmploc):
+                    break
+
+            if self_ignoreExpr_tryParse is not None:
+                # advance past ignore expressions
+                while 1:
+                    try:
+                        tmploc = self_ignoreExpr_tryParse(instring, tmploc)
+                    except ParseBaseException:
+                        break
+
+            try:
+                expr_parse(instring, tmploc, doActions=False, callPreParse=False)
+            except (ParseException, IndexError):
+                # no match, advance loc in string
+                tmploc += 1
+            else:
+                # matched skipto expr, done
+                break
+
+        # while/else: runs only when the loop was exhausted without break-ing
+        else:
+            # ran off the end of the input string without matching skipto expr, fail
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        # build up return values
+        loc = tmploc
+        skiptext = instring[startloc:loc]
+        skipresult = ParseResults(skiptext)
+
+        if self.includeMatch:
+            # this time parse the target for real, with actions enabled
+            loc, mat = expr_parse(instring,loc,doActions,callPreParse=False)
+            skipresult += mat
+
+        return loc, skipresult
+
+class Forward(ParseElementEnhance):
+    """
+    Forward declaration of an expression to be defined later -
+    used for recursive grammars, such as algebraic infix notation.
+    When the expression is known, it is assigned to the C{Forward} variable using the '<<' operator.
+
+    Note: take care when assigning to C{Forward} not to overlook precedence of operators.
+    Specifically, '|' has a lower precedence than '<<', so that::
+        fwdExpr << a | b | c
+    will actually be evaluated as::
+        (fwdExpr << a) | b | c
+    thereby leaving b and c out as parseable alternatives.  It is recommended that you
+    explicitly group the values inserted into the C{Forward}::
+        fwdExpr << (a | b | c)
+    Converting to use the '<<=' operator instead will avoid this problem.
+
+    See L{ParseResults.pprint} for an example of a recursive parser created using
+    C{Forward}.
+    """
+    def __init__( self, other=None ):
+        # expr stays None until a definition is assigned via << or <<=
+        super(Forward,self).__init__( other, savelist=False )
+
+    def __lshift__( self, other ):
+        # bind the deferred definition and adopt its parse characteristics
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass(other)
+        self.expr = other
+        self.strRepr = None
+        self.mayIndexError = self.expr.mayIndexError
+        self.mayReturnEmpty = self.expr.mayReturnEmpty
+        self.setWhitespaceChars( self.expr.whiteChars )
+        self.skipWhitespace = self.expr.skipWhitespace
+        self.saveAsList = self.expr.saveAsList
+        self.ignoreExprs.extend(self.expr.ignoreExprs)
+        return self
+
+    def __ilshift__(self, other):
+        # ``fwd <<= expr`` delegates to <<
+        return self << other
+
+    def leaveWhitespace( self ):
+        # unlike ParseElementEnhance, do not copy/propagate into self.expr
+        self.skipWhitespace = False
+        return self
+
+    def streamline( self ):
+        if not self.streamlined:
+            self.streamlined = True
+            if self.expr is not None:
+                self.expr.streamline()
+        return self
+
+    def validate( self, validateTrace=[] ):
+        # guard against infinite recursion through self-referential grammars
+        if self not in validateTrace:
+            tmp = validateTrace[:]+[self]
+            if self.expr is not None:
+                self.expr.validate(tmp)
+        self.checkRecursion([])
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+        # NOTE(review): everything after this return is intentionally
+        # unreachable dead code - the full recursive rendering below was
+        # "stubbed out" because it caused severe memory/perf issues.
+        return self.__class__.__name__ + ": ..."
+
+        # stubbed out for now - creates awful memory and perf issues
+        self._revertClass = self.__class__
+        self.__class__ = _ForwardNoRecurse
+        try:
+            if self.expr is not None:
+                retString = _ustr(self.expr)
+            else:
+                retString = "None"
+        finally:
+            self.__class__ = self._revertClass
+        return self.__class__.__name__ + ": " + retString
+
+    def copy(self):
+        if self.expr is not None:
+            return super(Forward,self).copy()
+        else:
+            # undefined Forward: return a new Forward deferring to this one
+            ret = Forward()
+            ret <<= self
+            return ret
+
+class _ForwardNoRecurse(Forward):
+    # Temporary stand-in class used while rendering a Forward, so that a
+    # self-referential grammar prints "..." instead of recursing forever.
+    def __str__( self ):
+        return "..."
+
+class TokenConverter(ParseElementEnhance):
+    """
+    Abstract subclass of C{ParseExpression}, for converting parsed results.
+    Subclasses override C{postParse} to transform the matched tokens.
+    """
+    def __init__( self, expr, savelist=False ):
+        # savelist is intentionally not forwarded; converters manage their
+        # own list-saving behavior
+        super(TokenConverter,self).__init__( expr )#, savelist )
+        self.saveAsList = False
+
+class Combine(TokenConverter):
+    """
+    Converter to concatenate all matching tokens to a single string.
+    By default, the matching patterns must also be contiguous in the input string;
+    this can be disabled by specifying C{'adjacent=False'} in the constructor.
+
+    Example::
+        real = Word(nums) + '.' + Word(nums)
+        print(real.parseString('3.1416')) # -> ['3', '.', '1416']
+        # will also erroneously match the following
+        print(real.parseString('3. 1416')) # -> ['3', '.', '1416']
+
+        real = Combine(Word(nums) + '.' + Word(nums))
+        print(real.parseString('3.1416')) # -> ['3.1416']
+        # no match when there are internal spaces
+        print(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...)
+    """
+    def __init__( self, expr, joinString="", adjacent=True ):
+        super(Combine,self).__init__( expr )
+        # suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself
+        if adjacent:
+            self.leaveWhitespace()
+        self.adjacent = adjacent
+        self.skipWhitespace = True
+        self.joinString = joinString
+        self.callPreparse = True
+
+    def ignore( self, other ):
+        # in adjacent mode, register ignorables only on this element, not
+        # on the contained expression (which must stay contiguous)
+        if self.adjacent:
+            ParserElement.ignore(self, other)
+        else:
+            super( Combine, self).ignore( other )
+        return self
+
+    def postParse( self, instring, loc, tokenlist ):
+        # replace the token list with a single joined string, preserving
+        # any named results via the copy
+        retToks = tokenlist.copy()
+        del retToks[:]
+        retToks += ParseResults([ "".join(tokenlist._asStringList(self.joinString)) ], modal=self.modalResults)
+
+        if self.resultsName and retToks.haskeys():
+            return [ retToks ]
+        else:
+            return retToks
+
+class Group(TokenConverter):
+    """
+    Converter to return the matched tokens as a list - useful for returning tokens of C{L{ZeroOrMore}} and C{L{OneOrMore}} expressions.
+
+    Example::
+        ident = Word(alphas)
+        num = Word(nums)
+        term = ident | num
+        func = ident + Optional(delimitedList(term))
+        print(func.parseString("fn a,b,100"))  # -> ['fn', 'a', 'b', '100']
+
+        func = ident + Group(Optional(delimitedList(term)))
+        print(func.parseString("fn a,b,100"))  # -> ['fn', ['a', 'b', '100']]
+    """
+    def __init__( self, expr ):
+        super(Group,self).__init__( expr )
+        self.saveAsList = True
+
+    def postParse( self, instring, loc, tokenlist ):
+        # wrap the matched tokens in a single-element list to create nesting
+        return [ tokenlist ]
+
+class Dict(TokenConverter):
+    """
+    Converter to return a repetitive expression as a list, but also as a dictionary.
+    Each element can also be referenced using the first token in the expression as its key.
+    Useful for tabular report scraping when the first column can be used as a item key.
+
+    Example::
+        data_word = Word(alphas)
+        label = data_word + FollowedBy(':')
+        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join))
+
+        text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
+        attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+
+        # print attributes as plain groups
+        print(OneOrMore(attr_expr).parseString(text).dump())
+
+        # instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign names
+        result = Dict(OneOrMore(Group(attr_expr))).parseString(text)
+        print(result.dump())
+
+        # access named fields as dict entries, or output as dict
+        print(result['shape'])
+        print(result.asDict())
+    prints::
+        ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap']
+
+        [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]
+        - color: light blue
+        - posn: upper left
+        - shape: SQUARE
+        - texture: burlap
+        SQUARE
+        {'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'}
+    See more examples at L{ParseResults} of accessing fields by results name.
+    """
+    def __init__( self, expr ):
+        super(Dict,self).__init__( expr )
+        self.saveAsList = True
+
+    def postParse( self, instring, loc, tokenlist ):
+        # For each sub-list, use its first token as the dictionary key and
+        # the remainder as the value.
+        for i,tok in enumerate(tokenlist):
+            if len(tok) == 0:
+                continue
+            ikey = tok[0]
+            if isinstance(ikey,int):
+                # integer keys are converted to their string form
+                ikey = _ustr(tok[0]).strip()
+            if len(tok)==1:
+                # key only, no value -> empty-string value
+                tokenlist[ikey] = _ParseResultsWithOffset("",i)
+            elif len(tok)==2 and not isinstance(tok[1],ParseResults):
+                # simple key/scalar-value pair
+                tokenlist[ikey] = _ParseResultsWithOffset(tok[1],i)
+            else:
+                # key plus structured value: strip the key, keep the rest
+                dictvalue = tok.copy() #ParseResults(i)
+                del dictvalue[0]
+                if len(dictvalue)!= 1 or (isinstance(dictvalue,ParseResults) and dictvalue.haskeys()):
+                    tokenlist[ikey] = _ParseResultsWithOffset(dictvalue,i)
+                else:
+                    tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0],i)
+
+        if self.resultsName:
+            return [ tokenlist ]
+        else:
+            return tokenlist
+
+
+class Suppress(TokenConverter):
+    """
+    Converter for ignoring the results of a parsed expression.
+
+    Example::
+        source = "a, b, c,d"
+        wd = Word(alphas)
+        wd_list1 = wd + ZeroOrMore(',' + wd)
+        print(wd_list1.parseString(source))
+
+        # often, delimiters that are useful during parsing are just in the
+        # way afterward - use Suppress to keep them out of the parsed output
+        wd_list2 = wd + ZeroOrMore(Suppress(',') + wd)
+        print(wd_list2.parseString(source))
+    prints::
+        ['a', ',', 'b', ',', 'c', ',', 'd']
+        ['a', 'b', 'c', 'd']
+    (See also L{delimitedList}.)
+    """
+    def postParse( self, instring, loc, tokenlist ):
+        # discard all matched tokens
+        return []
+
+    def suppress( self ):
+        # already suppressed - return self unchanged
+        return self
+
+
+class OnlyOnce(object):
+ """
+ Wrapper for parse actions, to ensure they are only called once.
+ """
+ def __init__(self, methodCall):
+ self.callable = _trim_arity(methodCall)
+ self.called = False
+ def __call__(self,s,l,t):
+ if not self.called:
+ results = self.callable(s,l,t)
+ self.called = True
+ return results
+ raise ParseException(s,l,"")
+ def reset(self):
+ self.called = False
+
+def traceParseAction(f):
+    """
+    Decorator for debugging parse actions.
+
+    When the parse action is called, this decorator will print C{">> entering I{method-name}(line:I{current_source_line}, I{parse_location}, I{matched_tokens})".}
+    When the parse action completes, the decorator will print C{"<<"} followed by the returned value, or any exception that the parse action raised.
+
+    Example::
+        wd = Word(alphas)
+
+        @traceParseAction
+        def remove_duplicate_chars(tokens):
+            return ''.join(sorted(set(''.join(tokens))))
+
+        wds = OneOrMore(wd).setParseAction(remove_duplicate_chars)
+        print(wds.parseString("slkdjs sld sldd sdlf sdljf"))
+    prints::
+        >>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {}))
+        <<leaving remove_duplicate_chars (ret: 'dfjkls')
+        ['dfjkls']
+    """
+    f = _trim_arity(f)
+    def z(*paArgs):
+        thisFunc = f.__name__
+        # the last three args are always (s, l, t); a 4th leading arg means
+        # the action is a bound method - prefix the class name
+        s,l,t = paArgs[-3:]
+        if len(paArgs)>3:
+            thisFunc = paArgs[0].__class__.__name__ + '.' + thisFunc
+        sys.stderr.write( ">>entering %s(line: '%s', %d, %r)\n" % (thisFunc,line(l,s),l,t) )
+        try:
+            ret = f(*paArgs)
+        except Exception as exc:
+            # log the failure, then let the exception propagate
+            sys.stderr.write( "<<leaving %s (exception: %s)\n" % (thisFunc,exc) )
+            raise
+        sys.stderr.write( "<<leaving %s (ret: %r)\n" % (thisFunc,ret) )
+        return ret
+    try:
+        z.__name__ = f.__name__
+    except AttributeError:
+        pass
+    return z
+
+#
+# global helpers
+#
+def delimitedList( expr, delim=",", combine=False ):
+ """
+ Helper to define a delimited list of expressions - the delimiter defaults to ','.
+ By default, the list elements and delimiters can have intervening whitespace, and
+ comments, but this can be overridden by passing C{combine=True} in the constructor.
+ If C{combine} is set to C{True}, the matching tokens are returned as a single token
+ string, with the delimiters included; otherwise, the matching tokens are returned
+ as a list of tokens, with the delimiters suppressed.
+
+ Example::
+ delimitedList(Word(alphas)).parseString("aa,bb,cc") # -> ['aa', 'bb', 'cc']
+ delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE']
+ """
+ dlName = _ustr(expr)+" ["+_ustr(delim)+" "+_ustr(expr)+"]..."
+ if combine:
+ return Combine( expr + ZeroOrMore( delim + expr ) ).setName(dlName)
+ else:
+ return ( expr + ZeroOrMore( Suppress( delim ) + expr ) ).setName(dlName)
+
+def countedArray( expr, intExpr=None ):
+    """
+    Helper to define a counted list of expressions.
+    This helper defines a pattern of the form::
+        integer expr expr expr...
+    where the leading integer tells how many expr expressions follow.
+    The matched tokens returns the array of expr tokens as a list - the leading count token is suppressed.
+
+    If C{intExpr} is specified, it should be a pyparsing expression that produces an integer value.
+
+    Example::
+        countedArray(Word(alphas)).parseString('2 ab cd ef')  # -> ['ab', 'cd']
+
+        # in this parser, the leading integer value is given in binary,
+        # '10' indicating that 2 values are in the array
+        binaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2))
+        countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef')  # -> ['ab', 'cd']
+    """
+    arrayExpr = Forward()
+    def countFieldParseAction(s,l,t):
+        # side effect: rebind the Forward to exactly n copies of expr,
+        # based on the count value just parsed (n == 0 -> match empty)
+        n = t[0]
+        arrayExpr << (n and Group(And([expr]*n)) or Group(empty))
+        # the count token itself is suppressed from the results
+        return []
+    if intExpr is None:
+        intExpr = Word(nums).setParseAction(lambda t:int(t[0]))
+    else:
+        # copy so the caller's expression is not mutated
+        intExpr = intExpr.copy()
+    intExpr.setName("arrayLen")
+    # callDuringTry so the Forward is rebound even during lookahead parses
+    intExpr.addParseAction(countFieldParseAction, callDuringTry=True)
+    return ( intExpr + arrayExpr ).setName('(len) ' + _ustr(expr) + '...')
+
+def _flatten(L):
+ ret = []
+ for i in L:
+ if isinstance(i,list):
+ ret.extend(_flatten(i))
+ else:
+ ret.append(i)
+ return ret
+
+def matchPreviousLiteral(expr):
+    """
+    Helper to define an expression that is indirectly defined from
+    the tokens matched in a previous expression, that is, it looks
+    for a 'repeat' of a previous expression.  For example::
+        first = Word(nums)
+        second = matchPreviousLiteral(first)
+        matchExpr = first + ":" + second
+    will match C{"1:1"}, but not C{"1:2"}.  Because this matches a
+    previous literal, will also match the leading C{"1:1"} in C{"1:10"}.
+    If this is not desired, use C{matchPreviousExpr}.
+    Do I{not} use with packrat parsing enabled.
+    """
+    rep = Forward()
+    def copyTokenToRepeater(s,l,t):
+        # rebind the Forward to literal(s) for whatever expr just matched
+        if t:
+            if len(t) == 1:
+                rep << t[0]
+            else:
+                # flatten t tokens
+                tflat = _flatten(t.asList())
+                rep << And(Literal(tt) for tt in tflat)
+        else:
+            # expr matched empty - the repeat must also match empty
+            rep << Empty()
+    expr.addParseAction(copyTokenToRepeater, callDuringTry=True)
+    rep.setName('(prev) ' + _ustr(expr))
+    return rep
+
+def matchPreviousExpr(expr):
+    """
+    Helper to define an expression that is indirectly defined from
+    the tokens matched in a previous expression, that is, it looks
+    for a 'repeat' of a previous expression.  For example::
+        first = Word(nums)
+        second = matchPreviousExpr(first)
+        matchExpr = first + ":" + second
+    will match C{"1:1"}, but not C{"1:2"}.  Because this matches by
+    expressions, will I{not} match the leading C{"1:1"} in C{"1:10"};
+    the expressions are evaluated first, and then compared, so
+    C{"1"} is compared with C{"10"}.
+    Do I{not} use with packrat parsing enabled.
+    """
+    rep = Forward()
+    # the repeat parses with a copy of expr, then a parse action compares
+    # its tokens against what the first occurrence matched
+    e2 = expr.copy()
+    rep <<= e2
+    def copyTokenToRepeater(s,l,t):
+        matchTokens = _flatten(t.asList())
+        def mustMatchTheseTokens(s,l,t):
+            theseTokens = _flatten(t.asList())
+            if  theseTokens != matchTokens:
+                raise ParseException("",0,"")
+        # setParseAction (not add) - each match replaces the comparison
+        rep.setParseAction( mustMatchTheseTokens, callDuringTry=True )
+    expr.addParseAction(copyTokenToRepeater, callDuringTry=True)
+    rep.setName('(prev) ' + _ustr(expr))
+    return rep
+
+def _escapeRegexRangeChars(s):
+ #~ escape these chars: ^-]
+ for c in r"\^-]":
+ s = s.replace(c,_bslash+c)
+ s = s.replace("\n",r"\n")
+ s = s.replace("\t",r"\t")
+ return _ustr(s)
+
+def oneOf( strs, caseless=False, useRegex=True ):
+ """
+ Helper to quickly define a set of alternative Literals, and makes sure to do
+ longest-first testing when there is a conflict, regardless of the input order,
+ but returns a C{L{MatchFirst}} for best performance.
+
+ Parameters:
+ - strs - a string of space-delimited literals, or a collection of string literals
+ - caseless - (default=C{False}) - treat all literals as caseless
+ - useRegex - (default=C{True}) - as an optimization, will generate a Regex
+ object; otherwise, will generate a C{MatchFirst} object (if C{caseless=True}, or
+ if creating a C{Regex} raises an exception)
+
+ Example::
+ comp_oper = oneOf("< = > <= >= !=")
+ var = Word(alphas)
+ number = Word(nums)
+ term = var | number
+ comparison_expr = term + comp_oper + term
+ print(comparison_expr.searchString("B = 12 AA=23 B<=AA AA>12"))
+ prints::
+ [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']]
+ """
+ if caseless:
+ isequal = ( lambda a,b: a.upper() == b.upper() )
+ masks = ( lambda a,b: b.upper().startswith(a.upper()) )
+ parseElementClass = CaselessLiteral
+ else:
+ isequal = ( lambda a,b: a == b )
+ masks = ( lambda a,b: b.startswith(a) )
+ parseElementClass = Literal
+
+ symbols = []
+ if isinstance(strs,basestring):
+ symbols = strs.split()
+ elif isinstance(strs, Iterable):
+ symbols = list(strs)
+ else:
+ warnings.warn("Invalid argument to oneOf, expected string or iterable",
+ SyntaxWarning, stacklevel=2)
+ if not symbols:
+ return NoMatch()
+
+ i = 0
+ while i < len(symbols)-1:
+ cur = symbols[i]
+ for j,other in enumerate(symbols[i+1:]):
+ if ( isequal(other, cur) ):
+ del symbols[i+j+1]
+ break
+ elif ( masks(cur, other) ):
+ del symbols[i+j+1]
+ symbols.insert(i,other)
+ cur = other
+ break
+ else:
+ i += 1
+
+ if not caseless and useRegex:
+ #~ print (strs,"->", "|".join( [ _escapeRegexChars(sym) for sym in symbols] ))
+ try:
+ if len(symbols)==len("".join(symbols)):
+ return Regex( "[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols) ).setName(' | '.join(symbols))
+ else:
+ return Regex( "|".join(re.escape(sym) for sym in symbols) ).setName(' | '.join(symbols))
+ except Exception:
+ warnings.warn("Exception creating Regex for oneOf, building MatchFirst",
+ SyntaxWarning, stacklevel=2)
+
+
+ # last resort, just use MatchFirst
+ return MatchFirst(parseElementClass(sym) for sym in symbols).setName(' | '.join(symbols))
+
+def dictOf( key, value ):
+ """
+ Helper to easily and clearly define a dictionary by specifying the respective patterns
+ for the key and value. Takes care of defining the C{L{Dict}}, C{L{ZeroOrMore}}, and C{L{Group}} tokens
+ in the proper order. The key pattern can include delimiting markers or punctuation,
+ as long as they are suppressed, thereby leaving the significant key text. The value
+ pattern can include named results, so that the C{Dict} results can include named token
+ fields.
+
+ Example::
+ text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
+ attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+ print(OneOrMore(attr_expr).parseString(text).dump())
+
+ attr_label = label
+ attr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)
+
+ # similar to Dict, but simpler call format
+ result = dictOf(attr_label, attr_value).parseString(text)
+ print(result.dump())
+ print(result['shape'])
+ print(result.shape) # object attribute access works too
+ print(result.asDict())
+ prints::
+ [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]
+ - color: light blue
+ - posn: upper left
+ - shape: SQUARE
+ - texture: burlap
+ SQUARE
+ SQUARE
+ {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'}
+ """
+ return Dict( ZeroOrMore( Group ( key + value ) ) )
+
+def originalTextFor(expr, asString=True):
+ """
+ Helper to return the original, untokenized text for a given expression. Useful to
+ restore the parsed fields of an HTML start tag into the raw tag text itself, or to
+ revert separate tokens with intervening whitespace back to the original matching
+ input text. By default, returns astring containing the original parsed text.
+
+ If the optional C{asString} argument is passed as C{False}, then the return value is a
+ C{L{ParseResults}} containing any results names that were originally matched, and a
+ single token containing the original matched text from the input string. So if
+ the expression passed to C{L{originalTextFor}} contains expressions with defined
+ results names, you must set C{asString} to C{False} if you want to preserve those
+ results name values.
+
+ Example::
+ src = "this is test <b> bold <i>text</i> </b> normal text "
+ for tag in ("b","i"):
+ opener,closer = makeHTMLTags(tag)
+ patt = originalTextFor(opener + SkipTo(closer) + closer)
+ print(patt.searchString(src)[0])
+ prints::
+ ['<b> bold <i>text</i> </b>']
+ ['<i>text</i>']
+ """
+ locMarker = Empty().setParseAction(lambda s,loc,t: loc)
+ endlocMarker = locMarker.copy()
+ endlocMarker.callPreparse = False
+ matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end")
+ if asString:
+ extractText = lambda s,l,t: s[t._original_start:t._original_end]
+ else:
+ def extractText(s,l,t):
+ t[:] = [s[t.pop('_original_start'):t.pop('_original_end')]]
+ matchExpr.setParseAction(extractText)
+ matchExpr.ignoreExprs = expr.ignoreExprs
+ return matchExpr
+
+def ungroup(expr):
+ """
+ Helper to undo pyparsing's default grouping of And expressions, even
+ if all but one are non-empty.
+ """
+ return TokenConverter(expr).setParseAction(lambda t:t[0])
+
+def locatedExpr(expr):
+ """
+ Helper to decorate a returned token with its starting and ending locations in the input string.
+ This helper adds the following results names:
+ - locn_start = location where matched expression begins
+ - locn_end = location where matched expression ends
+ - value = the actual parsed results
+
+ Be careful if the input text contains C{<TAB>} characters, you may want to call
+ C{L{ParserElement.parseWithTabs}}
+
+ Example::
+ wd = Word(alphas)
+ for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"):
+ print(match)
+ prints::
+ [[0, 'ljsdf', 5]]
+ [[8, 'lksdjjf', 15]]
+ [[18, 'lkkjj', 23]]
+ """
+ locator = Empty().setParseAction(lambda s,l,t: l)
+ return Group(locator("locn_start") + expr("value") + locator.copy().leaveWhitespace()("locn_end"))
+
+
+# convenience constants for positional expressions
+empty = Empty().setName("empty")
+lineStart = LineStart().setName("lineStart")
+lineEnd = LineEnd().setName("lineEnd")
+stringStart = StringStart().setName("stringStart")
+stringEnd = StringEnd().setName("stringEnd")
+
+_escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1])
+_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16)))
+_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8)))
+_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | CharsNotIn(r'\]', exact=1)
+_charRange = Group(_singleChar + Suppress("-") + _singleChar)
+_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]"
+
+def srange(s):
+ r"""
+ Helper to easily define string ranges for use in Word construction. Borrows
+ syntax from regexp '[]' string range definitions::
+ srange("[0-9]") -> "0123456789"
+ srange("[a-z]") -> "abcdefghijklmnopqrstuvwxyz"
+ srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_"
+ The input string must be enclosed in []'s, and the returned string is the expanded
+ character set joined into a single string.
+ The values enclosed in the []'s may be:
+ - a single character
+ - an escaped character with a leading backslash (such as C{\-} or C{\]})
+ - an escaped hex character with a leading C{'\x'} (C{\x21}, which is a C{'!'} character)
+ (C{\0x##} is also supported for backwards compatibility)
+ - an escaped octal character with a leading C{'\0'} (C{\041}, which is a C{'!'} character)
+ - a range of any of the above, separated by a dash (C{'a-z'}, etc.)
+ - any combination of the above (C{'aeiouy'}, C{'a-zA-Z0-9_$'}, etc.)
+ """
+ _expanded = lambda p: p if not isinstance(p,ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]),ord(p[1])+1))
+ try:
+ return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body)
+ except Exception:
+ return ""
+
+def matchOnlyAtCol(n):
+ """
+ Helper method for defining parse actions that require matching at a specific
+ column in the input text.
+ """
+ def verifyCol(strg,locn,toks):
+ if col(locn,strg) != n:
+ raise ParseException(strg,locn,"matched token not at column %d" % n)
+ return verifyCol
+
+def replaceWith(replStr):
+ """
+ Helper method for common parse actions that simply return a literal value. Especially
+ useful when used with C{L{transformString<ParserElement.transformString>}()}.
+
+ Example::
+ num = Word(nums).setParseAction(lambda toks: int(toks[0]))
+ na = oneOf("N/A NA").setParseAction(replaceWith(math.nan))
+ term = na | num
+
+ OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234]
+ """
+ return lambda s,l,t: [replStr]
+
+def removeQuotes(s,l,t):
+ """
+ Helper parse action for removing quotation marks from parsed quoted strings.
+
+ Example::
+ # by default, quotation marks are included in parsed results
+ quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"]
+
+ # use removeQuotes to strip quotation marks from parsed results
+ quotedString.setParseAction(removeQuotes)
+ quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"]
+ """
+ return t[0][1:-1]
+
+def tokenMap(func, *args):
+ """
+ Helper to define a parse action by mapping a function to all elements of a ParseResults list.If any additional
+ args are passed, they are forwarded to the given function as additional arguments after
+ the token, as in C{hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))}, which will convert the
+ parsed data to an integer using base 16.
+
+ Example (compare the last to example in L{ParserElement.transformString}::
+ hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16))
+ hex_ints.runTests('''
+ 00 11 22 aa FF 0a 0d 1a
+ ''')
+
+ upperword = Word(alphas).setParseAction(tokenMap(str.upper))
+ OneOrMore(upperword).runTests('''
+ my kingdom for a horse
+ ''')
+
+ wd = Word(alphas).setParseAction(tokenMap(str.title))
+ OneOrMore(wd).setParseAction(' '.join).runTests('''
+ now is the winter of our discontent made glorious summer by this sun of york
+ ''')
+ prints::
+ 00 11 22 aa FF 0a 0d 1a
+ [0, 17, 34, 170, 255, 10, 13, 26]
+
+ my kingdom for a horse
+ ['MY', 'KINGDOM', 'FOR', 'A', 'HORSE']
+
+ now is the winter of our discontent made glorious summer by this sun of york
+ ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York']
+ """
+ def pa(s,l,t):
+ return [func(tokn, *args) for tokn in t]
+
+ try:
+ func_name = getattr(func, '__name__',
+ getattr(func, '__class__').__name__)
+ except Exception:
+ func_name = str(func)
+ pa.__name__ = func_name
+
+ return pa
+
+upcaseTokens = tokenMap(lambda t: _ustr(t).upper())
+"""(Deprecated) Helper parse action to convert tokens to upper case. Deprecated in favor of L{pyparsing_common.upcaseTokens}"""
+
+downcaseTokens = tokenMap(lambda t: _ustr(t).lower())
+"""(Deprecated) Helper parse action to convert tokens to lower case. Deprecated in favor of L{pyparsing_common.downcaseTokens}"""
+
+def _makeTags(tagStr, xml):
+ """Internal helper to construct opening and closing tag expressions, given a tag name"""
+ if isinstance(tagStr,basestring):
+ resname = tagStr
+ tagStr = Keyword(tagStr, caseless=not xml)
+ else:
+ resname = tagStr.name
+
+ tagAttrName = Word(alphas,alphanums+"_-:")
+ if (xml):
+ tagAttrValue = dblQuotedString.copy().setParseAction( removeQuotes )
+ openTag = Suppress("<") + tagStr("tag") + \
+ Dict(ZeroOrMore(Group( tagAttrName + Suppress("=") + tagAttrValue ))) + \
+ Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">")
+ else:
+ printablesLessRAbrack = "".join(c for c in printables if c not in ">")
+ tagAttrValue = quotedString.copy().setParseAction( removeQuotes ) | Word(printablesLessRAbrack)
+ openTag = Suppress("<") + tagStr("tag") + \
+ Dict(ZeroOrMore(Group( tagAttrName.setParseAction(downcaseTokens) + \
+ Optional( Suppress("=") + tagAttrValue ) ))) + \
+ Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">")
+ closeTag = Combine(_L("</") + tagStr + ">")
+
+ openTag = openTag.setResultsName("start"+"".join(resname.replace(":"," ").title().split())).setName("<%s>" % resname)
+ closeTag = closeTag.setResultsName("end"+"".join(resname.replace(":"," ").title().split())).setName("</%s>" % resname)
+ openTag.tag = resname
+ closeTag.tag = resname
+ return openTag, closeTag
+
+def makeHTMLTags(tagStr):
+ """
+ Helper to construct opening and closing tag expressions for HTML, given a tag name. Matches
+ tags in either upper or lower case, attributes with namespaces and with quoted or unquoted values.
+
+ Example::
+ text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>'
+ # makeHTMLTags returns pyparsing expressions for the opening and closing tags as a 2-tuple
+ a,a_end = makeHTMLTags("A")
+ link_expr = a + SkipTo(a_end)("link_text") + a_end
+
+ for link in link_expr.searchString(text):
+ # attributes in the <A> tag (like "href" shown here) are also accessible as named results
+ print(link.link_text, '->', link.href)
+ prints::
+ pyparsing -> http://pyparsing.wikispaces.com
+ """
+ return _makeTags( tagStr, False )
+
+def makeXMLTags(tagStr):
+ """
+ Helper to construct opening and closing tag expressions for XML, given a tag name. Matches
+ tags only in the given upper/lower case.
+
+ Example: similar to L{makeHTMLTags}
+ """
+ return _makeTags( tagStr, True )
+
+def withAttribute(*args,**attrDict):
+ """
+ Helper to create a validating parse action to be used with start tags created
+ with C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tag
+ with a required attribute value, to avoid false matches on common tags such as
+ C{<TD>} or C{<DIV>}.
+
+ Call C{withAttribute} with a series of attribute names and values. Specify the list
+ of filter attributes names and values as:
+ - keyword arguments, as in C{(align="right")}, or
+ - as an explicit dict with C{**} operator, when an attribute name is also a Python
+ reserved word, as in C{**{"class":"Customer", "align":"right"}}
+ - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") )
+ For attribute names with a namespace prefix, you must use the second form. Attribute
+ names are matched insensitive to upper/lower case.
+
+ If just testing for C{class} (with or without a namespace), use C{L{withClass}}.
+
+ To verify that the attribute exists, but without specifying a value, pass
+ C{withAttribute.ANY_VALUE} as the value.
+
+ Example::
+ html = '''
+ <div>
+ Some text
+ <div type="grid">1 4 0 1 0</div>
+ <div type="graph">1,3 2,3 1,1</div>
+ <div>this has no type</div>
+ </div>
+
+ '''
+ div,div_end = makeHTMLTags("div")
+
+ # only match div tag having a type attribute with value "grid"
+ div_grid = div().setParseAction(withAttribute(type="grid"))
+ grid_expr = div_grid + SkipTo(div | div_end)("body")
+ for grid_header in grid_expr.searchString(html):
+ print(grid_header.body)
+
+ # construct a match with any div tag having a type attribute, regardless of the value
+ div_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE))
+ div_expr = div_any_type + SkipTo(div | div_end)("body")
+ for div_header in div_expr.searchString(html):
+ print(div_header.body)
+ prints::
+ 1 4 0 1 0
+
+ 1 4 0 1 0
+ 1,3 2,3 1,1
+ """
+ if args:
+ attrs = args[:]
+ else:
+ attrs = attrDict.items()
+ attrs = [(k,v) for k,v in attrs]
+ def pa(s,l,tokens):
+ for attrName,attrValue in attrs:
+ if attrName not in tokens:
+ raise ParseException(s,l,"no matching attribute " + attrName)
+ if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue:
+ raise ParseException(s,l,"attribute '%s' has value '%s', must be '%s'" %
+ (attrName, tokens[attrName], attrValue))
+ return pa
+withAttribute.ANY_VALUE = object()
+
+def withClass(classname, namespace=''):
+ """
+ Simplified version of C{L{withAttribute}} when matching on a div class - made
+ difficult because C{class} is a reserved word in Python.
+
+ Example::
+ html = '''
+ <div>
+ Some text
+ <div class="grid">1 4 0 1 0</div>
+ <div class="graph">1,3 2,3 1,1</div>
+ <div>this <div> has no class</div>
+ </div>
+
+ '''
+ div,div_end = makeHTMLTags("div")
+ div_grid = div().setParseAction(withClass("grid"))
+
+ grid_expr = div_grid + SkipTo(div | div_end)("body")
+ for grid_header in grid_expr.searchString(html):
+ print(grid_header.body)
+
+ div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE))
+ div_expr = div_any_type + SkipTo(div | div_end)("body")
+ for div_header in div_expr.searchString(html):
+ print(div_header.body)
+ prints::
+ 1 4 0 1 0
+
+ 1 4 0 1 0
+ 1,3 2,3 1,1
+ """
+ classattr = "%s:class" % namespace if namespace else "class"
+ return withAttribute(**{classattr : classname})
+
+opAssoc = _Constants()
+opAssoc.LEFT = object()
+opAssoc.RIGHT = object()
+
+def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ):
+ """
+ Helper method for constructing grammars of expressions made up of
+ operators working in a precedence hierarchy. Operators may be unary or
+ binary, left- or right-associative. Parse actions can also be attached
+ to operator expressions. The generated parser will also recognize the use
+ of parentheses to override operator precedences (see example below).
+
+ Note: if you define a deep operator list, you may see performance issues
+ when using infixNotation. See L{ParserElement.enablePackrat} for a
+ mechanism to potentially improve your parser performance.
+
+ Parameters:
+ - baseExpr - expression representing the most basic element for the nested
+ - opList - list of tuples, one for each operator precedence level in the
+ expression grammar; each tuple is of the form
+ (opExpr, numTerms, rightLeftAssoc, parseAction), where:
+ - opExpr is the pyparsing expression for the operator;
+ may also be a string, which will be converted to a Literal;
+ if numTerms is 3, opExpr is a tuple of two expressions, for the
+ two operators separating the 3 terms
+ - numTerms is the number of terms for this operator (must
+ be 1, 2, or 3)
+ - rightLeftAssoc is the indicator whether the operator is
+ right or left associative, using the pyparsing-defined
+ constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}.
+ - parseAction is the parse action to be associated with
+ expressions matching this operator expression (the
+ parse action tuple member may be omitted); if the parse action
+ is passed a tuple or list of functions, this is equivalent to
+ calling C{setParseAction(*fn)} (L{ParserElement.setParseAction})
+ - lpar - expression for matching left-parentheses (default=C{Suppress('(')})
+ - rpar - expression for matching right-parentheses (default=C{Suppress(')')})
+
+ Example::
+ # simple example of four-function arithmetic with ints and variable names
+ integer = pyparsing_common.signed_integer
+ varname = pyparsing_common.identifier
+
+ arith_expr = infixNotation(integer | varname,
+ [
+ ('-', 1, opAssoc.RIGHT),
+ (oneOf('* /'), 2, opAssoc.LEFT),
+ (oneOf('+ -'), 2, opAssoc.LEFT),
+ ])
+
+ arith_expr.runTests('''
+ 5+3*6
+ (5+3)*6
+ -2--11
+ ''', fullDump=False)
+ prints::
+ 5+3*6
+ [[5, '+', [3, '*', 6]]]
+
+ (5+3)*6
+ [[[5, '+', 3], '*', 6]]
+
+ -2--11
+ [[['-', 2], '-', ['-', 11]]]
+ """
+ ret = Forward()
+ lastExpr = baseExpr | ( lpar + ret + rpar )
+ for i,operDef in enumerate(opList):
+ opExpr,arity,rightLeftAssoc,pa = (operDef + (None,))[:4]
+ termName = "%s term" % opExpr if arity < 3 else "%s%s term" % opExpr
+ if arity == 3:
+ if opExpr is None or len(opExpr) != 2:
+ raise ValueError("if numterms=3, opExpr must be a tuple or list of two expressions")
+ opExpr1, opExpr2 = opExpr
+ thisExpr = Forward().setName(termName)
+ if rightLeftAssoc == opAssoc.LEFT:
+ if arity == 1:
+ matchExpr = FollowedBy(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) )
+ elif arity == 2:
+ if opExpr is not None:
+ matchExpr = FollowedBy(lastExpr + opExpr + lastExpr) + Group( lastExpr + OneOrMore( opExpr + lastExpr ) )
+ else:
+ matchExpr = FollowedBy(lastExpr+lastExpr) + Group( lastExpr + OneOrMore(lastExpr) )
+ elif arity == 3:
+ matchExpr = FollowedBy(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + \
+ Group( lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr )
+ else:
+ raise ValueError("operator must be unary (1), binary (2), or ternary (3)")
+ elif rightLeftAssoc == opAssoc.RIGHT:
+ if arity == 1:
+ # try to avoid LR with this extra test
+ if not isinstance(opExpr, Optional):
+ opExpr = Optional(opExpr)
+ matchExpr = FollowedBy(opExpr.expr + thisExpr) + Group( opExpr + thisExpr )
+ elif arity == 2:
+ if opExpr is not None:
+ matchExpr = FollowedBy(lastExpr + opExpr + thisExpr) + Group( lastExpr + OneOrMore( opExpr + thisExpr ) )
+ else:
+ matchExpr = FollowedBy(lastExpr + thisExpr) + Group( lastExpr + OneOrMore( thisExpr ) )
+ elif arity == 3:
+ matchExpr = FollowedBy(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + \
+ Group( lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr )
+ else:
+ raise ValueError("operator must be unary (1), binary (2), or ternary (3)")
+ else:
+ raise ValueError("operator must indicate right or left associativity")
+ if pa:
+ if isinstance(pa, (tuple, list)):
+ matchExpr.setParseAction(*pa)
+ else:
+ matchExpr.setParseAction(pa)
+ thisExpr <<= ( matchExpr.setName(termName) | lastExpr )
+ lastExpr = thisExpr
+ ret <<= lastExpr
+ return ret
+
+operatorPrecedence = infixNotation
+"""(Deprecated) Former name of C{L{infixNotation}}, will be dropped in a future release."""
+
+dblQuotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"').setName("string enclosed in double quotes")
+sglQuotedString = Combine(Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("string enclosed in single quotes")
+quotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"'|
+ Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("quotedString using single or double quotes")
+unicodeString = Combine(_L('u') + quotedString.copy()).setName("unicode string literal")
+
+def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()):
+ """
+ Helper method for defining nested lists enclosed in opening and closing
+ delimiters ("(" and ")" are the default).
+
+ Parameters:
+ - opener - opening character for a nested list (default=C{"("}); can also be a pyparsing expression
+ - closer - closing character for a nested list (default=C{")"}); can also be a pyparsing expression
+ - content - expression for items within the nested lists (default=C{None})
+ - ignoreExpr - expression for ignoring opening and closing delimiters (default=C{quotedString})
+
+ If an expression is not provided for the content argument, the nested
+ expression will capture all whitespace-delimited content between delimiters
+ as a list of separate values.
+
+ Use the C{ignoreExpr} argument to define expressions that may contain
+ opening or closing characters that should not be treated as opening
+ or closing characters for nesting, such as quotedString or a comment
+ expression. Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}.
+ The default is L{quotedString}, but if no expressions are to be ignored,
+ then pass C{None} for this argument.
+
+ Example::
+ data_type = oneOf("void int short long char float double")
+ decl_data_type = Combine(data_type + Optional(Word('*')))
+ ident = Word(alphas+'_', alphanums+'_')
+ number = pyparsing_common.number
+ arg = Group(decl_data_type + ident)
+ LPAR,RPAR = map(Suppress, "()")
+
+ code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment))
+
+ c_function = (decl_data_type("type")
+ + ident("name")
+ + LPAR + Optional(delimitedList(arg), [])("args") + RPAR
+ + code_body("body"))
+ c_function.ignore(cStyleComment)
+
+ source_code = '''
+ int is_odd(int x) {
+ return (x%2);
+ }
+
+ int dec_to_hex(char hchar) {
+ if (hchar >= '0' && hchar <= '9') {
+ return (ord(hchar)-ord('0'));
+ } else {
+ return (10+ord(hchar)-ord('A'));
+ }
+ }
+ '''
+ for func in c_function.searchString(source_code):
+ print("%(name)s (%(type)s) args: %(args)s" % func)
+
+ prints::
+ is_odd (int) args: [['int', 'x']]
+ dec_to_hex (int) args: [['char', 'hchar']]
+ """
+ if opener == closer:
+ raise ValueError("opening and closing strings cannot be the same")
+ if content is None:
+ if isinstance(opener,basestring) and isinstance(closer,basestring):
+ if len(opener) == 1 and len(closer)==1:
+ if ignoreExpr is not None:
+ content = (Combine(OneOrMore(~ignoreExpr +
+ CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS,exact=1))
+ ).setParseAction(lambda t:t[0].strip()))
+ else:
+ content = (empty.copy()+CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS
+ ).setParseAction(lambda t:t[0].strip()))
+ else:
+ if ignoreExpr is not None:
+ content = (Combine(OneOrMore(~ignoreExpr +
+ ~Literal(opener) + ~Literal(closer) +
+ CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1))
+ ).setParseAction(lambda t:t[0].strip()))
+ else:
+ content = (Combine(OneOrMore(~Literal(opener) + ~Literal(closer) +
+ CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1))
+ ).setParseAction(lambda t:t[0].strip()))
+ else:
+ raise ValueError("opening and closing arguments must be strings if no content expression is given")
+ ret = Forward()
+ if ignoreExpr is not None:
+ ret <<= Group( Suppress(opener) + ZeroOrMore( ignoreExpr | ret | content ) + Suppress(closer) )
+ else:
+ ret <<= Group( Suppress(opener) + ZeroOrMore( ret | content ) + Suppress(closer) )
+ ret.setName('nested %s%s expression' % (opener,closer))
+ return ret
+
+def indentedBlock(blockStatementExpr, indentStack, indent=True):
+ """
+ Helper method for defining space-delimited indentation blocks, such as
+ those used to define block statements in Python source code.
+
+ Parameters:
+ - blockStatementExpr - expression defining syntax of statement that
+ is repeated within the indented block
+ - indentStack - list created by caller to manage indentation stack
+ (multiple statementWithIndentedBlock expressions within a single grammar
+ should share a common indentStack)
+ - indent - boolean indicating whether block must be indented beyond the
+ the current level; set to False for block of left-most statements
+ (default=C{True})
+
+ A valid block must contain at least one C{blockStatement}.
+
+ Example::
+ data = '''
+ def A(z):
+ A1
+ B = 100
+ G = A2
+ A2
+ A3
+ B
+ def BB(a,b,c):
+ BB1
+ def BBA():
+ bba1
+ bba2
+ bba3
+ C
+ D
+ def spam(x,y):
+ def eggs(z):
+ pass
+ '''
+
+
+ indentStack = [1]
+ stmt = Forward()
+
+ identifier = Word(alphas, alphanums)
+ funcDecl = ("def" + identifier + Group( "(" + Optional( delimitedList(identifier) ) + ")" ) + ":")
+ func_body = indentedBlock(stmt, indentStack)
+ funcDef = Group( funcDecl + func_body )
+
+ rvalue = Forward()
+ funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")")
+ rvalue << (funcCall | identifier | Word(nums))
+ assignment = Group(identifier + "=" + rvalue)
+ stmt << ( funcDef | assignment | identifier )
+
+ module_body = OneOrMore(stmt)
+
+ parseTree = module_body.parseString(data)
+ parseTree.pprint()
+ prints::
+ [['def',
+ 'A',
+ ['(', 'z', ')'],
+ ':',
+ [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]],
+ 'B',
+ ['def',
+ 'BB',
+ ['(', 'a', 'b', 'c', ')'],
+ ':',
+ [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]],
+ 'C',
+ 'D',
+ ['def',
+ 'spam',
+ ['(', 'x', 'y', ')'],
+ ':',
+ [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]]
+ """
+ def checkPeerIndent(s,l,t):
+ if l >= len(s): return
+ curCol = col(l,s)
+ if curCol != indentStack[-1]:
+ if curCol > indentStack[-1]:
+ raise ParseFatalException(s,l,"illegal nesting")
+ raise ParseException(s,l,"not a peer entry")
+
+ def checkSubIndent(s,l,t):
+ curCol = col(l,s)
+ if curCol > indentStack[-1]:
+ indentStack.append( curCol )
+ else:
+ raise ParseException(s,l,"not a subentry")
+
+ def checkUnindent(s,l,t):
+ if l >= len(s): return
+ curCol = col(l,s)
+ if not(indentStack and curCol < indentStack[-1] and curCol <= indentStack[-2]):
+ raise ParseException(s,l,"not an unindent")
+ indentStack.pop()
+
+ NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress())
+ INDENT = (Empty() + Empty().setParseAction(checkSubIndent)).setName('INDENT')
+ PEER = Empty().setParseAction(checkPeerIndent).setName('')
+ UNDENT = Empty().setParseAction(checkUnindent).setName('UNINDENT')
+ if indent:
+ smExpr = Group( Optional(NL) +
+ #~ FollowedBy(blockStatementExpr) +
+ INDENT + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) + UNDENT)
+ else:
+ smExpr = Group( Optional(NL) +
+ (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) )
+ blockStatementExpr.ignore(_bslash + LineEnd())
+ return smExpr.setName('indented block')
+
+alphas8bit = srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]")
+punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]")
+
+anyOpenTag,anyCloseTag = makeHTMLTags(Word(alphas,alphanums+"_:").setName('any tag'))
+_htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(),'><& "\''))
+commonHTMLEntity = Regex('&(?P<entity>' + '|'.join(_htmlEntityMap.keys()) +");").setName("common HTML entity")
+def replaceHTMLEntity(t):
+ """Helper parser action to replace common HTML entities with their special characters"""
+ return _htmlEntityMap.get(t.entity)
+
+# it's easy to get these comment structures wrong - they're very common, so may as well make them available
+cStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/').setName("C style comment")
+"Comment of the form C{/* ... */}"
+
+htmlComment = Regex(r"<!--[\s\S]*?-->").setName("HTML comment")
+"Comment of the form C{<!-- ... -->}"
+
+restOfLine = Regex(r".*").leaveWhitespace().setName("rest of line")
+dblSlashComment = Regex(r"//(?:\\\n|[^\n])*").setName("// comment")
+"Comment of the form C{// ... (to end of line)}"
+
+cppStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/'| dblSlashComment).setName("C++ style comment")
+"Comment of either form C{L{cStyleComment}} or C{L{dblSlashComment}}"
+
+javaStyleComment = cppStyleComment
+"Same as C{L{cppStyleComment}}"
+
+pythonStyleComment = Regex(r"#.*").setName("Python style comment")
+"Comment of the form C{# ... (to end of line)}"
+
+_commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') +
+ Optional( Word(" \t") +
+ ~Literal(",") + ~LineEnd() ) ) ).streamline().setName("commaItem")
+commaSeparatedList = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("commaSeparatedList")
+"""(Deprecated) Predefined expression of 1 or more printable words or quoted strings, separated by commas.
+ This expression is deprecated in favor of L{pyparsing_common.comma_separated_list}."""
+
+# some other useful expressions - using lower-case class name since we are really using this as a namespace
+class pyparsing_common:
+ """
+ Here are some common low-level expressions that may be useful in jump-starting parser development:
+ - numeric forms (L{integers<integer>}, L{reals<real>}, L{scientific notation<sci_real>})
+ - common L{programming identifiers<identifier>}
+ - network addresses (L{MAC<mac_address>}, L{IPv4<ipv4_address>}, L{IPv6<ipv6_address>})
+ - ISO8601 L{dates<iso8601_date>} and L{datetime<iso8601_datetime>}
+ - L{UUID<uuid>}
+ - L{comma-separated list<comma_separated_list>}
+ Parse actions:
+ - C{L{convertToInteger}}
+ - C{L{convertToFloat}}
+ - C{L{convertToDate}}
+ - C{L{convertToDatetime}}
+ - C{L{stripHTMLTags}}
+ - C{L{upcaseTokens}}
+ - C{L{downcaseTokens}}
+
+ Example::
+ pyparsing_common.number.runTests('''
+ # any int or real number, returned as the appropriate type
+ 100
+ -100
+ +100
+ 3.14159
+ 6.02e23
+ 1e-12
+ ''')
+
+ pyparsing_common.fnumber.runTests('''
+ # any int or real number, returned as float
+ 100
+ -100
+ +100
+ 3.14159
+ 6.02e23
+ 1e-12
+ ''')
+
+ pyparsing_common.hex_integer.runTests('''
+ # hex numbers
+ 100
+ FF
+ ''')
+
+ pyparsing_common.fraction.runTests('''
+ # fractions
+ 1/2
+ -3/4
+ ''')
+
+ pyparsing_common.mixed_integer.runTests('''
+ # mixed fractions
+ 1
+ 1/2
+ -3/4
+ 1-3/4
+ ''')
+
+ import uuid
+ pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID))
+ pyparsing_common.uuid.runTests('''
+ # uuid
+ 12345678-1234-5678-1234-567812345678
+ ''')
+ prints::
+ # any int or real number, returned as the appropriate type
+ 100
+ [100]
+
+ -100
+ [-100]
+
+ +100
+ [100]
+
+ 3.14159
+ [3.14159]
+
+ 6.02e23
+ [6.02e+23]
+
+ 1e-12
+ [1e-12]
+
+ # any int or real number, returned as float
+ 100
+ [100.0]
+
+ -100
+ [-100.0]
+
+ +100
+ [100.0]
+
+ 3.14159
+ [3.14159]
+
+ 6.02e23
+ [6.02e+23]
+
+ 1e-12
+ [1e-12]
+
+ # hex numbers
+ 100
+ [256]
+
+ FF
+ [255]
+
+ # fractions
+ 1/2
+ [0.5]
+
+ -3/4
+ [-0.75]
+
+ # mixed fractions
+ 1
+ [1]
+
+ 1/2
+ [0.5]
+
+ -3/4
+ [-0.75]
+
+ 1-3/4
+ [1.75]
+
+ # uuid
+ 12345678-1234-5678-1234-567812345678
+ [UUID('12345678-1234-5678-1234-567812345678')]
+ """
+
+ convertToInteger = tokenMap(int)
+ """
+ Parse action for converting parsed integers to Python int
+ """
+
+ convertToFloat = tokenMap(float)
+ """
+ Parse action for converting parsed numbers to Python float
+ """
+
+ integer = Word(nums).setName("integer").setParseAction(convertToInteger)
+ """expression that parses an unsigned integer, returns an int"""
+
+ hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int,16))
+ """expression that parses a hexadecimal integer, returns an int"""
+
+ signed_integer = Regex(r'[+-]?\d+').setName("signed integer").setParseAction(convertToInteger)
+ """expression that parses an integer with optional leading sign, returns an int"""
+
+ fraction = (signed_integer().setParseAction(convertToFloat) + '/' + signed_integer().setParseAction(convertToFloat)).setName("fraction")
+ """fractional expression of an integer divided by an integer, returns a float"""
+ fraction.addParseAction(lambda t: t[0]/t[-1])
+
+ mixed_integer = (fraction | signed_integer + Optional(Optional('-').suppress() + fraction)).setName("fraction or mixed integer-fraction")
+ """mixed integer of the form 'integer - fraction', with optional leading integer, returns float"""
+ mixed_integer.addParseAction(sum)
+
+ real = Regex(r'[+-]?\d+\.\d*').setName("real number").setParseAction(convertToFloat)
+ """expression that parses a floating point number and returns a float"""
+
+ sci_real = Regex(r'[+-]?\d+([eE][+-]?\d+|\.\d*([eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat)
+ """expression that parses a floating point number with optional scientific notation and returns a float"""
+
+ # streamlining this expression makes the docs nicer-looking
+ number = (sci_real | real | signed_integer).streamline()
+ """any numeric expression, returns the corresponding Python type"""
+
+ fnumber = Regex(r'[+-]?\d+\.?\d*([eE][+-]?\d+)?').setName("fnumber").setParseAction(convertToFloat)
+ """any int or real number, returned as float"""
+
+ identifier = Word(alphas+'_', alphanums+'_').setName("identifier")
+ """typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')"""
+
+ ipv4_address = Regex(r'(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}').setName("IPv4 address")
+ "IPv4 address (C{0.0.0.0 - 255.255.255.255})"
+
+ _ipv6_part = Regex(r'[0-9a-fA-F]{1,4}').setName("hex_integer")
+ _full_ipv6_address = (_ipv6_part + (':' + _ipv6_part)*7).setName("full IPv6 address")
+ _short_ipv6_address = (Optional(_ipv6_part + (':' + _ipv6_part)*(0,6)) + "::" + Optional(_ipv6_part + (':' + _ipv6_part)*(0,6))).setName("short IPv6 address")
+ _short_ipv6_address.addCondition(lambda t: sum(1 for tt in t if pyparsing_common._ipv6_part.matches(tt)) < 8)
+ _mixed_ipv6_address = ("::ffff:" + ipv4_address).setName("mixed IPv6 address")
+ ipv6_address = Combine((_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).setName("IPv6 address")).setName("IPv6 address")
+ "IPv6 address (long, short, or mixed form)"
+
+ mac_address = Regex(r'[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}').setName("MAC address")
+ "MAC address xx:xx:xx:xx:xx (may also have '-' or '.' delimiters)"
+
+ @staticmethod
+ def convertToDate(fmt="%Y-%m-%d"):
+ """
+ Helper to create a parse action for converting parsed date string to Python datetime.date
+
+ Params -
+ - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%d"})
+
+ Example::
+ date_expr = pyparsing_common.iso8601_date.copy()
+ date_expr.setParseAction(pyparsing_common.convertToDate())
+ print(date_expr.parseString("1999-12-31"))
+ prints::
+ [datetime.date(1999, 12, 31)]
+ """
+ def cvt_fn(s,l,t):
+ try:
+ return datetime.strptime(t[0], fmt).date()
+ except ValueError as ve:
+ raise ParseException(s, l, str(ve))
+ return cvt_fn
+
+ @staticmethod
+ def convertToDatetime(fmt="%Y-%m-%dT%H:%M:%S.%f"):
+ """
+ Helper to create a parse action for converting parsed datetime string to Python datetime.datetime
+
+ Params -
+ - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%dT%H:%M:%S.%f"})
+
+ Example::
+ dt_expr = pyparsing_common.iso8601_datetime.copy()
+ dt_expr.setParseAction(pyparsing_common.convertToDatetime())
+ print(dt_expr.parseString("1999-12-31T23:59:59.999"))
+ prints::
+ [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)]
+ """
+ def cvt_fn(s,l,t):
+ try:
+ return datetime.strptime(t[0], fmt)
+ except ValueError as ve:
+ raise ParseException(s, l, str(ve))
+ return cvt_fn
+
+ iso8601_date = Regex(r'(?P<year>\d{4})(?:-(?P<month>\d\d)(?:-(?P<day>\d\d))?)?').setName("ISO8601 date")
+ "ISO8601 date (C{yyyy-mm-dd})"
+
+ iso8601_datetime = Regex(r'(?P<year>\d{4})-(?P<month>\d\d)-(?P<day>\d\d)[T ](?P<hour>\d\d):(?P<minute>\d\d)(:(?P<second>\d\d(\.\d*)?)?)?(?P<tz>Z|[+-]\d\d:?\d\d)?').setName("ISO8601 datetime")
+ "ISO8601 datetime (C{yyyy-mm-ddThh:mm:ss.s(Z|+-00:00)}) - trailing seconds, milliseconds, and timezone optional; accepts separating C{'T'} or C{' '}"
+
+ uuid = Regex(r'[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}').setName("UUID")
+ "UUID (C{xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx})"
+
+ _html_stripper = anyOpenTag.suppress() | anyCloseTag.suppress()
+ @staticmethod
+ def stripHTMLTags(s, l, tokens):
+ """
+ Parse action to remove HTML tags from web page HTML source
+
+ Example::
+ # strip HTML links from normal text
+ text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>'
+ td,td_end = makeHTMLTags("TD")
+ table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end
+
+ print(table_text.parseString(text).body) # -> 'More info at the pyparsing wiki page'
+ """
+ return pyparsing_common._html_stripper.transformString(tokens[0])
+
+ _commasepitem = Combine(OneOrMore(~Literal(",") + ~LineEnd() + Word(printables, excludeChars=',')
+ + Optional( White(" \t") ) ) ).streamline().setName("commaItem")
+ comma_separated_list = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("comma separated list")
+ """Predefined expression of 1 or more printable words or quoted strings, separated by commas."""
+
+ upcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).upper()))
+ """Parse action to convert tokens to upper case."""
+
+ downcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).lower()))
+ """Parse action to convert tokens to lower case."""
+
+
+if __name__ == "__main__":
+
+ selectToken = CaselessLiteral("select")
+ fromToken = CaselessLiteral("from")
+
+ ident = Word(alphas, alphanums + "_$")
+
+ columnName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens)
+ columnNameList = Group(delimitedList(columnName)).setName("columns")
+ columnSpec = ('*' | columnNameList)
+
+ tableName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens)
+ tableNameList = Group(delimitedList(tableName)).setName("tables")
+
+ simpleSQL = selectToken("command") + columnSpec("columns") + fromToken + tableNameList("tables")
+
+ # demo runTests method, including embedded comments in test string
+ simpleSQL.runTests("""
+ # '*' as column list and dotted table name
+ select * from SYS.XYZZY
+
+ # caseless match on "SELECT", and casts back to "select"
+ SELECT * from XYZZY, ABC
+
+ # list of column names, and mixed case SELECT keyword
+ Select AA,BB,CC from Sys.dual
+
+ # multiple tables
+ Select A, B, C from Sys.dual, Table2
+
+ # invalid SELECT keyword - should fail
+ Xelect A, B, C from Sys.dual
+
+ # incomplete command - should fail
+ Select
+
+ # invalid column name - should fail
+ Select ^^^ frox Sys.dual
+
+ """)
+
+ pyparsing_common.number.runTests("""
+ 100
+ -100
+ +100
+ 3.14159
+ 6.02e23
+ 1e-12
+ """)
+
+ # any int or real number, returned as float
+ pyparsing_common.fnumber.runTests("""
+ 100
+ -100
+ +100
+ 3.14159
+ 6.02e23
+ 1e-12
+ """)
+
+ pyparsing_common.hex_integer.runTests("""
+ 100
+ FF
+ """)
+
+ import uuid
+ pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID))
+ pyparsing_common.uuid.runTests("""
+ 12345678-1234-5678-1234-567812345678
+ """)
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/six.py b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/six.py
new file mode 100644
index 0000000000000000000000000000000000000000..190c0239cd7d7af82a6e0cbc8d68053fa2e3dfaf
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/_vendor/six.py
@@ -0,0 +1,868 @@
+"""Utilities for writing code that runs on Python 2 and 3"""
+
+# Copyright (c) 2010-2015 Benjamin Peterson
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+from __future__ import absolute_import
+
+import functools
+import itertools
+import operator
+import sys
+import types
+
+__author__ = "Benjamin Peterson <benjamin@python.org>"
+__version__ = "1.10.0"
+
+
+# Useful for very coarse version differentiation.
+PY2 = sys.version_info[0] == 2
+PY3 = sys.version_info[0] == 3
+PY34 = sys.version_info[0:2] >= (3, 4)
+
+if PY3:
+ string_types = str,
+ integer_types = int,
+ class_types = type,
+ text_type = str
+ binary_type = bytes
+
+ MAXSIZE = sys.maxsize
+else:
+ string_types = basestring,
+ integer_types = (int, long)
+ class_types = (type, types.ClassType)
+ text_type = unicode
+ binary_type = str
+
+ if sys.platform.startswith("java"):
+ # Jython always uses 32 bits.
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
+ class X(object):
+
+ def __len__(self):
+ return 1 << 31
+ try:
+ len(X())
+ except OverflowError:
+ # 32-bit
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # 64-bit
+ MAXSIZE = int((1 << 63) - 1)
+ del X
+
+
+def _add_doc(func, doc):
+ """Add documentation to a function."""
+ func.__doc__ = doc
+
+
+def _import_module(name):
+ """Import module, returning the module after the last dot."""
+ __import__(name)
+ return sys.modules[name]
+
+
+class _LazyDescr(object):
+
+ def __init__(self, name):
+ self.name = name
+
+ def __get__(self, obj, tp):
+ result = self._resolve()
+ setattr(obj, self.name, result) # Invokes __set__.
+ try:
+ # This is a bit ugly, but it avoids running this again by
+ # removing this descriptor.
+ delattr(obj.__class__, self.name)
+ except AttributeError:
+ pass
+ return result
+
+
+class MovedModule(_LazyDescr):
+
+ def __init__(self, name, old, new=None):
+ super(MovedModule, self).__init__(name)
+ if PY3:
+ if new is None:
+ new = name
+ self.mod = new
+ else:
+ self.mod = old
+
+ def _resolve(self):
+ return _import_module(self.mod)
+
+ def __getattr__(self, attr):
+ _module = self._resolve()
+ value = getattr(_module, attr)
+ setattr(self, attr, value)
+ return value
+
+
+class _LazyModule(types.ModuleType):
+
+ def __init__(self, name):
+ super(_LazyModule, self).__init__(name)
+ self.__doc__ = self.__class__.__doc__
+
+ def __dir__(self):
+ attrs = ["__doc__", "__name__"]
+ attrs += [attr.name for attr in self._moved_attributes]
+ return attrs
+
+ # Subclasses should override this
+ _moved_attributes = []
+
+
+class MovedAttribute(_LazyDescr):
+
+ def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
+ super(MovedAttribute, self).__init__(name)
+ if PY3:
+ if new_mod is None:
+ new_mod = name
+ self.mod = new_mod
+ if new_attr is None:
+ if old_attr is None:
+ new_attr = name
+ else:
+ new_attr = old_attr
+ self.attr = new_attr
+ else:
+ self.mod = old_mod
+ if old_attr is None:
+ old_attr = name
+ self.attr = old_attr
+
+ def _resolve(self):
+ module = _import_module(self.mod)
+ return getattr(module, self.attr)
+
+
+class _SixMetaPathImporter(object):
+
+ """
+ A meta path importer to import six.moves and its submodules.
+
+ This class implements a PEP302 finder and loader. It should be compatible
+ with Python 2.5 and all existing versions of Python3
+ """
+
+ def __init__(self, six_module_name):
+ self.name = six_module_name
+ self.known_modules = {}
+
+ def _add_module(self, mod, *fullnames):
+ for fullname in fullnames:
+ self.known_modules[self.name + "." + fullname] = mod
+
+ def _get_module(self, fullname):
+ return self.known_modules[self.name + "." + fullname]
+
+ def find_module(self, fullname, path=None):
+ if fullname in self.known_modules:
+ return self
+ return None
+
+ def __get_module(self, fullname):
+ try:
+ return self.known_modules[fullname]
+ except KeyError:
+ raise ImportError("This loader does not know module " + fullname)
+
+ def load_module(self, fullname):
+ try:
+ # in case of a reload
+ return sys.modules[fullname]
+ except KeyError:
+ pass
+ mod = self.__get_module(fullname)
+ if isinstance(mod, MovedModule):
+ mod = mod._resolve()
+ else:
+ mod.__loader__ = self
+ sys.modules[fullname] = mod
+ return mod
+
+ def is_package(self, fullname):
+ """
+ Return true, if the named module is a package.
+
+ We need this method to get correct spec objects with
+ Python 3.4 (see PEP451)
+ """
+ return hasattr(self.__get_module(fullname), "__path__")
+
+ def get_code(self, fullname):
+ """Return None
+
+ Required, if is_package is implemented"""
+ self.__get_module(fullname) # eventually raises ImportError
+ return None
+ get_source = get_code # same as get_code
+
+_importer = _SixMetaPathImporter(__name__)
+
+
+class _MovedItems(_LazyModule):
+
+ """Lazy loading of moved objects"""
+ __path__ = [] # mark as package
+
+
+_moved_attributes = [
+ MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
+ MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
+ MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
+ MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
+ MovedAttribute("intern", "__builtin__", "sys"),
+ MovedAttribute("map", "itertools", "builtins", "imap", "map"),
+ MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
+ MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
+ MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
+ MovedAttribute("reduce", "__builtin__", "functools"),
+ MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
+ MovedAttribute("StringIO", "StringIO", "io"),
+ MovedAttribute("UserDict", "UserDict", "collections"),
+ MovedAttribute("UserList", "UserList", "collections"),
+ MovedAttribute("UserString", "UserString", "collections"),
+ MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
+ MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
+ MovedModule("builtins", "__builtin__"),
+ MovedModule("configparser", "ConfigParser"),
+ MovedModule("copyreg", "copy_reg"),
+ MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
+ MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
+ MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
+ MovedModule("http_cookies", "Cookie", "http.cookies"),
+ MovedModule("html_entities", "htmlentitydefs", "html.entities"),
+ MovedModule("html_parser", "HTMLParser", "html.parser"),
+ MovedModule("http_client", "httplib", "http.client"),
+ MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
+ MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
+ MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
+ MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
+ MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
+ MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
+ MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
+ MovedModule("cPickle", "cPickle", "pickle"),
+ MovedModule("queue", "Queue"),
+ MovedModule("reprlib", "repr"),
+ MovedModule("socketserver", "SocketServer"),
+ MovedModule("_thread", "thread", "_thread"),
+ MovedModule("tkinter", "Tkinter"),
+ MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
+ MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
+ MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
+ MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
+ MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
+ MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
+ MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
+ MovedModule("tkinter_colorchooser", "tkColorChooser",
+ "tkinter.colorchooser"),
+ MovedModule("tkinter_commondialog", "tkCommonDialog",
+ "tkinter.commondialog"),
+ MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_font", "tkFont", "tkinter.font"),
+ MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
+ MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
+ "tkinter.simpledialog"),
+ MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
+ MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
+ MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
+ MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
+ MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
+ MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
+]
+# Add windows specific modules.
+if sys.platform == "win32":
+ _moved_attributes += [
+ MovedModule("winreg", "_winreg"),
+ ]
+
+for attr in _moved_attributes:
+ setattr(_MovedItems, attr.name, attr)
+ if isinstance(attr, MovedModule):
+ _importer._add_module(attr, "moves." + attr.name)
+del attr
+
+_MovedItems._moved_attributes = _moved_attributes
+
+moves = _MovedItems(__name__ + ".moves")
+_importer._add_module(moves, "moves")
+
+
+class Module_six_moves_urllib_parse(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_parse"""
+
+
+_urllib_parse_moved_attributes = [
+ MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
+ MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
+ MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
+ MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
+ MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
+ MovedAttribute("urljoin", "urlparse", "urllib.parse"),
+ MovedAttribute("urlparse", "urlparse", "urllib.parse"),
+ MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
+ MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
+ MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
+ MovedAttribute("quote", "urllib", "urllib.parse"),
+ MovedAttribute("quote_plus", "urllib", "urllib.parse"),
+ MovedAttribute("unquote", "urllib", "urllib.parse"),
+ MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
+ MovedAttribute("urlencode", "urllib", "urllib.parse"),
+ MovedAttribute("splitquery", "urllib", "urllib.parse"),
+ MovedAttribute("splittag", "urllib", "urllib.parse"),
+ MovedAttribute("splituser", "urllib", "urllib.parse"),
+ MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_params", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_query", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
+]
+for attr in _urllib_parse_moved_attributes:
+ setattr(Module_six_moves_urllib_parse, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
+ "moves.urllib_parse", "moves.urllib.parse")
+
+
+class Module_six_moves_urllib_error(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_error"""
+
+
+_urllib_error_moved_attributes = [
+ MovedAttribute("URLError", "urllib2", "urllib.error"),
+ MovedAttribute("HTTPError", "urllib2", "urllib.error"),
+ MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
+]
+for attr in _urllib_error_moved_attributes:
+ setattr(Module_six_moves_urllib_error, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
+ "moves.urllib_error", "moves.urllib.error")
+
+
+class Module_six_moves_urllib_request(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_request"""
+
+
+_urllib_request_moved_attributes = [
+ MovedAttribute("urlopen", "urllib2", "urllib.request"),
+ MovedAttribute("install_opener", "urllib2", "urllib.request"),
+ MovedAttribute("build_opener", "urllib2", "urllib.request"),
+ MovedAttribute("pathname2url", "urllib", "urllib.request"),
+ MovedAttribute("url2pathname", "urllib", "urllib.request"),
+ MovedAttribute("getproxies", "urllib", "urllib.request"),
+ MovedAttribute("Request", "urllib2", "urllib.request"),
+ MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
+ MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
+ MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
+ MovedAttribute("FileHandler", "urllib2", "urllib.request"),
+ MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
+ MovedAttribute("urlretrieve", "urllib", "urllib.request"),
+ MovedAttribute("urlcleanup", "urllib", "urllib.request"),
+ MovedAttribute("URLopener", "urllib", "urllib.request"),
+ MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
+ MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
+]
+for attr in _urllib_request_moved_attributes:
+ setattr(Module_six_moves_urllib_request, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
+ "moves.urllib_request", "moves.urllib.request")
+
+
+class Module_six_moves_urllib_response(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_response"""
+
+
+_urllib_response_moved_attributes = [
+ MovedAttribute("addbase", "urllib", "urllib.response"),
+ MovedAttribute("addclosehook", "urllib", "urllib.response"),
+ MovedAttribute("addinfo", "urllib", "urllib.response"),
+ MovedAttribute("addinfourl", "urllib", "urllib.response"),
+]
+for attr in _urllib_response_moved_attributes:
+ setattr(Module_six_moves_urllib_response, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
+ "moves.urllib_response", "moves.urllib.response")
+
+
+class Module_six_moves_urllib_robotparser(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_robotparser"""
+
+
+_urllib_robotparser_moved_attributes = [
+ MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
+]
+for attr in _urllib_robotparser_moved_attributes:
+ setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
+ "moves.urllib_robotparser", "moves.urllib.robotparser")
+
+
+class Module_six_moves_urllib(types.ModuleType):
+
+ """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
+ __path__ = [] # mark as package
+ parse = _importer._get_module("moves.urllib_parse")
+ error = _importer._get_module("moves.urllib_error")
+ request = _importer._get_module("moves.urllib_request")
+ response = _importer._get_module("moves.urllib_response")
+ robotparser = _importer._get_module("moves.urllib_robotparser")
+
+ def __dir__(self):
+ return ['parse', 'error', 'request', 'response', 'robotparser']
+
+_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
+ "moves.urllib")
+
+
+def add_move(move):
+ """Add an item to six.moves."""
+ setattr(_MovedItems, move.name, move)
+
+
+def remove_move(name):
+ """Remove item from six.moves."""
+ try:
+ delattr(_MovedItems, name)
+ except AttributeError:
+ try:
+ del moves.__dict__[name]
+ except KeyError:
+ raise AttributeError("no such move, %r" % (name,))
+
+
+if PY3:
+ _meth_func = "__func__"
+ _meth_self = "__self__"
+
+ _func_closure = "__closure__"
+ _func_code = "__code__"
+ _func_defaults = "__defaults__"
+ _func_globals = "__globals__"
+else:
+ _meth_func = "im_func"
+ _meth_self = "im_self"
+
+ _func_closure = "func_closure"
+ _func_code = "func_code"
+ _func_defaults = "func_defaults"
+ _func_globals = "func_globals"
+
+
+try:
+ advance_iterator = next
+except NameError:
+ def advance_iterator(it):
+ return it.next()
+next = advance_iterator
+
+
+try:
+ callable = callable
+except NameError:
+ def callable(obj):
+ return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
+
+
+if PY3:
+ def get_unbound_function(unbound):
+ return unbound
+
+ create_bound_method = types.MethodType
+
+ def create_unbound_method(func, cls):
+ return func
+
+ Iterator = object
+else:
+ def get_unbound_function(unbound):
+ return unbound.im_func
+
+ def create_bound_method(func, obj):
+ return types.MethodType(func, obj, obj.__class__)
+
+ def create_unbound_method(func, cls):
+ return types.MethodType(func, None, cls)
+
+ class Iterator(object):
+
+ def next(self):
+ return type(self).__next__(self)
+
+ callable = callable
+_add_doc(get_unbound_function,
+ """Get the function out of a possibly unbound function""")
+
+
+get_method_function = operator.attrgetter(_meth_func)
+get_method_self = operator.attrgetter(_meth_self)
+get_function_closure = operator.attrgetter(_func_closure)
+get_function_code = operator.attrgetter(_func_code)
+get_function_defaults = operator.attrgetter(_func_defaults)
+get_function_globals = operator.attrgetter(_func_globals)
+
+
+if PY3:
+ def iterkeys(d, **kw):
+ return iter(d.keys(**kw))
+
+ def itervalues(d, **kw):
+ return iter(d.values(**kw))
+
+ def iteritems(d, **kw):
+ return iter(d.items(**kw))
+
+ def iterlists(d, **kw):
+ return iter(d.lists(**kw))
+
+ viewkeys = operator.methodcaller("keys")
+
+ viewvalues = operator.methodcaller("values")
+
+ viewitems = operator.methodcaller("items")
+else:
+ def iterkeys(d, **kw):
+ return d.iterkeys(**kw)
+
+ def itervalues(d, **kw):
+ return d.itervalues(**kw)
+
+ def iteritems(d, **kw):
+ return d.iteritems(**kw)
+
+ def iterlists(d, **kw):
+ return d.iterlists(**kw)
+
+ viewkeys = operator.methodcaller("viewkeys")
+
+ viewvalues = operator.methodcaller("viewvalues")
+
+ viewitems = operator.methodcaller("viewitems")
+
+_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
+_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
+_add_doc(iteritems,
+ "Return an iterator over the (key, value) pairs of a dictionary.")
+_add_doc(iterlists,
+ "Return an iterator over the (key, [values]) pairs of a dictionary.")
+
+
+if PY3:
+ def b(s):
+ return s.encode("latin-1")
+
+ def u(s):
+ return s
+ unichr = chr
+ import struct
+ int2byte = struct.Struct(">B").pack
+ del struct
+ byte2int = operator.itemgetter(0)
+ indexbytes = operator.getitem
+ iterbytes = iter
+ import io
+ StringIO = io.StringIO
+ BytesIO = io.BytesIO
+ _assertCountEqual = "assertCountEqual"
+ if sys.version_info[1] <= 1:
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
+ else:
+ _assertRaisesRegex = "assertRaisesRegex"
+ _assertRegex = "assertRegex"
+else:
+ def b(s):
+ return s
+ # Workaround for standalone backslash
+
+ def u(s):
+ return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
+ unichr = unichr
+ int2byte = chr
+
+ def byte2int(bs):
+ return ord(bs[0])
+
+ def indexbytes(buf, i):
+ return ord(buf[i])
+ iterbytes = functools.partial(itertools.imap, ord)
+ import StringIO
+ StringIO = BytesIO = StringIO.StringIO
+ _assertCountEqual = "assertItemsEqual"
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
+_add_doc(b, """Byte literal""")
+_add_doc(u, """Text literal""")
+
+
+def assertCountEqual(self, *args, **kwargs):
+ return getattr(self, _assertCountEqual)(*args, **kwargs)
+
+
+def assertRaisesRegex(self, *args, **kwargs):
+ return getattr(self, _assertRaisesRegex)(*args, **kwargs)
+
+
+def assertRegex(self, *args, **kwargs):
+ return getattr(self, _assertRegex)(*args, **kwargs)
+
+
+if PY3:
+ exec_ = getattr(moves.builtins, "exec")
+
+ def reraise(tp, value, tb=None):
+ if value is None:
+ value = tp()
+ if value.__traceback__ is not tb:
+ raise value.with_traceback(tb)
+ raise value
+
+else:
+ def exec_(_code_, _globs_=None, _locs_=None):
+ """Execute code in a namespace."""
+ if _globs_ is None:
+ frame = sys._getframe(1)
+ _globs_ = frame.f_globals
+ if _locs_ is None:
+ _locs_ = frame.f_locals
+ del frame
+ elif _locs_ is None:
+ _locs_ = _globs_
+ exec("""exec _code_ in _globs_, _locs_""")
+
+ exec_("""def reraise(tp, value, tb=None):
+ raise tp, value, tb
+""")
+
+
+if sys.version_info[:2] == (3, 2):
+ exec_("""def raise_from(value, from_value):
+ if from_value is None:
+ raise value
+ raise value from from_value
+""")
+elif sys.version_info[:2] > (3, 2):
+ exec_("""def raise_from(value, from_value):
+ raise value from from_value
+""")
+else:
+ def raise_from(value, from_value):
+ raise value
+
+
+print_ = getattr(moves.builtins, "print", None)
+if print_ is None:
+ def print_(*args, **kwargs):
+ """The new-style print function for Python 2.4 and 2.5."""
+ fp = kwargs.pop("file", sys.stdout)
+ if fp is None:
+ return
+
+ def write(data):
+ if not isinstance(data, basestring):
+ data = str(data)
+ # If the file has an encoding, encode unicode with it.
+ if (isinstance(fp, file) and
+ isinstance(data, unicode) and
+ fp.encoding is not None):
+ errors = getattr(fp, "errors", None)
+ if errors is None:
+ errors = "strict"
+ data = data.encode(fp.encoding, errors)
+ fp.write(data)
+ want_unicode = False
+ sep = kwargs.pop("sep", None)
+ if sep is not None:
+ if isinstance(sep, unicode):
+ want_unicode = True
+ elif not isinstance(sep, str):
+ raise TypeError("sep must be None or a string")
+ end = kwargs.pop("end", None)
+ if end is not None:
+ if isinstance(end, unicode):
+ want_unicode = True
+ elif not isinstance(end, str):
+ raise TypeError("end must be None or a string")
+ if kwargs:
+ raise TypeError("invalid keyword arguments to print()")
+ if not want_unicode:
+ for arg in args:
+ if isinstance(arg, unicode):
+ want_unicode = True
+ break
+ if want_unicode:
+ newline = unicode("\n")
+ space = unicode(" ")
+ else:
+ newline = "\n"
+ space = " "
+ if sep is None:
+ sep = space
+ if end is None:
+ end = newline
+ for i, arg in enumerate(args):
+ if i:
+ write(sep)
+ write(arg)
+ write(end)
+if sys.version_info[:2] < (3, 3):
+ _print = print_
+
+ def print_(*args, **kwargs):
+ fp = kwargs.get("file", sys.stdout)
+ flush = kwargs.pop("flush", False)
+ _print(*args, **kwargs)
+ if flush and fp is not None:
+ fp.flush()
+
+_add_doc(reraise, """Reraise an exception.""")
+
+if sys.version_info[0:2] < (3, 4):
+ def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
+ updated=functools.WRAPPER_UPDATES):
+ def wrapper(f):
+ f = functools.wraps(wrapped, assigned, updated)(f)
+ f.__wrapped__ = wrapped
+ return f
+ return wrapper
+else:
+ wraps = functools.wraps
+
+
+def with_metaclass(meta, *bases):
+ """Create a base class with a metaclass."""
+ # This requires a bit of explanation: the basic idea is to make a dummy
+ # metaclass for one level of class instantiation that replaces itself with
+ # the actual metaclass.
+ class metaclass(meta):
+
+ def __new__(cls, name, this_bases, d):
+ return meta(name, bases, d)
+ return type.__new__(metaclass, 'temporary_class', (), {})
+
+
+def add_metaclass(metaclass):
+ """Class decorator for creating a class with a metaclass."""
+ def wrapper(cls):
+ orig_vars = cls.__dict__.copy()
+ slots = orig_vars.get('__slots__')
+ if slots is not None:
+ if isinstance(slots, str):
+ slots = [slots]
+ for slots_var in slots:
+ orig_vars.pop(slots_var)
+ orig_vars.pop('__dict__', None)
+ orig_vars.pop('__weakref__', None)
+ return metaclass(cls.__name__, cls.__bases__, orig_vars)
+ return wrapper
+
+
+def python_2_unicode_compatible(klass):
+ """
+ A decorator that defines __unicode__ and __str__ methods under Python 2.
+ Under Python 3 it does nothing.
+
+ To support Python 2 and 3 with a single code base, define a __str__ method
+ returning text and apply this decorator to the class.
+ """
+ if PY2:
+ if '__str__' not in klass.__dict__:
+ raise ValueError("@python_2_unicode_compatible cannot be applied "
+ "to %s because it doesn't define __str__()." %
+ klass.__name__)
+ klass.__unicode__ = klass.__str__
+ klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
+ return klass
+
+
+# Complete the moves implementation.
+# This code is at the end of this module to speed up module loading.
+# Turn this module into a package.
+__path__ = [] # required for PEP 302 and PEP 451
+__package__ = __name__ # see PEP 366 @ReservedAssignment
+if globals().get("__spec__") is not None:
+ __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
+# Remove other six meta path importers, since they cause problems. This can
+# happen if six is removed from sys.modules and then reloaded. (Setuptools does
+# this for some reason.)
+if sys.meta_path:
+ for i, importer in enumerate(sys.meta_path):
+ # Here's some real nastiness: Another "instance" of the six module might
+ # be floating around. Therefore, we can't use isinstance() to check for
+ # the six meta path importer, since the other six instance will have
+ # inserted an importer with different class.
+ if (type(importer).__name__ == "_SixMetaPathImporter" and
+ importer.name == __name__):
+ del sys.meta_path[i]
+ break
+ del i, importer
+# Finally, add the importer to the meta path import hook.
+sys.meta_path.append(_importer)
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/archive_util.py b/monEnvTP/lib/python3.8/site-packages/setuptools/archive_util.py
new file mode 100644
index 0000000000000000000000000000000000000000..81436044d995ff430334a7ef324b08e616f4b7a7
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/archive_util.py
@@ -0,0 +1,173 @@
+"""Utilities for extracting common archive formats"""
+
+import zipfile
+import tarfile
+import os
+import shutil
+import posixpath
+import contextlib
+from distutils.errors import DistutilsError
+
+from pkg_resources import ensure_directory
+
+__all__ = [
+ "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter",
+ "UnrecognizedFormat", "extraction_drivers", "unpack_directory",
+]
+
+
+class UnrecognizedFormat(DistutilsError):
+ """Couldn't recognize the archive type"""
+
+
+def default_filter(src, dst):
+ """The default progress/filter callback; returns True for all files"""
+ return dst
+
+
+def unpack_archive(filename, extract_dir, progress_filter=default_filter,
+ drivers=None):
+ """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat``
+
+ `progress_filter` is a function taking two arguments: a source path
+ internal to the archive ('/'-separated), and a filesystem path where it
+ will be extracted. The callback must return the desired extract path
+ (which may be the same as the one passed in), or else ``None`` to skip
+ that file or directory. The callback can thus be used to report on the
+ progress of the extraction, as well as to filter the items extracted or
+ alter their extraction paths.
+
+ `drivers`, if supplied, must be a non-empty sequence of functions with the
+ same signature as this function (minus the `drivers` argument), that raise
+ ``UnrecognizedFormat`` if they do not support extracting the designated
+ archive type. The `drivers` are tried in sequence until one is found that
+ does not raise an error, or until all are exhausted (in which case
+ ``UnrecognizedFormat`` is raised). If you do not supply a sequence of
+ drivers, the module's ``extraction_drivers`` constant will be used, which
+ means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that
+ order.
+ """
+ for driver in drivers or extraction_drivers:
+ try:
+ driver(filename, extract_dir, progress_filter)
+ except UnrecognizedFormat:
+ continue
+ else:
+ return
+ else:
+ raise UnrecognizedFormat(
+ "Not a recognized archive type: %s" % filename
+ )
+
+
+def unpack_directory(filename, extract_dir, progress_filter=default_filter):
+ """"Unpack" a directory, using the same interface as for archives
+
+ Raises ``UnrecognizedFormat`` if `filename` is not a directory
+ """
+ if not os.path.isdir(filename):
+ raise UnrecognizedFormat("%s is not a directory" % filename)
+
+ paths = {
+ filename: ('', extract_dir),
+ }
+ for base, dirs, files in os.walk(filename):
+ src, dst = paths[base]
+ for d in dirs:
+ paths[os.path.join(base, d)] = src + d + '/', os.path.join(dst, d)
+ for f in files:
+ target = os.path.join(dst, f)
+ target = progress_filter(src + f, target)
+ if not target:
+ # skip non-files
+ continue
+ ensure_directory(target)
+ f = os.path.join(base, f)
+ shutil.copyfile(f, target)
+ shutil.copystat(f, target)
+
+
+def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
+ """Unpack zip `filename` to `extract_dir`
+
+ Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined
+ by ``zipfile.is_zipfile()``). See ``unpack_archive()`` for an explanation
+ of the `progress_filter` argument.
+ """
+
+ if not zipfile.is_zipfile(filename):
+ raise UnrecognizedFormat("%s is not a zip file" % (filename,))
+
+ with zipfile.ZipFile(filename) as z:
+ for info in z.infolist():
+ name = info.filename
+
+ # don't extract absolute paths or ones with .. in them
+ if name.startswith('/') or '..' in name.split('/'):
+ continue
+
+ target = os.path.join(extract_dir, *name.split('/'))
+ target = progress_filter(name, target)
+ if not target:
+ continue
+ if name.endswith('/'):
+ # directory
+ ensure_directory(target)
+ else:
+ # file
+ ensure_directory(target)
+ data = z.read(info.filename)
+ with open(target, 'wb') as f:
+ f.write(data)
+ unix_attributes = info.external_attr >> 16
+ if unix_attributes:
+ os.chmod(target, unix_attributes)
+
+
+def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
+ """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
+
+ Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined
+ by ``tarfile.open()``). See ``unpack_archive()`` for an explanation
+ of the `progress_filter` argument.
+ """
+ try:
+ tarobj = tarfile.open(filename)
+ except tarfile.TarError:
+ raise UnrecognizedFormat(
+ "%s is not a compressed or uncompressed tar file" % (filename,)
+ )
+ with contextlib.closing(tarobj):
+ # don't do any chowning!
+ tarobj.chown = lambda *args: None
+ for member in tarobj:
+ name = member.name
+ # don't extract absolute paths or ones with .. in them
+ if not name.startswith('/') and '..' not in name.split('/'):
+ prelim_dst = os.path.join(extract_dir, *name.split('/'))
+
+ # resolve any links and to extract the link targets as normal
+ # files
+ while member is not None and (member.islnk() or member.issym()):
+ linkpath = member.linkname
+ if member.issym():
+ base = posixpath.dirname(member.name)
+ linkpath = posixpath.join(base, linkpath)
+ linkpath = posixpath.normpath(linkpath)
+ member = tarobj._getmember(linkpath)
+
+ if member is not None and (member.isfile() or member.isdir()):
+ final_dst = progress_filter(name, prelim_dst)
+ if final_dst:
+ if final_dst.endswith(os.sep):
+ final_dst = final_dst[:-1]
+ try:
+ # XXX Ugh
+ tarobj._extract_member(member, final_dst)
+ except tarfile.ExtractError:
+ # chown/chmod/mkfifo/mknode/makedev failed
+ pass
+ return True
+
+
+extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/build_meta.py b/monEnvTP/lib/python3.8/site-packages/setuptools/build_meta.py
new file mode 100644
index 0000000000000000000000000000000000000000..10c4b528d996d23a1319e3fed755aef0e6da2eb9
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/build_meta.py
@@ -0,0 +1,257 @@
+"""A PEP 517 interface to setuptools
+
+Previously, when a user or a command line tool (let's call it a "frontend")
+needed to make a request of setuptools to take a certain action, for
+example, generating a list of installation requirements, the frontend would
+would call "setup.py egg_info" or "setup.py bdist_wheel" on the command line.
+
+PEP 517 defines a different method of interfacing with setuptools. Rather
+than calling "setup.py" directly, the frontend should:
+
+ 1. Set the current directory to the directory with a setup.py file
+ 2. Import this module into a safe python interpreter (one in which
+ setuptools can potentially set global variables or crash hard).
+ 3. Call one of the functions defined in PEP 517.
+
+What each function does is defined in PEP 517. However, here is a "casual"
+definition of the functions (this definition should not be relied on for
+bug reports or API stability):
+
+ - `build_wheel`: build a wheel in the folder and return the basename
+ - `get_requires_for_build_wheel`: get the `setup_requires` to build
+ - `prepare_metadata_for_build_wheel`: get the `install_requires`
+ - `build_sdist`: build an sdist in the folder and return the basename
+ - `get_requires_for_build_sdist`: get the `setup_requires` to build
+
+Again, this is not a formal definition! Just a "taste" of the module.
+"""
+
+import io
+import os
+import sys
+import tokenize
+import shutil
+import contextlib
+
+import setuptools
+import distutils
+from setuptools.py31compat import TemporaryDirectory
+
+from pkg_resources import parse_requirements
+from pkg_resources.py31compat import makedirs
+
+__all__ = ['get_requires_for_build_sdist',
+ 'get_requires_for_build_wheel',
+ 'prepare_metadata_for_build_wheel',
+ 'build_wheel',
+ 'build_sdist',
+ '__legacy__',
+ 'SetupRequirementsError']
+
+class SetupRequirementsError(BaseException):
+ def __init__(self, specifiers):
+ self.specifiers = specifiers
+
+
+class Distribution(setuptools.dist.Distribution):
+ def fetch_build_eggs(self, specifiers):
+ specifier_list = list(map(str, parse_requirements(specifiers)))
+
+ raise SetupRequirementsError(specifier_list)
+
+ @classmethod
+ @contextlib.contextmanager
+ def patch(cls):
+ """
+ Replace
+ distutils.dist.Distribution with this class
+ for the duration of this context.
+ """
+ orig = distutils.core.Distribution
+ distutils.core.Distribution = cls
+ try:
+ yield
+ finally:
+ distutils.core.Distribution = orig
+
+
+def _to_str(s):
+ """
+ Convert a filename to a string (on Python 2, explicitly
+ a byte string, not Unicode) as distutils checks for the
+ exact type str.
+ """
+ if sys.version_info[0] == 2 and not isinstance(s, str):
+ # Assume it's Unicode, as that's what the PEP says
+ # should be provided.
+ return s.encode(sys.getfilesystemencoding())
+ return s
+
+
+def _get_immediate_subdirectories(a_dir):
+ return [name for name in os.listdir(a_dir)
+ if os.path.isdir(os.path.join(a_dir, name))]
+
+
+def _file_with_extension(directory, extension):
+ matching = (
+ f for f in os.listdir(directory)
+ if f.endswith(extension)
+ )
+ file, = matching
+ return file
+
+
+def _open_setup_script(setup_script):
+ if not os.path.exists(setup_script):
+ # Supply a default setup.py
+ return io.StringIO(u"from setuptools import setup; setup()")
+
+ return getattr(tokenize, 'open', open)(setup_script)
+
+
+class _BuildMetaBackend(object):
+
+ def _fix_config(self, config_settings):
+ config_settings = config_settings or {}
+ config_settings.setdefault('--global-option', [])
+ return config_settings
+
+ def _get_build_requires(self, config_settings, requirements):
+ config_settings = self._fix_config(config_settings)
+
+ sys.argv = sys.argv[:1] + ['egg_info'] + \
+ config_settings["--global-option"]
+ try:
+ with Distribution.patch():
+ self.run_setup()
+ except SetupRequirementsError as e:
+ requirements += e.specifiers
+
+ return requirements
+
+ def run_setup(self, setup_script='setup.py'):
+ # Note that we can reuse our build directory between calls
+ # Correctness comes first, then optimization later
+ __file__ = setup_script
+ __name__ = '__main__'
+
+ with _open_setup_script(__file__) as f:
+ code = f.read().replace(r'\r\n', r'\n')
+
+ exec(compile(code, __file__, 'exec'), locals())
+
+ def get_requires_for_build_wheel(self, config_settings=None):
+ config_settings = self._fix_config(config_settings)
+ return self._get_build_requires(config_settings, requirements=['wheel'])
+
+ def get_requires_for_build_sdist(self, config_settings=None):
+ config_settings = self._fix_config(config_settings)
+ return self._get_build_requires(config_settings, requirements=[])
+
+ def prepare_metadata_for_build_wheel(self, metadata_directory,
+ config_settings=None):
+ sys.argv = sys.argv[:1] + ['dist_info', '--egg-base',
+ _to_str(metadata_directory)]
+ self.run_setup()
+
+ dist_info_directory = metadata_directory
+ while True:
+ dist_infos = [f for f in os.listdir(dist_info_directory)
+ if f.endswith('.dist-info')]
+
+ if (len(dist_infos) == 0 and
+ len(_get_immediate_subdirectories(dist_info_directory)) == 1):
+
+ dist_info_directory = os.path.join(
+ dist_info_directory, os.listdir(dist_info_directory)[0])
+ continue
+
+ assert len(dist_infos) == 1
+ break
+
+ # PEP 517 requires that the .dist-info directory be placed in the
+ # metadata_directory. To comply, we MUST copy the directory to the root
+ if dist_info_directory != metadata_directory:
+ shutil.move(
+ os.path.join(dist_info_directory, dist_infos[0]),
+ metadata_directory)
+ shutil.rmtree(dist_info_directory, ignore_errors=True)
+
+ return dist_infos[0]
+
+ def _build_with_temp_dir(self, setup_command, result_extension,
+ result_directory, config_settings):
+ config_settings = self._fix_config(config_settings)
+ result_directory = os.path.abspath(result_directory)
+
+ # Build in a temporary directory, then copy to the target.
+ makedirs(result_directory, exist_ok=True)
+ with TemporaryDirectory(dir=result_directory) as tmp_dist_dir:
+ sys.argv = (sys.argv[:1] + setup_command +
+ ['--dist-dir', tmp_dist_dir] +
+ config_settings["--global-option"])
+ self.run_setup()
+
+ result_basename = _file_with_extension(tmp_dist_dir, result_extension)
+ result_path = os.path.join(result_directory, result_basename)
+ if os.path.exists(result_path):
+ # os.rename will fail overwriting on non-Unix.
+ os.remove(result_path)
+ os.rename(os.path.join(tmp_dist_dir, result_basename), result_path)
+
+ return result_basename
+
+
+ def build_wheel(self, wheel_directory, config_settings=None,
+ metadata_directory=None):
+ return self._build_with_temp_dir(['bdist_wheel'], '.whl',
+ wheel_directory, config_settings)
+
+ def build_sdist(self, sdist_directory, config_settings=None):
+ return self._build_with_temp_dir(['sdist', '--formats', 'gztar'],
+ '.tar.gz', sdist_directory,
+ config_settings)
+
+
+class _BuildMetaLegacyBackend(_BuildMetaBackend):
+ """Compatibility backend for setuptools
+
+ This is a version of setuptools.build_meta that endeavors to maintain backwards
+ compatibility with pre-PEP 517 modes of invocation. It exists as a temporary
+ bridge between the old packaging mechanism and the new packaging mechanism,
+ and will eventually be removed.
+ """
+ def run_setup(self, setup_script='setup.py'):
+ # In order to maintain compatibility with scripts assuming that
+ # the setup.py script is in a directory on the PYTHONPATH, inject
+ # '' into sys.path. (pypa/setuptools#1642)
+ sys_path = list(sys.path) # Save the original path
+
+ script_dir = os.path.dirname(os.path.abspath(setup_script))
+ if script_dir not in sys.path:
+ sys.path.insert(0, script_dir)
+
+ try:
+ super(_BuildMetaLegacyBackend,
+ self).run_setup(setup_script=setup_script)
+ finally:
+ # While PEP 517 frontends should be calling each hook in a fresh
+ # subprocess according to the standard (and thus it should not be
+ # strictly necessary to restore the old sys.path), we'll restore
+ # the original path so that the path manipulation does not persist
+ # within the hook after run_setup is called.
+ sys.path[:] = sys_path
+
+# The primary backend
+_BACKEND = _BuildMetaBackend()
+
+get_requires_for_build_wheel = _BACKEND.get_requires_for_build_wheel
+get_requires_for_build_sdist = _BACKEND.get_requires_for_build_sdist
+prepare_metadata_for_build_wheel = _BACKEND.prepare_metadata_for_build_wheel
+build_wheel = _BACKEND.build_wheel
+build_sdist = _BACKEND.build_sdist
+
+
+# The legacy backend
+__legacy__ = _BuildMetaLegacyBackend()
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/cli-32.exe b/monEnvTP/lib/python3.8/site-packages/setuptools/cli-32.exe
new file mode 100644
index 0000000000000000000000000000000000000000..b1487b7819e7286577a043c7726fbe0ca1543083
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/cli-32.exe differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/cli-64.exe b/monEnvTP/lib/python3.8/site-packages/setuptools/cli-64.exe
new file mode 100644
index 0000000000000000000000000000000000000000..675e6bf3743f3d3011c238657e7128ee9960ef7f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/cli-64.exe differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/cli.exe b/monEnvTP/lib/python3.8/site-packages/setuptools/cli.exe
new file mode 100644
index 0000000000000000000000000000000000000000..b1487b7819e7286577a043c7726fbe0ca1543083
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/cli.exe differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/__init__.py b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..743f5588faf3ad79850df7bd196749e7a6c03f93
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__init__.py
@@ -0,0 +1,17 @@
+__all__ = [
+ 'alias', 'bdist_egg', 'bdist_rpm', 'build_ext', 'build_py', 'develop',
+ 'easy_install', 'egg_info', 'install', 'install_lib', 'rotate', 'saveopts',
+ 'sdist', 'setopt', 'test', 'install_egg_info', 'install_scripts',
+ 'bdist_wininst', 'upload_docs', 'build_clib', 'dist_info',
+]
+
+from distutils.command.bdist import bdist
+import sys
+
+from setuptools.command import install_scripts
+
+if 'egg' not in bdist.format_commands:
+ bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")
+ bdist.format_commands.append('egg')
+
+del bdist, sys
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..fdb34567426024940ec2288bc1eda8bb47204b7f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/alias.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/alias.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e54a81cd4805284a36520fd2af9d8bd3966e419c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/alias.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/bdist_egg.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/bdist_egg.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5be692f9d1dd722891c297a36172788d6d519047
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/bdist_egg.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/bdist_rpm.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/bdist_rpm.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7418aec26a2fbc39f5d40fe9cf96d4f66893b414
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/bdist_rpm.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/bdist_wininst.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/bdist_wininst.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c14e362110d3bc1dc6aaac223886f44431fadb35
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/bdist_wininst.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/build_clib.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/build_clib.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d31a351a8649a142a02f15cd8af65b4bc541c546
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/build_clib.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/build_ext.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/build_ext.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ff933fd4674f657b2639c0fae14abb01195d1891
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/build_ext.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/build_py.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/build_py.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e8868f96bd7c5957ca27010cc3692f65f4ba8edb
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/build_py.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/develop.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/develop.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8ec169bb7bf2ed342478be02fb7540e2a1378ab2
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/develop.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/dist_info.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/dist_info.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6bbb8a626414944916e24ae5a9f36ccd1b85300d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/dist_info.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/easy_install.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/easy_install.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..839ad2c80706f533a0d302f6b00d49825b8bc493
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/easy_install.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/egg_info.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/egg_info.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a99bf5c8966ce02c9e865ad3bed8517bb1fea2ac
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/egg_info.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/install.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/install.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9cb2b45ddd7629b855aabd838fe83c600c53cbee
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/install.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/install_egg_info.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/install_egg_info.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bdb530e4edfb18202e936ca49ced97da4e1d9c17
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/install_egg_info.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/install_lib.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/install_lib.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..364dac96400eeeb424ac9036649532dc999f3b1c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/install_lib.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/install_scripts.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/install_scripts.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c9ab86e9033650cf37808cf3565ee73b8a8610ac
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/install_scripts.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/py36compat.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/py36compat.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..dfe3238e053046168b95d0bf0f7d35cca5146bd3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/py36compat.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/register.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/register.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3c41865d835cb392d914b756de9e132ea331e38f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/register.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/rotate.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/rotate.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..de66e728fffa71cc43cf623c02ac0497cba2be77
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/rotate.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/saveopts.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/saveopts.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c3b356ebb6f85b2159f85845567fb9b3d1a78cde
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/saveopts.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/sdist.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/sdist.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..19af84f8bbff524f4c03462b1c594fc37e580cf9
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/sdist.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/setopt.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/setopt.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..16c9383a2067984762ce2bec57b760c86e1a06c3
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/setopt.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/test.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/test.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2a69b466dc613f0e5090f732d5f2e5dedf3f2bb7
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/test.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/upload.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/upload.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..929ff15238ab456a86e735d69126dfafdef1080d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/upload.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/upload_docs.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/upload_docs.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2078b5c3453f4f4e9c5dd61264bb10e6ddd4a43f
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/command/__pycache__/upload_docs.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/alias.py b/monEnvTP/lib/python3.8/site-packages/setuptools/command/alias.py
new file mode 100644
index 0000000000000000000000000000000000000000..4532b1cc0dca76227927e873f9c64f01008e565a
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/command/alias.py
@@ -0,0 +1,80 @@
+from distutils.errors import DistutilsOptionError
+
+from setuptools.extern.six.moves import map
+
+from setuptools.command.setopt import edit_config, option_base, config_file
+
+
+def shquote(arg):
+ """Quote an argument for later parsing by shlex.split()"""
+ for c in '"', "'", "\\", "#":
+ if c in arg:
+ return repr(arg)
+ if arg.split() != [arg]:
+ return repr(arg)
+ return arg
+
+
+class alias(option_base):
+ """Define a shortcut that invokes one or more commands"""
+
+ description = "define a shortcut to invoke one or more commands"
+ command_consumes_arguments = True
+
+ user_options = [
+ ('remove', 'r', 'remove (unset) the alias'),
+ ] + option_base.user_options
+
+ boolean_options = option_base.boolean_options + ['remove']
+
+ def initialize_options(self):
+ option_base.initialize_options(self)
+ self.args = None
+ self.remove = None
+
+ def finalize_options(self):
+ option_base.finalize_options(self)
+ if self.remove and len(self.args) != 1:
+ raise DistutilsOptionError(
+ "Must specify exactly one argument (the alias name) when "
+ "using --remove"
+ )
+
+ def run(self):
+ aliases = self.distribution.get_option_dict('aliases')
+
+ if not self.args:
+ print("Command Aliases")
+ print("---------------")
+ for alias in aliases:
+ print("setup.py alias", format_alias(alias, aliases))
+ return
+
+ elif len(self.args) == 1:
+ alias, = self.args
+ if self.remove:
+ command = None
+ elif alias in aliases:
+ print("setup.py alias", format_alias(alias, aliases))
+ return
+ else:
+ print("No alias definition found for %r" % alias)
+ return
+ else:
+ alias = self.args[0]
+ command = ' '.join(map(shquote, self.args[1:]))
+
+ edit_config(self.filename, {'aliases': {alias: command}}, self.dry_run)
+
+
+def format_alias(name, aliases):
+ source, command = aliases[name]
+ if source == config_file('global'):
+ source = '--global-config '
+ elif source == config_file('user'):
+ source = '--user-config '
+ elif source == config_file('local'):
+ source = ''
+ else:
+ source = '--filename=%r' % source
+ return source + name + ' ' + command
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/bdist_egg.py b/monEnvTP/lib/python3.8/site-packages/setuptools/command/bdist_egg.py
new file mode 100644
index 0000000000000000000000000000000000000000..98470f1715b21befab94b3e84428622a1ba86463
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/command/bdist_egg.py
@@ -0,0 +1,502 @@
+"""setuptools.command.bdist_egg
+
+Build .egg distributions"""
+
+from distutils.errors import DistutilsSetupError
+from distutils.dir_util import remove_tree, mkpath
+from distutils import log
+from types import CodeType
+import sys
+import os
+import re
+import textwrap
+import marshal
+
+from setuptools.extern import six
+
+from pkg_resources import get_build_platform, Distribution, ensure_directory
+from pkg_resources import EntryPoint
+from setuptools.extension import Library
+from setuptools import Command
+
+try:
+ # Python 2.7 or >=3.2
+ from sysconfig import get_path, get_python_version
+
+ def _get_purelib():
+ return get_path("purelib")
+except ImportError:
+ from distutils.sysconfig import get_python_lib, get_python_version
+
+ def _get_purelib():
+ return get_python_lib(False)
+
+
+def strip_module(filename):
+ if '.' in filename:
+ filename = os.path.splitext(filename)[0]
+ if filename.endswith('module'):
+ filename = filename[:-6]
+ return filename
+
+
+def sorted_walk(dir):
+ """Do os.walk in a reproducible way,
+ independent of indeterministic filesystem readdir order
+ """
+ for base, dirs, files in os.walk(dir):
+ dirs.sort()
+ files.sort()
+ yield base, dirs, files
+
+
+def write_stub(resource, pyfile):
+ _stub_template = textwrap.dedent("""
+ def __bootstrap__():
+ global __bootstrap__, __loader__, __file__
+ import sys, pkg_resources, imp
+ __file__ = pkg_resources.resource_filename(__name__, %r)
+ __loader__ = None; del __bootstrap__, __loader__
+ imp.load_dynamic(__name__,__file__)
+ __bootstrap__()
+ """).lstrip()
+ with open(pyfile, 'w') as f:
+ f.write(_stub_template % resource)
+
+
+class bdist_egg(Command):
+ description = "create an \"egg\" distribution"
+
+ user_options = [
+ ('bdist-dir=', 'b',
+ "temporary directory for creating the distribution"),
+ ('plat-name=', 'p', "platform name to embed in generated filenames "
+ "(default: %s)" % get_build_platform()),
+ ('exclude-source-files', None,
+ "remove all .py files from the generated egg"),
+ ('keep-temp', 'k',
+ "keep the pseudo-installation tree around after " +
+ "creating the distribution archive"),
+ ('dist-dir=', 'd',
+ "directory to put final built distributions in"),
+ ('skip-build', None,
+ "skip rebuilding everything (for testing/debugging)"),
+ ]
+
+ boolean_options = [
+ 'keep-temp', 'skip-build', 'exclude-source-files'
+ ]
+
+ def initialize_options(self):
+ self.bdist_dir = None
+ self.plat_name = None
+ self.keep_temp = 0
+ self.dist_dir = None
+ self.skip_build = 0
+ self.egg_output = None
+ self.exclude_source_files = None
+
+ def finalize_options(self):
+ ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info")
+ self.egg_info = ei_cmd.egg_info
+
+ if self.bdist_dir is None:
+ bdist_base = self.get_finalized_command('bdist').bdist_base
+ self.bdist_dir = os.path.join(bdist_base, 'egg')
+
+ if self.plat_name is None:
+ self.plat_name = get_build_platform()
+
+ self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
+
+ if self.egg_output is None:
+
+ # Compute filename of the output egg
+ basename = Distribution(
+ None, None, ei_cmd.egg_name, ei_cmd.egg_version,
+ get_python_version(),
+ self.distribution.has_ext_modules() and self.plat_name
+ ).egg_name()
+
+ self.egg_output = os.path.join(self.dist_dir, basename + '.egg')
+
+ def do_install_data(self):
+ # Hack for packages that install data to install's --install-lib
+ self.get_finalized_command('install').install_lib = self.bdist_dir
+
+ site_packages = os.path.normcase(os.path.realpath(_get_purelib()))
+ old, self.distribution.data_files = self.distribution.data_files, []
+
+ for item in old:
+ if isinstance(item, tuple) and len(item) == 2:
+ if os.path.isabs(item[0]):
+ realpath = os.path.realpath(item[0])
+ normalized = os.path.normcase(realpath)
+ if normalized == site_packages or normalized.startswith(
+ site_packages + os.sep
+ ):
+ item = realpath[len(site_packages) + 1:], item[1]
+ # XXX else: raise ???
+ self.distribution.data_files.append(item)
+
+ try:
+ log.info("installing package data to %s", self.bdist_dir)
+ self.call_command('install_data', force=0, root=None)
+ finally:
+ self.distribution.data_files = old
+
+ def get_outputs(self):
+ return [self.egg_output]
+
+ def call_command(self, cmdname, **kw):
+ """Invoke reinitialized command `cmdname` with keyword args"""
+ for dirname in INSTALL_DIRECTORY_ATTRS:
+ kw.setdefault(dirname, self.bdist_dir)
+ kw.setdefault('skip_build', self.skip_build)
+ kw.setdefault('dry_run', self.dry_run)
+ cmd = self.reinitialize_command(cmdname, **kw)
+ self.run_command(cmdname)
+ return cmd
+
+ def run(self):
+ # Generate metadata first
+ self.run_command("egg_info")
+ # We run install_lib before install_data, because some data hacks
+ # pull their data path from the install_lib command.
+ log.info("installing library code to %s", self.bdist_dir)
+ instcmd = self.get_finalized_command('install')
+ old_root = instcmd.root
+ instcmd.root = None
+ if self.distribution.has_c_libraries() and not self.skip_build:
+ self.run_command('build_clib')
+ cmd = self.call_command('install_lib', warn_dir=0)
+ instcmd.root = old_root
+
+ all_outputs, ext_outputs = self.get_ext_outputs()
+ self.stubs = []
+ to_compile = []
+ for (p, ext_name) in enumerate(ext_outputs):
+ filename, ext = os.path.splitext(ext_name)
+ pyfile = os.path.join(self.bdist_dir, strip_module(filename) +
+ '.py')
+ self.stubs.append(pyfile)
+ log.info("creating stub loader for %s", ext_name)
+ if not self.dry_run:
+ write_stub(os.path.basename(ext_name), pyfile)
+ to_compile.append(pyfile)
+ ext_outputs[p] = ext_name.replace(os.sep, '/')
+
+ if to_compile:
+ cmd.byte_compile(to_compile)
+ if self.distribution.data_files:
+ self.do_install_data()
+
+ # Make the EGG-INFO directory
+ archive_root = self.bdist_dir
+ egg_info = os.path.join(archive_root, 'EGG-INFO')
+ self.mkpath(egg_info)
+ if self.distribution.scripts:
+ script_dir = os.path.join(egg_info, 'scripts')
+ log.info("installing scripts to %s", script_dir)
+ self.call_command('install_scripts', install_dir=script_dir,
+ no_ep=1)
+
+ self.copy_metadata_to(egg_info)
+ native_libs = os.path.join(egg_info, "native_libs.txt")
+ if all_outputs:
+ log.info("writing %s", native_libs)
+ if not self.dry_run:
+ ensure_directory(native_libs)
+ libs_file = open(native_libs, 'wt')
+ libs_file.write('\n'.join(all_outputs))
+ libs_file.write('\n')
+ libs_file.close()
+ elif os.path.isfile(native_libs):
+ log.info("removing %s", native_libs)
+ if not self.dry_run:
+ os.unlink(native_libs)
+
+ write_safety_flag(
+ os.path.join(archive_root, 'EGG-INFO'), self.zip_safe()
+ )
+
+ if os.path.exists(os.path.join(self.egg_info, 'depends.txt')):
+ log.warn(
+ "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
+ "Use the install_requires/extras_require setup() args instead."
+ )
+
+ if self.exclude_source_files:
+ self.zap_pyfiles()
+
+ # Make the archive
+ make_zipfile(self.egg_output, archive_root, verbose=self.verbose,
+ dry_run=self.dry_run, mode=self.gen_header())
+ if not self.keep_temp:
+ remove_tree(self.bdist_dir, dry_run=self.dry_run)
+
+ # Add to 'Distribution.dist_files' so that the "upload" command works
+ getattr(self.distribution, 'dist_files', []).append(
+ ('bdist_egg', get_python_version(), self.egg_output))
+
+ def zap_pyfiles(self):
+ log.info("Removing .py files from temporary directory")
+ for base, dirs, files in walk_egg(self.bdist_dir):
+ for name in files:
+ path = os.path.join(base, name)
+
+ if name.endswith('.py'):
+ log.debug("Deleting %s", path)
+ os.unlink(path)
+
+ if base.endswith('__pycache__'):
+ path_old = path
+
+ pattern = r'(?P<name>.+)\.(?P<magic>[^.]+)\.pyc'
+ m = re.match(pattern, name)
+ path_new = os.path.join(
+ base, os.pardir, m.group('name') + '.pyc')
+ log.info(
+ "Renaming file from [%s] to [%s]"
+ % (path_old, path_new))
+ try:
+ os.remove(path_new)
+ except OSError:
+ pass
+ os.rename(path_old, path_new)
+
+ def zip_safe(self):
+ safe = getattr(self.distribution, 'zip_safe', None)
+ if safe is not None:
+ return safe
+ log.warn("zip_safe flag not set; analyzing archive contents...")
+ return analyze_egg(self.bdist_dir, self.stubs)
+
+ def gen_header(self):
+ epm = EntryPoint.parse_map(self.distribution.entry_points or '')
+ ep = epm.get('setuptools.installation', {}).get('eggsecutable')
+ if ep is None:
+ return 'w' # not an eggsecutable, do it the usual way.
+
+ if not ep.attrs or ep.extras:
+ raise DistutilsSetupError(
+ "eggsecutable entry point (%r) cannot have 'extras' "
+ "or refer to a module" % (ep,)
+ )
+
+ pyver = '{}.{}'.format(*sys.version_info)
+ pkg = ep.module_name
+ full = '.'.join(ep.attrs)
+ base = ep.attrs[0]
+ basename = os.path.basename(self.egg_output)
+
+ header = (
+ "#!/bin/sh\n"
+ 'if [ `basename $0` = "%(basename)s" ]\n'
+ 'then exec python%(pyver)s -c "'
+ "import sys, os; sys.path.insert(0, os.path.abspath('$0')); "
+ "from %(pkg)s import %(base)s; sys.exit(%(full)s())"
+ '" "$@"\n'
+ 'else\n'
+ ' echo $0 is not the correct name for this egg file.\n'
+ ' echo Please rename it back to %(basename)s and try again.\n'
+ ' exec false\n'
+ 'fi\n'
+ ) % locals()
+
+ if not self.dry_run:
+ mkpath(os.path.dirname(self.egg_output), dry_run=self.dry_run)
+ f = open(self.egg_output, 'w')
+ f.write(header)
+ f.close()
+ return 'a'
+
+ def copy_metadata_to(self, target_dir):
+ "Copy metadata (egg info) to the target_dir"
+ # normalize the path (so that a forward-slash in egg_info will
+ # match using startswith below)
+ norm_egg_info = os.path.normpath(self.egg_info)
+ prefix = os.path.join(norm_egg_info, '')
+ for path in self.ei_cmd.filelist.files:
+ if path.startswith(prefix):
+ target = os.path.join(target_dir, path[len(prefix):])
+ ensure_directory(target)
+ self.copy_file(path, target)
+
+ def get_ext_outputs(self):
+ """Get a list of relative paths to C extensions in the output distro"""
+
+ all_outputs = []
+ ext_outputs = []
+
+ paths = {self.bdist_dir: ''}
+ for base, dirs, files in sorted_walk(self.bdist_dir):
+ for filename in files:
+ if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:
+ all_outputs.append(paths[base] + filename)
+ for filename in dirs:
+ paths[os.path.join(base, filename)] = (paths[base] +
+ filename + '/')
+
+ if self.distribution.has_ext_modules():
+ build_cmd = self.get_finalized_command('build_ext')
+ for ext in build_cmd.extensions:
+ if isinstance(ext, Library):
+ continue
+ fullname = build_cmd.get_ext_fullname(ext.name)
+ filename = build_cmd.get_ext_filename(fullname)
+ if not os.path.basename(filename).startswith('dl-'):
+ if os.path.exists(os.path.join(self.bdist_dir, filename)):
+ ext_outputs.append(filename)
+
+ return all_outputs, ext_outputs
+
+
+NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split())
+
+
+def walk_egg(egg_dir):
+ """Walk an unpacked egg's contents, skipping the metadata directory"""
+ walker = sorted_walk(egg_dir)
+ base, dirs, files = next(walker)
+ if 'EGG-INFO' in dirs:
+ dirs.remove('EGG-INFO')
+ yield base, dirs, files
+ for bdf in walker:
+ yield bdf
+
+
+def analyze_egg(egg_dir, stubs):
+ # check for existing flag in EGG-INFO
+ for flag, fn in safety_flags.items():
+ if os.path.exists(os.path.join(egg_dir, 'EGG-INFO', fn)):
+ return flag
+ if not can_scan():
+ return False
+ safe = True
+ for base, dirs, files in walk_egg(egg_dir):
+ for name in files:
+ if name.endswith('.py') or name.endswith('.pyw'):
+ continue
+ elif name.endswith('.pyc') or name.endswith('.pyo'):
+ # always scan, even if we already know we're not safe
+ safe = scan_module(egg_dir, base, name, stubs) and safe
+ return safe
+
+
+def write_safety_flag(egg_dir, safe):
+ # Write or remove zip safety flag file(s)
+ for flag, fn in safety_flags.items():
+ fn = os.path.join(egg_dir, fn)
+ if os.path.exists(fn):
+ if safe is None or bool(safe) != flag:
+ os.unlink(fn)
+ elif safe is not None and bool(safe) == flag:
+ f = open(fn, 'wt')
+ f.write('\n')
+ f.close()
+
+
+safety_flags = {
+ True: 'zip-safe',
+ False: 'not-zip-safe',
+}
+
+
+def scan_module(egg_dir, base, name, stubs):
+ """Check whether module possibly uses unsafe-for-zipfile stuff"""
+
+ filename = os.path.join(base, name)
+ if filename[:-1] in stubs:
+ return True # Extension module
+ pkg = base[len(egg_dir) + 1:].replace(os.sep, '.')
+ module = pkg + (pkg and '.' or '') + os.path.splitext(name)[0]
+ if six.PY2:
+ skip = 8 # skip magic & date
+ elif sys.version_info < (3, 7):
+ skip = 12 # skip magic & date & file size
+ else:
+ skip = 16 # skip magic & reserved? & date & file size
+ f = open(filename, 'rb')
+ f.read(skip)
+ code = marshal.load(f)
+ f.close()
+ safe = True
+ symbols = dict.fromkeys(iter_symbols(code))
+ for bad in ['__file__', '__path__']:
+ if bad in symbols:
+ log.warn("%s: module references %s", module, bad)
+ safe = False
+ if 'inspect' in symbols:
+ for bad in [
+ 'getsource', 'getabsfile', 'getsourcefile', 'getfile'
+ 'getsourcelines', 'findsource', 'getcomments', 'getframeinfo',
+ 'getinnerframes', 'getouterframes', 'stack', 'trace'
+ ]:
+ if bad in symbols:
+ log.warn("%s: module MAY be using inspect.%s", module, bad)
+ safe = False
+ return safe
+
+
+def iter_symbols(code):
+ """Yield names and strings used by `code` and its nested code objects"""
+ for name in code.co_names:
+ yield name
+ for const in code.co_consts:
+ if isinstance(const, six.string_types):
+ yield const
+ elif isinstance(const, CodeType):
+ for name in iter_symbols(const):
+ yield name
+
+
+def can_scan():
+ if not sys.platform.startswith('java') and sys.platform != 'cli':
+ # CPython, PyPy, etc.
+ return True
+ log.warn("Unable to analyze compiled code on this platform.")
+ log.warn("Please ask the author to include a 'zip_safe'"
+ " setting (either True or False) in the package's setup.py")
+
+
+# Attribute names of options for commands that might need to be convinced to
+# install to the egg build directory
+
+INSTALL_DIRECTORY_ATTRS = [
+ 'install_lib', 'install_dir', 'install_data', 'install_base'
+]
+
+
+def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=True,
+ mode='w'):
+ """Create a zip file from all the files under 'base_dir'. The output
+ zip file will be named 'base_dir' + ".zip". Uses either the "zipfile"
+ Python module (if available) or the InfoZIP "zip" utility (if installed
+ and found on the default search path). If neither tool is available,
+ raises DistutilsExecError. Returns the name of the output zip file.
+ """
+ import zipfile
+
+ mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
+ log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)
+
+ def visit(z, dirname, names):
+ for name in names:
+ path = os.path.normpath(os.path.join(dirname, name))
+ if os.path.isfile(path):
+ p = path[len(base_dir) + 1:]
+ if not dry_run:
+ z.write(path, p)
+ log.debug("adding '%s'", p)
+
+ compression = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
+ if not dry_run:
+ z = zipfile.ZipFile(zip_filename, mode, compression=compression)
+ for dirname, dirs, files in sorted_walk(base_dir):
+ visit(z, dirname, files)
+ z.close()
+ else:
+ for dirname, dirs, files in sorted_walk(base_dir):
+ visit(None, dirname, files)
+ return zip_filename
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/bdist_rpm.py b/monEnvTP/lib/python3.8/site-packages/setuptools/command/bdist_rpm.py
new file mode 100644
index 0000000000000000000000000000000000000000..70730927ecaed778ebbdee98eb37c24ec3f1a8e6
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/command/bdist_rpm.py
@@ -0,0 +1,43 @@
+import distutils.command.bdist_rpm as orig
+
+
+class bdist_rpm(orig.bdist_rpm):
+ """
+ Override the default bdist_rpm behavior to do the following:
+
+ 1. Run egg_info to ensure the name and version are properly calculated.
+ 2. Always run 'install' using --single-version-externally-managed to
+ disable eggs in RPM distributions.
+ 3. Replace dash with underscore in the version numbers for better RPM
+ compatibility.
+ """
+
+ def run(self):
+ # ensure distro name is up-to-date
+ self.run_command('egg_info')
+
+ orig.bdist_rpm.run(self)
+
+ def _make_spec_file(self):
+ version = self.distribution.get_version()
+ rpmversion = version.replace('-', '_')
+ spec = orig.bdist_rpm._make_spec_file(self)
+ line23 = '%define version ' + version
+ line24 = '%define version ' + rpmversion
+ spec = [
+ line.replace(
+ "Source0: %{name}-%{version}.tar",
+ "Source0: %{name}-%{unmangled_version}.tar"
+ ).replace(
+ "setup.py install ",
+ "setup.py install --single-version-externally-managed "
+ ).replace(
+ "%setup",
+ "%setup -n %{name}-%{unmangled_version}"
+ ).replace(line23, line24)
+ for line in spec
+ ]
+ insert_loc = spec.index(line24) + 1
+ unmangled_version = "%define unmangled_version " + version
+ spec.insert(insert_loc, unmangled_version)
+ return spec
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/bdist_wininst.py b/monEnvTP/lib/python3.8/site-packages/setuptools/command/bdist_wininst.py
new file mode 100644
index 0000000000000000000000000000000000000000..073de97b46c92e2e221cade8c1350ab2c5cff891
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/command/bdist_wininst.py
@@ -0,0 +1,21 @@
+import distutils.command.bdist_wininst as orig
+
+
+class bdist_wininst(orig.bdist_wininst):
+ def reinitialize_command(self, command, reinit_subcommands=0):
+ """
+ Supplement reinitialize_command to work around
+ http://bugs.python.org/issue20819
+ """
+ cmd = self.distribution.reinitialize_command(
+ command, reinit_subcommands)
+ if command in ('install', 'install_lib'):
+ cmd.install_lib = None
+ return cmd
+
+ def run(self):
+ self._is_running = True
+ try:
+ orig.bdist_wininst.run(self)
+ finally:
+ self._is_running = False
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/build_clib.py b/monEnvTP/lib/python3.8/site-packages/setuptools/command/build_clib.py
new file mode 100644
index 0000000000000000000000000000000000000000..09caff6ffde8fc3f368cb635dc3cbbbc8851530d
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/command/build_clib.py
@@ -0,0 +1,98 @@
+import distutils.command.build_clib as orig
+from distutils.errors import DistutilsSetupError
+from distutils import log
+from setuptools.dep_util import newer_pairwise_group
+
+
+class build_clib(orig.build_clib):
+ """
+ Override the default build_clib behaviour to do the following:
+
+ 1. Implement a rudimentary timestamp-based dependency system
+ so 'compile()' doesn't run every time.
+ 2. Add more keys to the 'build_info' dictionary:
+ * obj_deps - specify dependencies for each object compiled.
+ this should be a dictionary mapping a key
+ with the source filename to a list of
+ dependencies. Use an empty string for global
+ dependencies.
+ * cflags - specify a list of additional flags to pass to
+ the compiler.
+ """
+
+ def build_libraries(self, libraries):
+ for (lib_name, build_info) in libraries:
+ sources = build_info.get('sources')
+ if sources is None or not isinstance(sources, (list, tuple)):
+ raise DistutilsSetupError(
+ "in 'libraries' option (library '%s'), "
+ "'sources' must be present and must be "
+ "a list of source filenames" % lib_name)
+ sources = list(sources)
+
+ log.info("building '%s' library", lib_name)
+
+ # Make sure everything is the correct type.
+ # obj_deps should be a dictionary of keys as sources
+ # and a list/tuple of files that are its dependencies.
+ obj_deps = build_info.get('obj_deps', dict())
+ if not isinstance(obj_deps, dict):
+ raise DistutilsSetupError(
+ "in 'libraries' option (library '%s'), "
+ "'obj_deps' must be a dictionary of "
+ "type 'source: list'" % lib_name)
+ dependencies = []
+
+ # Get the global dependencies that are specified by the '' key.
+ # These will go into every source's dependency list.
+ global_deps = obj_deps.get('', list())
+ if not isinstance(global_deps, (list, tuple)):
+ raise DistutilsSetupError(
+ "in 'libraries' option (library '%s'), "
+ "'obj_deps' must be a dictionary of "
+ "type 'source: list'" % lib_name)
+
+ # Build the list to be used by newer_pairwise_group
+ # each source will be auto-added to its dependencies.
+ for source in sources:
+ src_deps = [source]
+ src_deps.extend(global_deps)
+ extra_deps = obj_deps.get(source, list())
+ if not isinstance(extra_deps, (list, tuple)):
+ raise DistutilsSetupError(
+ "in 'libraries' option (library '%s'), "
+ "'obj_deps' must be a dictionary of "
+ "type 'source: list'" % lib_name)
+ src_deps.extend(extra_deps)
+ dependencies.append(src_deps)
+
+ expected_objects = self.compiler.object_filenames(
+ sources,
+ output_dir=self.build_temp
+ )
+
+ if newer_pairwise_group(dependencies, expected_objects) != ([], []):
+ # First, compile the source code to object files in the library
+ # directory. (This should probably change to putting object
+ # files in a temporary build directory.)
+ macros = build_info.get('macros')
+ include_dirs = build_info.get('include_dirs')
+ cflags = build_info.get('cflags')
+ objects = self.compiler.compile(
+ sources,
+ output_dir=self.build_temp,
+ macros=macros,
+ include_dirs=include_dirs,
+ extra_postargs=cflags,
+ debug=self.debug
+ )
+
+ # Now "link" the object files together into a static library.
+ # (On Unix at least, this isn't really linking -- it just
+ # builds an archive. Whatever.)
+ self.compiler.create_static_lib(
+ expected_objects,
+ lib_name,
+ output_dir=self.build_clib,
+ debug=self.debug
+ )
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/build_ext.py b/monEnvTP/lib/python3.8/site-packages/setuptools/command/build_ext.py
new file mode 100644
index 0000000000000000000000000000000000000000..daa8e4fe81c18e8fc3e07718b4a66137b062127e
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/command/build_ext.py
@@ -0,0 +1,327 @@
+import os
+import sys
+import itertools
+from distutils.command.build_ext import build_ext as _du_build_ext
+from distutils.file_util import copy_file
+from distutils.ccompiler import new_compiler
+from distutils.sysconfig import customize_compiler, get_config_var
+from distutils.errors import DistutilsError
+from distutils import log
+
+from setuptools.extension import Library
+from setuptools.extern import six
+
+if six.PY2:
+ import imp
+
+ EXTENSION_SUFFIXES = [s for s, _, tp in imp.get_suffixes() if tp == imp.C_EXTENSION]
+else:
+ from importlib.machinery import EXTENSION_SUFFIXES
+
+try:
+ # Attempt to use Cython for building extensions, if available
+ from Cython.Distutils.build_ext import build_ext as _build_ext
+ # Additionally, assert that the compiler module will load
+ # also. Ref #1229.
+ __import__('Cython.Compiler.Main')
+except ImportError:
+ _build_ext = _du_build_ext
+
+# make sure _config_vars is initialized
+get_config_var("LDSHARED")
+from distutils.sysconfig import _config_vars as _CONFIG_VARS
+
+
+def _customize_compiler_for_shlib(compiler):
+ if sys.platform == "darwin":
+ # building .dylib requires additional compiler flags on OSX; here we
+ # temporarily substitute the pyconfig.h variables so that distutils'
+ # 'customize_compiler' uses them before we build the shared libraries.
+ tmp = _CONFIG_VARS.copy()
+ try:
+ # XXX Help! I don't have any idea whether these are right...
+ _CONFIG_VARS['LDSHARED'] = (
+ "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup")
+ _CONFIG_VARS['CCSHARED'] = " -dynamiclib"
+ _CONFIG_VARS['SO'] = ".dylib"
+ customize_compiler(compiler)
+ finally:
+ _CONFIG_VARS.clear()
+ _CONFIG_VARS.update(tmp)
+ else:
+ customize_compiler(compiler)
+
+
+have_rtld = False
+use_stubs = False
+libtype = 'shared'
+
+if sys.platform == "darwin":
+ use_stubs = True
+elif os.name != 'nt':
+ try:
+ import dl
+ use_stubs = have_rtld = hasattr(dl, 'RTLD_NOW')
+ except ImportError:
+ pass
+
+if_dl = lambda s: s if have_rtld else ''
+
+
+def get_abi3_suffix():
+ """Return the file extension for an abi3-compliant Extension()"""
+ for suffix in EXTENSION_SUFFIXES:
+ if '.abi3' in suffix: # Unix
+ return suffix
+ elif suffix == '.pyd': # Windows
+ return suffix
+
+
+class build_ext(_build_ext):
+ def run(self):
+ """Build extensions in build directory, then copy if --inplace"""
+ old_inplace, self.inplace = self.inplace, 0
+ _build_ext.run(self)
+ self.inplace = old_inplace
+ if old_inplace:
+ self.copy_extensions_to_source()
+
+ def copy_extensions_to_source(self):
+ build_py = self.get_finalized_command('build_py')
+ for ext in self.extensions:
+ fullname = self.get_ext_fullname(ext.name)
+ filename = self.get_ext_filename(fullname)
+ modpath = fullname.split('.')
+ package = '.'.join(modpath[:-1])
+ package_dir = build_py.get_package_dir(package)
+ dest_filename = os.path.join(package_dir,
+ os.path.basename(filename))
+ src_filename = os.path.join(self.build_lib, filename)
+
+ # Always copy, even if source is older than destination, to ensure
+ # that the right extensions for the current Python/platform are
+ # used.
+ copy_file(
+ src_filename, dest_filename, verbose=self.verbose,
+ dry_run=self.dry_run
+ )
+ if ext._needs_stub:
+ self.write_stub(package_dir or os.curdir, ext, True)
+
+ def get_ext_filename(self, fullname):
+ filename = _build_ext.get_ext_filename(self, fullname)
+ if fullname in self.ext_map:
+ ext = self.ext_map[fullname]
+ use_abi3 = (
+ six.PY3
+ and getattr(ext, 'py_limited_api')
+ and get_abi3_suffix()
+ )
+ if use_abi3:
+ so_ext = get_config_var('EXT_SUFFIX')
+ filename = filename[:-len(so_ext)]
+ filename = filename + get_abi3_suffix()
+ if isinstance(ext, Library):
+ fn, ext = os.path.splitext(filename)
+ return self.shlib_compiler.library_filename(fn, libtype)
+ elif use_stubs and ext._links_to_dynamic:
+ d, fn = os.path.split(filename)
+ return os.path.join(d, 'dl-' + fn)
+ return filename
+
+ def initialize_options(self):
+ _build_ext.initialize_options(self)
+ self.shlib_compiler = None
+ self.shlibs = []
+ self.ext_map = {}
+
+ def finalize_options(self):
+ _build_ext.finalize_options(self)
+ self.extensions = self.extensions or []
+ self.check_extensions_list(self.extensions)
+ self.shlibs = [ext for ext in self.extensions
+ if isinstance(ext, Library)]
+ if self.shlibs:
+ self.setup_shlib_compiler()
+ for ext in self.extensions:
+ ext._full_name = self.get_ext_fullname(ext.name)
+ for ext in self.extensions:
+ fullname = ext._full_name
+ self.ext_map[fullname] = ext
+
+ # distutils 3.1 will also ask for module names
+ # XXX what to do with conflicts?
+ self.ext_map[fullname.split('.')[-1]] = ext
+
+ ltd = self.shlibs and self.links_to_dynamic(ext) or False
+ ns = ltd and use_stubs and not isinstance(ext, Library)
+ ext._links_to_dynamic = ltd
+ ext._needs_stub = ns
+ filename = ext._file_name = self.get_ext_filename(fullname)
+ libdir = os.path.dirname(os.path.join(self.build_lib, filename))
+ if ltd and libdir not in ext.library_dirs:
+ ext.library_dirs.append(libdir)
+ if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs:
+ ext.runtime_library_dirs.append(os.curdir)
+
+ def setup_shlib_compiler(self):
+ compiler = self.shlib_compiler = new_compiler(
+ compiler=self.compiler, dry_run=self.dry_run, force=self.force
+ )
+ _customize_compiler_for_shlib(compiler)
+
+ if self.include_dirs is not None:
+ compiler.set_include_dirs(self.include_dirs)
+ if self.define is not None:
+ # 'define' option is a list of (name,value) tuples
+ for (name, value) in self.define:
+ compiler.define_macro(name, value)
+ if self.undef is not None:
+ for macro in self.undef:
+ compiler.undefine_macro(macro)
+ if self.libraries is not None:
+ compiler.set_libraries(self.libraries)
+ if self.library_dirs is not None:
+ compiler.set_library_dirs(self.library_dirs)
+ if self.rpath is not None:
+ compiler.set_runtime_library_dirs(self.rpath)
+ if self.link_objects is not None:
+ compiler.set_link_objects(self.link_objects)
+
+ # hack so distutils' build_extension() builds a library instead
+ compiler.link_shared_object = link_shared_object.__get__(compiler)
+
+ def get_export_symbols(self, ext):
+ if isinstance(ext, Library):
+ return ext.export_symbols
+ return _build_ext.get_export_symbols(self, ext)
+
+ def build_extension(self, ext):
+ ext._convert_pyx_sources_to_lang()
+ _compiler = self.compiler
+ try:
+ if isinstance(ext, Library):
+ self.compiler = self.shlib_compiler
+ _build_ext.build_extension(self, ext)
+ if ext._needs_stub:
+ cmd = self.get_finalized_command('build_py').build_lib
+ self.write_stub(cmd, ext)
+ finally:
+ self.compiler = _compiler
+
+ def links_to_dynamic(self, ext):
+ """Return true if 'ext' links to a dynamic lib in the same package"""
+ # XXX this should check to ensure the lib is actually being built
+ # XXX as dynamic, and not just using a locally-found version or a
+ # XXX static-compiled version
+ libnames = dict.fromkeys([lib._full_name for lib in self.shlibs])
+ pkg = '.'.join(ext._full_name.split('.')[:-1] + [''])
+ return any(pkg + libname in libnames for libname in ext.libraries)
+
+ def get_outputs(self):
+ return _build_ext.get_outputs(self) + self.__get_stubs_outputs()
+
+ def __get_stubs_outputs(self):
+ # assemble the base name for each extension that needs a stub
+ ns_ext_bases = (
+ os.path.join(self.build_lib, *ext._full_name.split('.'))
+ for ext in self.extensions
+ if ext._needs_stub
+ )
+ # pair each base with the extension
+ pairs = itertools.product(ns_ext_bases, self.__get_output_extensions())
+ return list(base + fnext for base, fnext in pairs)
+
+ def __get_output_extensions(self):
+ yield '.py'
+ yield '.pyc'
+ if self.get_finalized_command('build_py').optimize:
+ yield '.pyo'
+
+ def write_stub(self, output_dir, ext, compile=False):
+ log.info("writing stub loader for %s to %s", ext._full_name,
+ output_dir)
+ stub_file = (os.path.join(output_dir, *ext._full_name.split('.')) +
+ '.py')
+ if compile and os.path.exists(stub_file):
+ raise DistutilsError(stub_file + " already exists! Please delete.")
+ if not self.dry_run:
+ f = open(stub_file, 'w')
+ f.write(
+ '\n'.join([
+ "def __bootstrap__():",
+ " global __bootstrap__, __file__, __loader__",
+ " import sys, os, pkg_resources, imp" + if_dl(", dl"),
+ " __file__ = pkg_resources.resource_filename"
+ "(__name__,%r)"
+ % os.path.basename(ext._file_name),
+ " del __bootstrap__",
+ " if '__loader__' in globals():",
+ " del __loader__",
+ if_dl(" old_flags = sys.getdlopenflags()"),
+ " old_dir = os.getcwd()",
+ " try:",
+ " os.chdir(os.path.dirname(__file__))",
+ if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"),
+ " imp.load_dynamic(__name__,__file__)",
+ " finally:",
+ if_dl(" sys.setdlopenflags(old_flags)"),
+ " os.chdir(old_dir)",
+ "__bootstrap__()",
+ "" # terminal \n
+ ])
+ )
+ f.close()
+ if compile:
+ from distutils.util import byte_compile
+
+ byte_compile([stub_file], optimize=0,
+ force=True, dry_run=self.dry_run)
+ optimize = self.get_finalized_command('install_lib').optimize
+ if optimize > 0:
+ byte_compile([stub_file], optimize=optimize,
+ force=True, dry_run=self.dry_run)
+ if os.path.exists(stub_file) and not self.dry_run:
+ os.unlink(stub_file)
+
+
+if use_stubs or os.name == 'nt':
+ # Build shared libraries
+ #
+ def link_shared_object(
+ self, objects, output_libname, output_dir=None, libraries=None,
+ library_dirs=None, runtime_library_dirs=None, export_symbols=None,
+ debug=0, extra_preargs=None, extra_postargs=None, build_temp=None,
+ target_lang=None):
+ self.link(
+ self.SHARED_LIBRARY, objects, output_libname,
+ output_dir, libraries, library_dirs, runtime_library_dirs,
+ export_symbols, debug, extra_preargs, extra_postargs,
+ build_temp, target_lang
+ )
+else:
+ # Build static libraries everywhere else
+ libtype = 'static'
+
+ def link_shared_object(
+ self, objects, output_libname, output_dir=None, libraries=None,
+ library_dirs=None, runtime_library_dirs=None, export_symbols=None,
+ debug=0, extra_preargs=None, extra_postargs=None, build_temp=None,
+ target_lang=None):
+ # XXX we need to either disallow these attrs on Library instances,
+ # or warn/abort here if set, or something...
+ # libraries=None, library_dirs=None, runtime_library_dirs=None,
+ # export_symbols=None, extra_preargs=None, extra_postargs=None,
+ # build_temp=None
+
+ assert output_dir is None # distutils build_ext doesn't pass this
+ output_dir, filename = os.path.split(output_libname)
+ basename, ext = os.path.splitext(filename)
+ if self.library_filename("x").startswith('lib'):
+ # strip 'lib' prefix; this is kludgy if some platform uses
+ # a different prefix
+ basename = basename[3:]
+
+ self.create_static_lib(
+ objects, basename, output_dir, debug, target_lang
+ )
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/build_py.py b/monEnvTP/lib/python3.8/site-packages/setuptools/command/build_py.py
new file mode 100644
index 0000000000000000000000000000000000000000..b0314fd413ae7f8c1027ccde0b092fd493fb104b
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/command/build_py.py
@@ -0,0 +1,270 @@
+from glob import glob
+from distutils.util import convert_path
+import distutils.command.build_py as orig
+import os
+import fnmatch
+import textwrap
+import io
+import distutils.errors
+import itertools
+
+from setuptools.extern import six
+from setuptools.extern.six.moves import map, filter, filterfalse
+
+try:
+ from setuptools.lib2to3_ex import Mixin2to3
+except ImportError:
+
+ class Mixin2to3:
+ def run_2to3(self, files, doctests=True):
+ "do nothing"
+
+
+class build_py(orig.build_py, Mixin2to3):
+ """Enhanced 'build_py' command that includes data files with packages
+
+ The data files are specified via a 'package_data' argument to 'setup()'.
+ See 'setuptools.dist.Distribution' for more details.
+
+ Also, this version of the 'build_py' command allows you to specify both
+ 'py_modules' and 'packages' in the same setup operation.
+ """
+
+ def finalize_options(self):
+ orig.build_py.finalize_options(self)
+ self.package_data = self.distribution.package_data
+ self.exclude_package_data = (self.distribution.exclude_package_data or
+ {})
+ if 'data_files' in self.__dict__:
+ del self.__dict__['data_files']
+ self.__updated_files = []
+ self.__doctests_2to3 = []
+
+ def run(self):
+ """Build modules, packages, and copy data files to build directory"""
+ if not self.py_modules and not self.packages:
+ return
+
+ if self.py_modules:
+ self.build_modules()
+
+ if self.packages:
+ self.build_packages()
+ self.build_package_data()
+
+ self.run_2to3(self.__updated_files, False)
+ self.run_2to3(self.__updated_files, True)
+ self.run_2to3(self.__doctests_2to3, True)
+
+ # Only compile actual .py files, using our base class' idea of what our
+ # output files are.
+ self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0))
+
+ def __getattr__(self, attr):
+ "lazily compute data files"
+ if attr == 'data_files':
+ self.data_files = self._get_data_files()
+ return self.data_files
+ return orig.build_py.__getattr__(self, attr)
+
+ def build_module(self, module, module_file, package):
+ if six.PY2 and isinstance(package, six.string_types):
+ # avoid errors on Python 2 when unicode is passed (#190)
+ package = package.split('.')
+ outfile, copied = orig.build_py.build_module(self, module, module_file,
+ package)
+ if copied:
+ self.__updated_files.append(outfile)
+ return outfile, copied
+
+ def _get_data_files(self):
+ """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
+ self.analyze_manifest()
+ return list(map(self._get_pkg_data_files, self.packages or ()))
+
+ def _get_pkg_data_files(self, package):
+ # Locate package source directory
+ src_dir = self.get_package_dir(package)
+
+ # Compute package build directory
+ build_dir = os.path.join(*([self.build_lib] + package.split('.')))
+
+ # Strip directory from globbed filenames
+ filenames = [
+ os.path.relpath(file, src_dir)
+ for file in self.find_data_files(package, src_dir)
+ ]
+ return package, src_dir, build_dir, filenames
+
+ def find_data_files(self, package, src_dir):
+ """Return filenames for package's data files in 'src_dir'"""
+ patterns = self._get_platform_patterns(
+ self.package_data,
+ package,
+ src_dir,
+ )
+ globs_expanded = map(glob, patterns)
+ # flatten the expanded globs into an iterable of matches
+ globs_matches = itertools.chain.from_iterable(globs_expanded)
+ glob_files = filter(os.path.isfile, globs_matches)
+ files = itertools.chain(
+ self.manifest_files.get(package, []),
+ glob_files,
+ )
+ return self.exclude_data_files(package, src_dir, files)
+
+ def build_package_data(self):
+ """Copy data files into build directory"""
+ for package, src_dir, build_dir, filenames in self.data_files:
+ for filename in filenames:
+ target = os.path.join(build_dir, filename)
+ self.mkpath(os.path.dirname(target))
+ srcfile = os.path.join(src_dir, filename)
+ outf, copied = self.copy_file(srcfile, target)
+ srcfile = os.path.abspath(srcfile)
+ if (copied and
+ srcfile in self.distribution.convert_2to3_doctests):
+ self.__doctests_2to3.append(outf)
+
+ def analyze_manifest(self):
+ self.manifest_files = mf = {}
+ if not self.distribution.include_package_data:
+ return
+ src_dirs = {}
+ for package in self.packages or ():
+ # Locate package source directory
+ src_dirs[assert_relative(self.get_package_dir(package))] = package
+
+ self.run_command('egg_info')
+ ei_cmd = self.get_finalized_command('egg_info')
+ for path in ei_cmd.filelist.files:
+ d, f = os.path.split(assert_relative(path))
+ prev = None
+ oldf = f
+ while d and d != prev and d not in src_dirs:
+ prev = d
+ d, df = os.path.split(d)
+ f = os.path.join(df, f)
+ if d in src_dirs:
+ if path.endswith('.py') and f == oldf:
+ continue # it's a module, not data
+ mf.setdefault(src_dirs[d], []).append(path)
+
+ def get_data_files(self):
+ pass # Lazily compute data files in _get_data_files() function.
+
+ def check_package(self, package, package_dir):
+ """Check namespace packages' __init__ for declare_namespace"""
+ try:
+ return self.packages_checked[package]
+ except KeyError:
+ pass
+
+ init_py = orig.build_py.check_package(self, package, package_dir)
+ self.packages_checked[package] = init_py
+
+ if not init_py or not self.distribution.namespace_packages:
+ return init_py
+
+ for pkg in self.distribution.namespace_packages:
+ if pkg == package or pkg.startswith(package + '.'):
+ break
+ else:
+ return init_py
+
+ with io.open(init_py, 'rb') as f:
+ contents = f.read()
+ if b'declare_namespace' not in contents:
+ raise distutils.errors.DistutilsError(
+ "Namespace package problem: %s is a namespace package, but "
+ "its\n__init__.py does not call declare_namespace()! Please "
+ 'fix it.\n(See the setuptools manual under '
+ '"Namespace Packages" for details.)\n"' % (package,)
+ )
+ return init_py
+
+ def initialize_options(self):
+ self.packages_checked = {}
+ orig.build_py.initialize_options(self)
+
+ def get_package_dir(self, package):
+ res = orig.build_py.get_package_dir(self, package)
+ if self.distribution.src_root is not None:
+ return os.path.join(self.distribution.src_root, res)
+ return res
+
+ def exclude_data_files(self, package, src_dir, files):
+ """Filter filenames for package's data files in 'src_dir'"""
+ files = list(files)
+ patterns = self._get_platform_patterns(
+ self.exclude_package_data,
+ package,
+ src_dir,
+ )
+ match_groups = (
+ fnmatch.filter(files, pattern)
+ for pattern in patterns
+ )
+ # flatten the groups of matches into an iterable of matches
+ matches = itertools.chain.from_iterable(match_groups)
+ bad = set(matches)
+ keepers = (
+ fn
+ for fn in files
+ if fn not in bad
+ )
+ # ditch dupes
+ return list(_unique_everseen(keepers))
+
+ @staticmethod
+ def _get_platform_patterns(spec, package, src_dir):
+ """
+ yield platform-specific path patterns (suitable for glob
+ or fn_match) from a glob-based spec (such as
+ self.package_data or self.exclude_package_data)
+ matching package in src_dir.
+ """
+ raw_patterns = itertools.chain(
+ spec.get('', []),
+ spec.get(package, []),
+ )
+ return (
+ # Each pattern has to be converted to a platform-specific path
+ os.path.join(src_dir, convert_path(pattern))
+ for pattern in raw_patterns
+ )
+
+
+# from Python docs
+def _unique_everseen(iterable, key=None):
+ "List unique elements, preserving order. Remember all elements ever seen."
+ # unique_everseen('AAAABBBCCDAABBB') --> A B C D
+ # unique_everseen('ABBCcAD', str.lower) --> A B C D
+ seen = set()
+ seen_add = seen.add
+ if key is None:
+ for element in filterfalse(seen.__contains__, iterable):
+ seen_add(element)
+ yield element
+ else:
+ for element in iterable:
+ k = key(element)
+ if k not in seen:
+ seen_add(k)
+ yield element
+
+
+def assert_relative(path):
+ if not os.path.isabs(path):
+ return path
+ from distutils.errors import DistutilsSetupError
+
+ msg = textwrap.dedent("""
+ Error: setup script specifies an absolute path:
+
+ %s
+
+ setup() arguments must *always* be /-separated paths relative to the
+ setup.py directory, *never* absolute paths.
+ """).lstrip() % path
+ raise DistutilsSetupError(msg)
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/develop.py b/monEnvTP/lib/python3.8/site-packages/setuptools/command/develop.py
new file mode 100644
index 0000000000000000000000000000000000000000..009e4f9368f5b29fffd160f3b712fb0cd19807bd
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/command/develop.py
@@ -0,0 +1,221 @@
+from distutils.util import convert_path
+from distutils import log
+from distutils.errors import DistutilsError, DistutilsOptionError
+import os
+import glob
+import io
+
+from setuptools.extern import six
+
+import pkg_resources
+from setuptools.command.easy_install import easy_install
+from setuptools import namespaces
+import setuptools
+
+__metaclass__ = type
+
+
+class develop(namespaces.DevelopInstaller, easy_install):
+ """Set up package for development"""
+
+ description = "install package in 'development mode'"
+
+ user_options = easy_install.user_options + [
+ ("uninstall", "u", "Uninstall this source package"),
+ ("egg-path=", None, "Set the path to be used in the .egg-link file"),
+ ]
+
+ boolean_options = easy_install.boolean_options + ['uninstall']
+
+ command_consumes_arguments = False # override base
+
+ def run(self):
+ if self.uninstall:
+ self.multi_version = True
+ self.uninstall_link()
+ self.uninstall_namespaces()
+ else:
+ self.install_for_development()
+ self.warn_deprecated_options()
+
+ def initialize_options(self):
+ self.uninstall = None
+ self.egg_path = None
+ easy_install.initialize_options(self)
+ self.setup_path = None
+ self.always_copy_from = '.' # always copy eggs installed in curdir
+
+ def finalize_options(self):
+ ei = self.get_finalized_command("egg_info")
+ if ei.broken_egg_info:
+ template = "Please rename %r to %r before using 'develop'"
+ args = ei.egg_info, ei.broken_egg_info
+ raise DistutilsError(template % args)
+ self.args = [ei.egg_name]
+
+ easy_install.finalize_options(self)
+ self.expand_basedirs()
+ self.expand_dirs()
+ # pick up setup-dir .egg files only: no .egg-info
+ self.package_index.scan(glob.glob('*.egg'))
+
+ egg_link_fn = ei.egg_name + '.egg-link'
+ self.egg_link = os.path.join(self.install_dir, egg_link_fn)
+ self.egg_base = ei.egg_base
+ if self.egg_path is None:
+ self.egg_path = os.path.abspath(ei.egg_base)
+
+ target = pkg_resources.normalize_path(self.egg_base)
+ egg_path = pkg_resources.normalize_path(
+ os.path.join(self.install_dir, self.egg_path))
+ if egg_path != target:
+ raise DistutilsOptionError(
+ "--egg-path must be a relative path from the install"
+ " directory to " + target
+ )
+
+ # Make a distribution for the package's source
+ self.dist = pkg_resources.Distribution(
+ target,
+ pkg_resources.PathMetadata(target, os.path.abspath(ei.egg_info)),
+ project_name=ei.egg_name
+ )
+
+ self.setup_path = self._resolve_setup_path(
+ self.egg_base,
+ self.install_dir,
+ self.egg_path,
+ )
+
+ @staticmethod
+ def _resolve_setup_path(egg_base, install_dir, egg_path):
+ """
+ Generate a path from egg_base back to '.' where the
+ setup script resides and ensure that path points to the
+ setup path from $install_dir/$egg_path.
+ """
+ path_to_setup = egg_base.replace(os.sep, '/').rstrip('/')
+ if path_to_setup != os.curdir:
+ path_to_setup = '../' * (path_to_setup.count('/') + 1)
+ resolved = pkg_resources.normalize_path(
+ os.path.join(install_dir, egg_path, path_to_setup)
+ )
+ if resolved != pkg_resources.normalize_path(os.curdir):
+ raise DistutilsOptionError(
+ "Can't get a consistent path to setup script from"
+ " installation directory", resolved,
+ pkg_resources.normalize_path(os.curdir))
+ return path_to_setup
+
+ def install_for_development(self):
+ if six.PY3 and getattr(self.distribution, 'use_2to3', False):
+ # If we run 2to3 we can not do this inplace:
+
+ # Ensure metadata is up-to-date
+ self.reinitialize_command('build_py', inplace=0)
+ self.run_command('build_py')
+ bpy_cmd = self.get_finalized_command("build_py")
+ build_path = pkg_resources.normalize_path(bpy_cmd.build_lib)
+
+ # Build extensions
+ self.reinitialize_command('egg_info', egg_base=build_path)
+ self.run_command('egg_info')
+
+ self.reinitialize_command('build_ext', inplace=0)
+ self.run_command('build_ext')
+
+ # Fixup egg-link and easy-install.pth
+ ei_cmd = self.get_finalized_command("egg_info")
+ self.egg_path = build_path
+ self.dist.location = build_path
+ # XXX
+ self.dist._provider = pkg_resources.PathMetadata(
+ build_path, ei_cmd.egg_info)
+ else:
+ # Without 2to3 inplace works fine:
+ self.run_command('egg_info')
+
+ # Build extensions in-place
+ self.reinitialize_command('build_ext', inplace=1)
+ self.run_command('build_ext')
+
+ self.install_site_py() # ensure that target dir is site-safe
+ if setuptools.bootstrap_install_from:
+ self.easy_install(setuptools.bootstrap_install_from)
+ setuptools.bootstrap_install_from = None
+
+ self.install_namespaces()
+
+ # create an .egg-link in the installation dir, pointing to our egg
+ log.info("Creating %s (link to %s)", self.egg_link, self.egg_base)
+ if not self.dry_run:
+ with open(self.egg_link, "w") as f:
+ f.write(self.egg_path + "\n" + self.setup_path)
+ # postprocess the installed distro, fixing up .pth, installing scripts,
+ # and handling requirements
+ self.process_distribution(None, self.dist, not self.no_deps)
+
+ def uninstall_link(self):
+ if os.path.exists(self.egg_link):
+ log.info("Removing %s (link to %s)", self.egg_link, self.egg_base)
+ egg_link_file = open(self.egg_link)
+ contents = [line.rstrip() for line in egg_link_file]
+ egg_link_file.close()
+ if contents not in ([self.egg_path],
+ [self.egg_path, self.setup_path]):
+ log.warn("Link points to %s: uninstall aborted", contents)
+ return
+ if not self.dry_run:
+ os.unlink(self.egg_link)
+ if not self.dry_run:
+ self.update_pth(self.dist) # remove any .pth link to us
+ if self.distribution.scripts:
+ # XXX should also check for entry point scripts!
+ log.warn("Note: you must uninstall or replace scripts manually!")
+
+ def install_egg_scripts(self, dist):
+ if dist is not self.dist:
+ # Installing a dependency, so fall back to normal behavior
+ return easy_install.install_egg_scripts(self, dist)
+
+ # create wrapper scripts in the script dir, pointing to dist.scripts
+
+ # new-style...
+ self.install_wrapper_scripts(dist)
+
+ # ...and old-style
+ for script_name in self.distribution.scripts or []:
+ script_path = os.path.abspath(convert_path(script_name))
+ script_name = os.path.basename(script_path)
+ with io.open(script_path) as strm:
+ script_text = strm.read()
+ self.install_script(dist, script_name, script_text, script_path)
+
+ def install_wrapper_scripts(self, dist):
+ dist = VersionlessRequirement(dist)
+ return easy_install.install_wrapper_scripts(self, dist)
+
+
+class VersionlessRequirement:
+ """
+ Adapt a pkg_resources.Distribution to simply return the project
+ name as the 'requirement' so that scripts will work across
+ multiple versions.
+
+ >>> from pkg_resources import Distribution
+ >>> dist = Distribution(project_name='foo', version='1.0')
+ >>> str(dist.as_requirement())
+ 'foo==1.0'
+ >>> adapted_dist = VersionlessRequirement(dist)
+ >>> str(adapted_dist.as_requirement())
+ 'foo'
+ """
+
+ def __init__(self, dist):
+ self.__dist = dist
+
+ def __getattr__(self, name):
+ return getattr(self.__dist, name)
+
+ def as_requirement(self):
+ return self.project_name
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/dist_info.py b/monEnvTP/lib/python3.8/site-packages/setuptools/command/dist_info.py
new file mode 100644
index 0000000000000000000000000000000000000000..c45258fa03a3ddd6a73db4514365f8741d16ca86
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/command/dist_info.py
@@ -0,0 +1,36 @@
+"""
+Create a dist_info directory
+As defined in the wheel specification
+"""
+
+import os
+
+from distutils.core import Command
+from distutils import log
+
+
+class dist_info(Command):
+
+ description = 'create a .dist-info directory'
+
+ user_options = [
+ ('egg-base=', 'e', "directory containing .egg-info directories"
+ " (default: top of the source tree)"),
+ ]
+
+ def initialize_options(self):
+ self.egg_base = None
+
+ def finalize_options(self):
+ pass
+
+ def run(self):
+ egg_info = self.get_finalized_command('egg_info')
+ egg_info.egg_base = self.egg_base
+ egg_info.finalize_options()
+ egg_info.run()
+ dist_info_dir = egg_info.egg_info[:-len('.egg-info')] + '.dist-info'
+ log.info("creating '{}'".format(os.path.abspath(dist_info_dir)))
+
+ bdist_wheel = self.get_finalized_command('bdist_wheel')
+ bdist_wheel.egg2dist(egg_info.egg_info, dist_info_dir)
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/easy_install.py b/monEnvTP/lib/python3.8/site-packages/setuptools/command/easy_install.py
new file mode 100644
index 0000000000000000000000000000000000000000..1f6839cb3b78fe8d63f709b6ff9abb15bf276b6e
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/command/easy_install.py
@@ -0,0 +1,2402 @@
+#!/usr/bin/env python
+"""
+Easy Install
+------------
+
+A tool for doing automatic download/extract/build of distutils-based Python
+packages. For detailed documentation, see the accompanying EasyInstall.txt
+file, or visit the `EasyInstall home page`__.
+
+__ https://setuptools.readthedocs.io/en/latest/easy_install.html
+
+"""
+
+from glob import glob
+from distutils.util import get_platform
+from distutils.util import convert_path, subst_vars
+from distutils.errors import (
+ DistutilsArgError, DistutilsOptionError,
+ DistutilsError, DistutilsPlatformError,
+)
+from distutils.command.install import INSTALL_SCHEMES, SCHEME_KEYS
+from distutils import log, dir_util
+from distutils.command.build_scripts import first_line_re
+from distutils.spawn import find_executable
+import sys
+import os
+import zipimport
+import shutil
+import tempfile
+import zipfile
+import re
+import stat
+import random
+import textwrap
+import warnings
+import site
+import struct
+import contextlib
+import subprocess
+import shlex
+import io
+
+
+from sysconfig import get_config_vars, get_path
+
+from setuptools import SetuptoolsDeprecationWarning
+
+from setuptools.extern import six
+from setuptools.extern.six.moves import configparser, map
+
+from setuptools import Command
+from setuptools.sandbox import run_setup
+from setuptools.py27compat import rmtree_safe
+from setuptools.command import setopt
+from setuptools.archive_util import unpack_archive
+from setuptools.package_index import (
+ PackageIndex, parse_requirement_arg, URL_SCHEME,
+)
+from setuptools.command import bdist_egg, egg_info
+from setuptools.wheel import Wheel
+from pkg_resources import (
+ yield_lines, normalize_path, resource_string, ensure_directory,
+ get_distribution, find_distributions, Environment, Requirement,
+ Distribution, PathMetadata, EggMetadata, WorkingSet, DistributionNotFound,
+ VersionConflict, DEVELOP_DIST,
+)
+import pkg_resources.py31compat
+
+__metaclass__ = type
+
+# Turn on PEP440Warnings
+warnings.filterwarnings("default", category=pkg_resources.PEP440Warning)
+
+__all__ = [
+ 'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg',
+ 'main', 'get_exe_prefixes',
+]
+
+
+def is_64bit():
+ return struct.calcsize("P") == 8
+
+
+def samefile(p1, p2):
+ """
+ Determine if two paths reference the same file.
+
+ Augments os.path.samefile to work on Windows and
+ suppresses errors if the path doesn't exist.
+ """
+ both_exist = os.path.exists(p1) and os.path.exists(p2)
+ use_samefile = hasattr(os.path, 'samefile') and both_exist
+ if use_samefile:
+ return os.path.samefile(p1, p2)
+ norm_p1 = os.path.normpath(os.path.normcase(p1))
+ norm_p2 = os.path.normpath(os.path.normcase(p2))
+ return norm_p1 == norm_p2
+
+
+if six.PY2:
+
+ def _to_bytes(s):
+ return s
+
+ def isascii(s):
+ try:
+ six.text_type(s, 'ascii')
+ return True
+ except UnicodeError:
+ return False
+else:
+
+ def _to_bytes(s):
+ return s.encode('utf8')
+
+ def isascii(s):
+ try:
+ s.encode('ascii')
+ return True
+ except UnicodeError:
+ return False
+
+
+_one_liner = lambda text: textwrap.dedent(text).strip().replace('\n', '; ')
+
+
+class easy_install(Command):
+ """Manage a download/build/install process"""
+ description = "Find/get/install Python packages"
+ command_consumes_arguments = True
+
+ user_options = [
+ ('prefix=', None, "installation prefix"),
+ ("zip-ok", "z", "install package as a zipfile"),
+ ("multi-version", "m", "make apps have to require() a version"),
+ ("upgrade", "U", "force upgrade (searches PyPI for latest versions)"),
+ ("install-dir=", "d", "install package to DIR"),
+ ("script-dir=", "s", "install scripts to DIR"),
+ ("exclude-scripts", "x", "Don't install scripts"),
+ ("always-copy", "a", "Copy all needed packages to install dir"),
+ ("index-url=", "i", "base URL of Python Package Index"),
+ ("find-links=", "f", "additional URL(s) to search for packages"),
+ ("build-directory=", "b",
+ "download/extract/build in DIR; keep the results"),
+ ('optimize=', 'O',
+ "also compile with optimization: -O1 for \"python -O\", "
+ "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
+ ('record=', None,
+ "filename in which to record list of installed files"),
+ ('always-unzip', 'Z', "don't install as a zipfile, no matter what"),
+ ('site-dirs=', 'S', "list of directories where .pth files work"),
+ ('editable', 'e', "Install specified packages in editable form"),
+ ('no-deps', 'N', "don't install dependencies"),
+ ('allow-hosts=', 'H', "pattern(s) that hostnames must match"),
+ ('local-snapshots-ok', 'l',
+ "allow building eggs from local checkouts"),
+ ('version', None, "print version information and exit"),
+ ('install-layout=', None, "installation layout to choose (known values: deb)"),
+ ('force-installation-into-system-dir', '0', "force installation into /usr"),
+ ('no-find-links', None,
+ "Don't load find-links defined in packages being installed")
+ ]
+ boolean_options = [
+ 'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy',
+ 'editable',
+ 'no-deps', 'local-snapshots-ok', 'version', 'force-installation-into-system-dir'
+ ]
+
+ if site.ENABLE_USER_SITE:
+ help_msg = "install in user site-package '%s'" % site.USER_SITE
+ user_options.append(('user', None, help_msg))
+ boolean_options.append('user')
+
+ negative_opt = {'always-unzip': 'zip-ok'}
+ create_index = PackageIndex
+
+ def initialize_options(self):
+ # the --user option seems to be an opt-in one,
+ # so the default should be False.
+ self.user = 0
+ self.zip_ok = self.local_snapshots_ok = None
+ self.install_dir = self.script_dir = self.exclude_scripts = None
+ self.index_url = None
+ self.find_links = None
+ self.build_directory = None
+ self.args = None
+ self.optimize = self.record = None
+ self.upgrade = self.always_copy = self.multi_version = None
+ self.editable = self.no_deps = self.allow_hosts = None
+ self.root = self.prefix = self.no_report = None
+ self.version = None
+ self.install_purelib = None # for pure module distributions
+ self.install_platlib = None # non-pure (dists w/ extensions)
+ self.install_headers = None # for C/C++ headers
+ self.install_lib = None # set to either purelib or platlib
+ self.install_scripts = None
+ self.install_data = None
+ self.install_base = None
+ self.install_platbase = None
+ if site.ENABLE_USER_SITE:
+ self.install_userbase = site.USER_BASE
+ self.install_usersite = site.USER_SITE
+ else:
+ self.install_userbase = None
+ self.install_usersite = None
+ self.no_find_links = None
+
+ # Options not specifiable via command line
+ self.package_index = None
+ self.pth_file = self.always_copy_from = None
+ self.site_dirs = None
+ self.installed_projects = {}
+ self.sitepy_installed = False
+ # enable custom installation, known values: deb
+ self.install_layout = None
+ self.force_installation_into_system_dir = None
+ self.multiarch = None
+
+ # Always read easy_install options, even if we are subclassed, or have
+ # an independent instance created. This ensures that defaults will
+ # always come from the standard configuration file(s)' "easy_install"
+ # section, even if this is a "develop" or "install" command, or some
+ # other embedding.
+ self._dry_run = None
+ self.verbose = self.distribution.verbose
+ self.distribution._set_command_options(
+ self, self.distribution.get_option_dict('easy_install')
+ )
+
+ def delete_blockers(self, blockers):
+ extant_blockers = (
+ filename for filename in blockers
+ if os.path.exists(filename) or os.path.islink(filename)
+ )
+ list(map(self._delete_path, extant_blockers))
+
+ def _delete_path(self, path):
+ log.info("Deleting %s", path)
+ if self.dry_run:
+ return
+
+ is_tree = os.path.isdir(path) and not os.path.islink(path)
+ remover = rmtree if is_tree else os.unlink
+ remover(path)
+
+ @staticmethod
+ def _render_version():
+ """
+ Render the Setuptools version and installation details, then exit.
+ """
+ ver = '{}.{}'.format(*sys.version_info)
+ dist = get_distribution('setuptools')
+ tmpl = 'setuptools {dist.version} from {dist.location} (Python {ver})'
+ print(tmpl.format(**locals()))
+ raise SystemExit()
+
+ def finalize_options(self):
+ self.version and self._render_version()
+
+ py_version = sys.version.split()[0]
+ prefix, exec_prefix = get_config_vars('prefix', 'exec_prefix')
+
+ self.config_vars = {
+ 'dist_name': self.distribution.get_name(),
+ 'dist_version': self.distribution.get_version(),
+ 'dist_fullname': self.distribution.get_fullname(),
+ 'py_version': py_version,
+ 'py_version_short': py_version[0:3],
+ 'py_version_nodot': py_version[0] + py_version[2],
+ 'sys_prefix': prefix,
+ 'prefix': prefix,
+ 'sys_exec_prefix': exec_prefix,
+ 'exec_prefix': exec_prefix,
+ # Only python 3.2+ has abiflags
+ 'abiflags': getattr(sys, 'abiflags', ''),
+ }
+
+ if site.ENABLE_USER_SITE:
+ self.config_vars['userbase'] = self.install_userbase
+ self.config_vars['usersite'] = self.install_usersite
+
+ self._fix_install_dir_for_user_site()
+
+ self.expand_basedirs()
+ self.expand_dirs()
+
+ if self.install_layout:
+ if not self.install_layout.lower() in ['deb']:
+ raise DistutilsOptionError("unknown value for --install-layout")
+ self.install_layout = self.install_layout.lower()
+
+ import sysconfig
+ if sys.version_info[:2] >= (3, 3):
+ self.multiarch = sysconfig.get_config_var('MULTIARCH')
+
+ self._expand(
+ 'install_dir', 'script_dir', 'build_directory',
+ 'site_dirs',
+ )
+ # If a non-default installation directory was specified, default the
+ # script directory to match it.
+ if self.script_dir is None:
+ self.script_dir = self.install_dir
+
+ if self.no_find_links is None:
+ self.no_find_links = False
+
+ # Let install_dir get set by install_lib command, which in turn
+ # gets its info from the install command, and takes into account
+ # --prefix and --home and all that other crud.
+ self.set_undefined_options(
+ 'install_lib', ('install_dir', 'install_dir')
+ )
+ # Likewise, set default script_dir from 'install_scripts.install_dir'
+ self.set_undefined_options(
+ 'install_scripts', ('install_dir', 'script_dir')
+ )
+
+ if self.user and self.install_purelib:
+ self.install_dir = self.install_purelib
+ self.script_dir = self.install_scripts
+
+ if self.prefix == '/usr' and not self.force_installation_into_system_dir:
+ raise DistutilsOptionError("""installation into /usr
+
+Trying to install into the system managed parts of the file system. Please
+consider to install to another location, or use the option
+--force-installation-into-system-dir to overwrite this warning.
+""")
+
+ # default --record from the install command
+ self.set_undefined_options('install', ('record', 'record'))
+ # Should this be moved to the if statement below? It's not used
+ # elsewhere
+ normpath = map(normalize_path, sys.path)
+ self.all_site_dirs = get_site_dirs()
+ if self.site_dirs is not None:
+ site_dirs = [
+ os.path.expanduser(s.strip()) for s in
+ self.site_dirs.split(',')
+ ]
+ for d in site_dirs:
+ if not os.path.isdir(d):
+ log.warn("%s (in --site-dirs) does not exist", d)
+ elif normalize_path(d) not in normpath:
+ raise DistutilsOptionError(
+ d + " (in --site-dirs) is not on sys.path"
+ )
+ else:
+ self.all_site_dirs.append(normalize_path(d))
+ if not self.editable:
+ self.check_site_dir()
+ self.index_url = self.index_url or "https://pypi.org/simple/"
+ self.shadow_path = self.all_site_dirs[:]
+ for path_item in self.install_dir, normalize_path(self.script_dir):
+ if path_item not in self.shadow_path:
+ self.shadow_path.insert(0, path_item)
+
+ if self.allow_hosts is not None:
+ hosts = [s.strip() for s in self.allow_hosts.split(',')]
+ else:
+ hosts = ['*']
+ if self.package_index is None:
+ self.package_index = self.create_index(
+ self.index_url, search_path=self.shadow_path, hosts=hosts,
+ )
+ self.local_index = Environment(self.shadow_path + sys.path)
+
+ if self.find_links is not None:
+ if isinstance(self.find_links, six.string_types):
+ self.find_links = self.find_links.split()
+ else:
+ self.find_links = []
+ if self.local_snapshots_ok:
+ self.package_index.scan_egg_links(self.shadow_path + sys.path)
+ if not self.no_find_links:
+ self.package_index.add_find_links(self.find_links)
+ self.set_undefined_options('install_lib', ('optimize', 'optimize'))
+ if not isinstance(self.optimize, int):
+ try:
+ self.optimize = int(self.optimize)
+ if not (0 <= self.optimize <= 2):
+ raise ValueError
+ except ValueError:
+ raise DistutilsOptionError("--optimize must be 0, 1, or 2")
+
+ if self.editable and not self.build_directory:
+ raise DistutilsArgError(
+ "Must specify a build directory (-b) when using --editable"
+ )
+ if not self.args:
+ raise DistutilsArgError(
+ "No urls, filenames, or requirements specified (see --help)")
+
+ self.outputs = []
+
+ def _fix_install_dir_for_user_site(self):
+ """
+ Fix the install_dir if "--user" was used.
+ """
+ if not self.user or not site.ENABLE_USER_SITE:
+ return
+
+ self.create_home_path()
+ if self.install_userbase is None:
+ msg = "User base directory is not specified"
+ raise DistutilsPlatformError(msg)
+ self.install_base = self.install_platbase = self.install_userbase
+ scheme_name = os.name.replace('posix', 'unix') + '_user'
+ self.select_scheme(scheme_name)
+
+ def _expand_attrs(self, attrs):
+ for attr in attrs:
+ val = getattr(self, attr)
+ if val is not None:
+ if os.name == 'posix' or os.name == 'nt':
+ val = os.path.expanduser(val)
+ val = subst_vars(val, self.config_vars)
+ setattr(self, attr, val)
+
+ def expand_basedirs(self):
+ """Calls `os.path.expanduser` on install_base, install_platbase and
+ root."""
+ self._expand_attrs(['install_base', 'install_platbase', 'root'])
+
+ def expand_dirs(self):
+ """Calls `os.path.expanduser` on install dirs."""
+ dirs = [
+ 'install_purelib',
+ 'install_platlib',
+ 'install_lib',
+ 'install_headers',
+ 'install_scripts',
+ 'install_data',
+ ]
+ self._expand_attrs(dirs)
+
+ def run(self, show_deprecation=True):
+ if show_deprecation:
+ self.announce(
+ "WARNING: The easy_install command is deprecated "
+ "and will be removed in a future version."
+ , log.WARN,
+ )
+ if self.verbose != self.distribution.verbose:
+ log.set_verbosity(self.verbose)
+ try:
+ for spec in self.args:
+ self.easy_install(spec, not self.no_deps)
+ if self.record:
+ outputs = list(sorted(self.outputs))
+ if self.root: # strip any package prefix
+ root_len = len(self.root)
+ for counter in range(len(outputs)):
+ outputs[counter] = outputs[counter][root_len:]
+ from distutils import file_util
+
+ self.execute(
+ file_util.write_file, (self.record, outputs),
+ "writing list of installed files to '%s'" %
+ self.record
+ )
+ self.warn_deprecated_options()
+ finally:
+ log.set_verbosity(self.distribution.verbose)
+
+ def pseudo_tempname(self):
+ """Return a pseudo-tempname base in the install directory.
+ This code is intentionally naive; if a malicious party can write to
+ the target directory you're already in deep doodoo.
+ """
+ try:
+ pid = os.getpid()
+ except Exception:
+ pid = random.randint(0, sys.maxsize)
+ return os.path.join(self.install_dir, "test-easy-install-%s" % pid)
+
+ def warn_deprecated_options(self):
+ pass
+
    def check_site_dir(self):
        """Verify that self.install_dir is .pth-capable dir, if needed"""

        instdir = normalize_path(self.install_dir)
        pth_file = os.path.join(instdir, 'easy-install.pth')

        # Is it a configured, PYTHONPATH, implicit, or explicit site dir?
        is_site_dir = instdir in self.all_site_dirs

        if not is_site_dir and not self.multi_version:
            # No? Then directly test whether it does .pth file processing
            is_site_dir = self.check_pth_processing()
        else:
            # make sure we can write to target dir
            # (create-and-delete a throwaway file; any OSError/IOError
            # means the directory is not writable)
            testfile = self.pseudo_tempname() + '.write-test'
            test_exists = os.path.exists(testfile)
            try:
                if test_exists:
                    os.unlink(testfile)
                open(testfile, 'w').close()
                os.unlink(testfile)
            except (OSError, IOError):
                self.cant_write_to_target()

        if not is_site_dir and not self.multi_version:
            # Can't install non-multi to non-site dir
            raise DistutilsError(self.no_default_version_msg())

        if is_site_dir:
            # Lazily create the easy-install.pth manager for this dir.
            if self.pth_file is None:
                self.pth_file = PthDistributions(pth_file, self.all_site_dirs)
        else:
            self.pth_file = None

        if instdir not in map(normalize_path, _pythonpath()):
            # only PYTHONPATH dirs need a site.py, so pretend it's there
            self.sitepy_installed = True
        elif self.multi_version and not os.path.exists(pth_file):
            self.sitepy_installed = True  # don't need site.py in this case
            self.pth_file = None  # and don't create a .pth file
        self.install_dir = instdir
+
    # Error-message templates shown when the install directory can't be
    # written to.  These are runtime strings filled in by
    # cant_write_to_target(); do not reflow or reword them casually.
    __cant_write_msg = textwrap.dedent("""
        can't create or remove files in install directory

        The following error occurred while trying to add or remove files in the
        installation directory:

            %s

        The installation directory you specified (via --install-dir, --prefix, or
        the distutils default setting) was:

            %s
        """).lstrip()

    # Appended when the directory simply doesn't exist.
    __not_exists_id = textwrap.dedent("""
        This directory does not currently exist.  Please create it and try again, or
        choose a different installation directory (using the -d or --install-dir
        option).
        """).lstrip()

    # Appended when the directory exists but is not writable.
    __access_msg = textwrap.dedent("""
        Perhaps your account does not have write access to this directory?  If the
        installation directory is a system-owned directory, you may need to sign in
        as the administrator or "root" account.  If you do not have administrative
        access to this machine, you may wish to choose a different installation
        directory, preferably one that is listed in your PYTHONPATH environment
        variable.

        For information on other options, you may wish to consult the
        documentation at:

          https://setuptools.readthedocs.io/en/latest/easy_install.html

        Please make the appropriate changes for your system and try again.
        """).lstrip()
+
+ def cant_write_to_target(self):
+ msg = self.__cant_write_msg % (sys.exc_info()[1], self.install_dir,)
+
+ if not os.path.exists(self.install_dir):
+ msg += '\n' + self.__not_exists_id
+ else:
+ msg += '\n' + self.__access_msg
+ raise DistutilsError(msg)
+
    def check_pth_processing(self):
        """Empirically verify whether .pth files are supported in inst. dir"""
        instdir = self.install_dir
        log.info("Checking .pth file support in %s", instdir)
        # Strategy: write a throwaway .pth file whose one-liner creates an
        # "ok" marker file, spawn a fresh interpreter, and see whether the
        # marker appears (i.e. the interpreter processed the .pth file).
        pth_file = self.pseudo_tempname() + ".pth"
        ok_file = pth_file + '.ok'
        ok_exists = os.path.exists(ok_file)
        tmpl = _one_liner("""
            import os
            f = open({ok_file!r}, 'w')
            f.write('OK')
            f.close()
            """) + '\n'
        try:
            if ok_exists:
                os.unlink(ok_file)
            dirname = os.path.dirname(ok_file)
            pkg_resources.py31compat.makedirs(dirname, exist_ok=True)
            f = open(pth_file, 'w')
        except (OSError, IOError):
            # Couldn't even create the probe file -> dir isn't writable.
            self.cant_write_to_target()
        else:
            try:
                f.write(tmpl.format(**locals()))
                f.close()
                f = None
                executable = sys.executable
                if os.name == 'nt':
                    dirname, basename = os.path.split(executable)
                    alt = os.path.join(dirname, 'pythonw.exe')
                    use_alt = (
                        basename.lower() == 'python.exe' and
                        os.path.exists(alt)
                    )
                    if use_alt:
                        # use pythonw.exe to avoid opening a console window
                        executable = alt

                from distutils.spawn import spawn

                # -E ignores PYTHONPATH etc. so only the .pth machinery
                # can create the marker file.
                spawn([executable, '-E', '-c', 'pass'], 0)

                if os.path.exists(ok_file):
                    log.info(
                        "TEST PASSED: %s appears to support .pth files",
                        instdir
                    )
                    return True
            finally:
                # Always clean up the probe files, pass or fail.
                if f:
                    f.close()
                if os.path.exists(ok_file):
                    os.unlink(ok_file)
                if os.path.exists(pth_file):
                    os.unlink(pth_file)
        if not self.multi_version:
            log.warn("TEST FAILED: %s does NOT support .pth files", instdir)
        return False
+
+ def install_egg_scripts(self, dist):
+ """Write all the scripts for `dist`, unless scripts are excluded"""
+ if not self.exclude_scripts and dist.metadata_isdir('scripts'):
+ for script_name in dist.metadata_listdir('scripts'):
+ if dist.metadata_isdir('scripts/' + script_name):
+ # The "script" is a directory, likely a Python 3
+ # __pycache__ directory, so skip it.
+ continue
+ self.install_script(
+ dist, script_name,
+ dist.get_metadata('scripts/' + script_name)
+ )
+ self.install_wrapper_scripts(dist)
+
+ def add_output(self, path):
+ if os.path.isdir(path):
+ for base, dirs, files in os.walk(path):
+ for filename in files:
+ self.outputs.append(os.path.join(base, filename))
+ else:
+ self.outputs.append(path)
+
+ def not_editable(self, spec):
+ if self.editable:
+ raise DistutilsArgError(
+ "Invalid argument %r: you can't use filenames or URLs "
+ "with --editable (except via the --find-links option)."
+ % (spec,)
+ )
+
+ def check_editable(self, spec):
+ if not self.editable:
+ return
+
+ if os.path.exists(os.path.join(self.build_directory, spec.key)):
+ raise DistutilsArgError(
+ "%r already exists in %s; can't do a checkout there" %
+ (spec.key, self.build_directory)
+ )
+
    @contextlib.contextmanager
    def _tmpdir(self):
        # Context manager yielding a fresh temp dir for download/extract
        # work; the dir is removed on exit (the `and` short-circuit skips
        # removal if it no longer exists).
        tmpdir = tempfile.mkdtemp(prefix=u"easy_install-")
        try:
            # cast to str as workaround for #709 and #710 and #712
            yield str(tmpdir)
        finally:
            os.path.exists(tmpdir) and rmtree(rmtree_safe(tmpdir))
+
    def easy_install(self, spec, deps=False):
        """Install a single requirement.

        spec -- a Requirement, a local filename/directory, or a URL.
        deps -- when True, also resolve and install dependencies.
        Returns the installed Distribution (or None for some direct
        file installs via install_item).
        """
        if not self.editable:
            self.install_site_py()

        with self._tmpdir() as tmpdir:
            if not isinstance(spec, Requirement):
                if URL_SCHEME(spec):
                    # It's a url, download it to tmpdir and process
                    self.not_editable(spec)
                    dl = self.package_index.download(spec, tmpdir)
                    return self.install_item(None, dl, tmpdir, deps, True)

                elif os.path.exists(spec):
                    # Existing file or directory, just process it directly
                    self.not_editable(spec)
                    return self.install_item(None, spec, tmpdir, deps, True)
                else:
                    # Treat the string as a requirement specifier.
                    spec = parse_requirement_arg(spec)

            self.check_editable(spec)
            dist = self.package_index.fetch_distribution(
                spec, tmpdir, self.upgrade, self.editable,
                not self.always_copy, self.local_index
            )
            if dist is None:
                msg = "Could not find suitable distribution for %r" % spec
                if self.always_copy:
                    msg += " (--always-copy skips system and development eggs)"
                raise DistutilsError(msg)
            elif dist.precedence == DEVELOP_DIST:
                # .egg-info dists don't need installing, just process deps
                self.process_distribution(spec, dist, deps, "Using")
                return dist
            else:
                return self.install_item(spec, dist.location, tmpdir, deps)
+
    def install_item(self, spec, download, tmpdir, deps, install_needed=False):
        """Install (or just activate) the artifact at `download`.

        Decides whether a real install is required, builds/copies eggs as
        needed, processes each resulting distribution, and returns the one
        matching `spec` (falls through to None when no dist matches).
        """

        # Installation is also needed if file in tmpdir or is not an egg
        install_needed = install_needed or self.always_copy
        install_needed = install_needed or os.path.dirname(download) == tmpdir
        install_needed = install_needed or not download.endswith('.egg')
        install_needed = install_needed or (
            self.always_copy_from is not None and
            os.path.dirname(normalize_path(download)) ==
            normalize_path(self.always_copy_from)
        )

        if spec and not install_needed:
            # at this point, we know it's a local .egg, we just don't know if
            # it's already installed.
            # (for/else: the else arm runs only when no break occurred,
            # i.e. the egg was not found in the local index)
            for dist in self.local_index[spec.project_name]:
                if dist.location == download:
                    break
            else:
                install_needed = True  # it's not in the local index

        log.info("Processing %s", os.path.basename(download))

        if install_needed:
            dists = self.install_eggs(spec, download, tmpdir)
            for dist in dists:
                self.process_distribution(spec, dist, deps)
        else:
            dists = [self.egg_distribution(download)]
            self.process_distribution(spec, dists[0], deps, "Using")

        if spec is not None:
            for dist in dists:
                if dist in spec:
                    return dist
+
+ def select_scheme(self, name):
+ """Sets the install directories by applying the install schemes."""
+ # it's the caller's problem if they supply a bad name!
+ scheme = INSTALL_SCHEMES[name]
+ for key in SCHEME_KEYS:
+ attrname = 'install_' + key
+ if getattr(self, attrname) is None:
+ setattr(self, attrname, scheme[key])
+
    def process_distribution(self, requirement, dist, deps=True, *info):
        """Register a freshly installed `dist` (pth entries, indexes,
        scripts), report it, then optionally resolve and install its
        dependencies via a throwaway WorkingSet."""
        self.update_pth(dist)
        self.package_index.add(dist)
        # Replace any stale entry for the same dist in the local index.
        if dist in self.local_index[dist.key]:
            self.local_index.remove(dist)
        self.local_index.add(dist)
        self.install_egg_scripts(dist)
        self.installed_projects[dist.key] = dist
        log.info(self.installation_report(requirement, dist, *info))
        if (dist.has_metadata('dependency_links.txt') and
                not self.no_find_links):
            self.package_index.add_find_links(
                dist.get_metadata_lines('dependency_links.txt')
            )
        if not deps and not self.always_copy:
            return
        elif requirement is not None and dist.key != requirement.key:
            log.warn("Skipping dependencies for %s", dist)
            return  # XXX this is not the distribution we were looking for
        elif requirement is None or dist not in requirement:
            # if we wound up with a different version, resolve what we've got
            distreq = dist.as_requirement()
            requirement = Requirement(str(distreq))
        log.info("Processing dependencies for %s", requirement)
        try:
            distros = WorkingSet([]).resolve(
                [requirement], self.local_index, self.easy_install
            )
        except DistributionNotFound as e:
            raise DistutilsError(str(e))
        except VersionConflict as e:
            raise DistutilsError(e.report())
        if self.always_copy or self.always_copy_from:
            # Force all the relevant distros to be copied or activated
            for dist in distros:
                if dist.key not in self.installed_projects:
                    self.easy_install(dist.as_requirement())
        log.info("Finished processing dependencies for %s", requirement)
+
+ def should_unzip(self, dist):
+ if self.zip_ok is not None:
+ return not self.zip_ok
+ if dist.has_metadata('not-zip-safe'):
+ return True
+ if not dist.has_metadata('zip-safe'):
+ return True
+ return False
+
    def maybe_move(self, spec, dist_filename, setup_base):
        """Move an unpacked source tree into --build-directory and return
        the new setup_base (or the old one if the target already exists)."""
        dst = os.path.join(self.build_directory, spec.key)
        if os.path.exists(dst):
            msg = (
                "%r already exists in %s; build directory %s will not be kept"
            )
            log.warn(msg, spec.key, self.build_directory, setup_base)
            return setup_base
        if os.path.isdir(dist_filename):
            setup_base = dist_filename
        else:
            if os.path.dirname(dist_filename) == setup_base:
                os.unlink(dist_filename)  # get it out of the tmp dir
            contents = os.listdir(setup_base)
            if len(contents) == 1:
                dist_filename = os.path.join(setup_base, contents[0])
                if os.path.isdir(dist_filename):
                    # if the only thing there is a directory, move it instead
                    setup_base = dist_filename
        ensure_directory(dst)
        shutil.move(setup_base, dst)
        return dst
+
+ def install_wrapper_scripts(self, dist):
+ if self.exclude_scripts:
+ return
+ for args in ScriptWriter.best().get_args(dist):
+ self.write_script(*args)
+
    def install_script(self, dist, script_name, script_text, dev_path=None):
        """Generate a legacy script wrapper and install it"""
        spec = str(dist.as_requirement())
        is_script = is_python_script(script_text, script_name)

        if is_script:
            # NOTE: the template is filled from locals() -- the local names
            # `spec` and `dev_path` must exist for the %()s substitutions.
            body = self._load_template(dev_path) % locals()
            script_text = ScriptWriter.get_header(script_text) + body
        # Write in binary mode so line endings survive untouched.
        self.write_script(script_name, _to_bytes(script_text), 'b')
+
+ @staticmethod
+ def _load_template(dev_path):
+ """
+ There are a couple of template scripts in the package. This
+ function loads one of them and prepares it for use.
+ """
+ # See https://github.com/pypa/setuptools/issues/134 for info
+ # on script file naming and downstream issues with SVR4
+ name = 'script.tmpl'
+ if dev_path:
+ name = name.replace('.tmpl', ' (dev).tmpl')
+
+ raw_bytes = resource_string('setuptools', name)
+ return raw_bytes.decode('utf-8')
+
    def write_script(self, script_name, contents, mode="t", blockers=()):
        """Write an executable file to the scripts directory"""
        self.delete_blockers(  # clean up old .py/.pyw w/o a script
            [os.path.join(self.script_dir, x) for x in blockers]
        )
        log.info("Installing %s script to %s", script_name, self.script_dir)
        target = os.path.join(self.script_dir, script_name)
        self.add_output(target)

        if self.dry_run:
            return

        mask = current_umask()
        ensure_directory(target)
        if os.path.exists(target):
            os.unlink(target)
        # mode is "t" or "b"; contents must match (text vs bytes).
        with open(target, "w" + mode) as f:
            f.write(contents)
        # Make executable, honoring the user's umask.
        chmod(target, 0o777 - mask)
+
    def install_eggs(self, spec, dist_filename, tmpdir):
        """Install whatever `dist_filename` is (.egg/.exe/.whl/source
        tree/archive) and return the list of installed Distributions."""
        # .egg dirs or files are already built, so just return them
        if dist_filename.lower().endswith('.egg'):
            return [self.install_egg(dist_filename, tmpdir)]
        elif dist_filename.lower().endswith('.exe'):
            return [self.install_exe(dist_filename, tmpdir)]
        elif dist_filename.lower().endswith('.whl'):
            return [self.install_wheel(dist_filename, tmpdir)]

        # Anything else, try to extract and build
        setup_base = tmpdir
        if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'):
            unpack_archive(dist_filename, tmpdir, self.unpack_progress)
        elif os.path.isdir(dist_filename):
            setup_base = os.path.abspath(dist_filename)

        if (setup_base.startswith(tmpdir)  # something we downloaded
                and self.build_directory and spec is not None):
            setup_base = self.maybe_move(spec, dist_filename, setup_base)

        # Find the setup.py file
        setup_script = os.path.join(setup_base, 'setup.py')

        if not os.path.exists(setup_script):
            # Allow a single-subdirectory layout (tarball with a root dir).
            setups = glob(os.path.join(setup_base, '*', 'setup.py'))
            if not setups:
                raise DistutilsError(
                    "Couldn't find a setup script in %s" %
                    os.path.abspath(dist_filename)
                )
            if len(setups) > 1:
                raise DistutilsError(
                    "Multiple setup scripts in %s" %
                    os.path.abspath(dist_filename)
                )
            setup_script = setups[0]

        # Now run it, and return the result
        if self.editable:
            log.info(self.report_editable(spec, setup_script))
            return []
        else:
            return self.build_and_install(setup_script, setup_base)
+
+ def egg_distribution(self, egg_path):
+ if os.path.isdir(egg_path):
+ metadata = PathMetadata(egg_path, os.path.join(egg_path,
+ 'EGG-INFO'))
+ else:
+ metadata = EggMetadata(zipimport.zipimporter(egg_path))
+ return Distribution.from_filename(egg_path, metadata=metadata)
+
    def install_egg(self, egg_path, tmpdir):
        """Copy/move/extract the egg at `egg_path` into install_dir and
        return the resulting Distribution."""
        destination = os.path.join(
            self.install_dir,
            os.path.basename(egg_path),
        )
        destination = os.path.abspath(destination)
        if not self.dry_run:
            ensure_directory(destination)

        dist = self.egg_distribution(egg_path)
        if not samefile(egg_path, destination):
            # Remove whatever currently occupies the destination.
            if os.path.isdir(destination) and not os.path.islink(destination):
                dir_util.remove_tree(destination, dry_run=self.dry_run)
            elif os.path.exists(destination):
                self.execute(
                    os.unlink,
                    (destination,),
                    "Removing " + destination,
                )
            try:
                new_dist_is_zipped = False
                # Pick the transfer strategy: move if the source is our own
                # temp dir, otherwise copy; extract if the egg must be
                # installed unzipped.
                if os.path.isdir(egg_path):
                    if egg_path.startswith(tmpdir):
                        f, m = shutil.move, "Moving"
                    else:
                        f, m = shutil.copytree, "Copying"
                elif self.should_unzip(dist):
                    self.mkpath(destination)
                    f, m = self.unpack_and_compile, "Extracting"
                else:
                    new_dist_is_zipped = True
                    if egg_path.startswith(tmpdir):
                        f, m = shutil.move, "Moving"
                    else:
                        f, m = shutil.copy2, "Copying"
                self.execute(
                    f,
                    (egg_path, destination),
                    (m + " %s to %s") % (
                        os.path.basename(egg_path),
                        os.path.dirname(destination)
                    ),
                )
                update_dist_caches(
                    destination,
                    fix_zipimporter_caches=new_dist_is_zipped,
                )
            except Exception:
                update_dist_caches(destination, fix_zipimporter_caches=False)
                raise

        self.add_output(destination)
        return self.egg_distribution(destination)
+
+ def install_exe(self, dist_filename, tmpdir):
+ # See if it's valid, get data
+ cfg = extract_wininst_cfg(dist_filename)
+ if cfg is None:
+ raise DistutilsError(
+ "%s is not a valid distutils Windows .exe" % dist_filename
+ )
+ # Create a dummy distribution object until we build the real distro
+ dist = Distribution(
+ None,
+ project_name=cfg.get('metadata', 'name'),
+ version=cfg.get('metadata', 'version'), platform=get_platform(),
+ )
+
+ # Convert the .exe to an unpacked egg
+ egg_path = os.path.join(tmpdir, dist.egg_name() + '.egg')
+ dist.location = egg_path
+ egg_tmp = egg_path + '.tmp'
+ _egg_info = os.path.join(egg_tmp, 'EGG-INFO')
+ pkg_inf = os.path.join(_egg_info, 'PKG-INFO')
+ ensure_directory(pkg_inf) # make sure EGG-INFO dir exists
+ dist._provider = PathMetadata(egg_tmp, _egg_info) # XXX
+ self.exe_to_egg(dist_filename, egg_tmp)
+
+ # Write EGG-INFO/PKG-INFO
+ if not os.path.exists(pkg_inf):
+ f = open(pkg_inf, 'w')
+ f.write('Metadata-Version: 1.0\n')
+ for k, v in cfg.items('metadata'):
+ if k != 'target_version':
+ f.write('%s: %s\n' % (k.replace('_', '-').title(), v))
+ f.close()
+ script_dir = os.path.join(_egg_info, 'scripts')
+ # delete entry-point scripts to avoid duping
+ self.delete_blockers([
+ os.path.join(script_dir, args[0])
+ for args in ScriptWriter.get_args(dist)
+ ])
+ # Build .egg file from tmpdir
+ bdist_egg.make_zipfile(
+ egg_path, egg_tmp, verbose=self.verbose, dry_run=self.dry_run,
+ )
+ # install the .egg
+ return self.install_egg(egg_path, tmpdir)
+
    def exe_to_egg(self, dist_filename, egg_tmp):
        """Extract a bdist_wininst to the directories an egg would use"""
        # Check for .pth file and set up prefix translations
        prefixes = get_exe_prefixes(dist_filename)
        to_compile = []
        native_libs = []
        top_level = {}

        def process(src, dst):
            # Progress filter for unpack_archive: map wininst paths to egg
            # paths, recording native libs and .py files along the way.
            # Returns the destination path, or None to skip the file.
            s = src.lower()
            for old, new in prefixes:
                if s.startswith(old):
                    src = new + src[len(old):]
                    parts = src.split('/')
                    dst = os.path.join(egg_tmp, *parts)
                    dl = dst.lower()
                    if dl.endswith('.pyd') or dl.endswith('.dll'):
                        parts[-1] = bdist_egg.strip_module(parts[-1])
                        top_level[os.path.splitext(parts[0])[0]] = 1
                        native_libs.append(src)
                    elif dl.endswith('.py') and old != 'SCRIPTS/':
                        top_level[os.path.splitext(parts[0])[0]] = 1
                        to_compile.append(dst)
                    return dst
            if not src.endswith('.pth'):
                log.warn("WARNING: can't process %s", src)
            return None

        # extract, tracking .pyd/.dll->native_libs and .py -> to_compile
        unpack_archive(dist_filename, egg_tmp, process)
        stubs = []
        for res in native_libs:
            if res.lower().endswith('.pyd'):  # create stubs for .pyd's
                parts = res.split('/')
                resource = parts[-1]
                parts[-1] = bdist_egg.strip_module(parts[-1]) + '.py'
                pyfile = os.path.join(egg_tmp, *parts)
                to_compile.append(pyfile)
                stubs.append(pyfile)
                bdist_egg.write_stub(resource, pyfile)
        self.byte_compile(to_compile)  # compile .py's
        bdist_egg.write_safety_flag(
            os.path.join(egg_tmp, 'EGG-INFO'),
            bdist_egg.analyze_egg(egg_tmp, stubs))  # write zip-safety flag

        # NOTE: locals()[name] deliberately looks up the lists bound to the
        # local names 'top_level' and 'native_libs' above.
        for name in 'top_level', 'native_libs':
            if locals()[name]:
                txt = os.path.join(egg_tmp, 'EGG-INFO', name + '.txt')
                if not os.path.exists(txt):
                    f = open(txt, 'w')
                    f.write('\n'.join(locals()[name]) + '\n')
                    f.close()
+
    def install_wheel(self, wheel_path, tmpdir):
        """Install a .whl by converting it to an egg in install_dir."""
        wheel = Wheel(wheel_path)
        # NOTE(review): plain assert is stripped under `python -O`; an
        # incompatible wheel would then fail later instead -- consider an
        # explicit check.  Left as-is to preserve exception type.
        assert wheel.is_compatible()
        destination = os.path.join(self.install_dir, wheel.egg_name())
        destination = os.path.abspath(destination)
        if not self.dry_run:
            ensure_directory(destination)
        # Remove whatever currently occupies the destination.
        if os.path.isdir(destination) and not os.path.islink(destination):
            dir_util.remove_tree(destination, dry_run=self.dry_run)
        elif os.path.exists(destination):
            self.execute(
                os.unlink,
                (destination,),
                "Removing " + destination,
            )
        try:
            self.execute(
                wheel.install_as_egg,
                (destination,),
                ("Installing %s to %s") % (
                    os.path.basename(wheel_path),
                    os.path.dirname(destination)
                ),
            )
        finally:
            update_dist_caches(destination, fix_zipimporter_caches=False)
        self.add_output(destination)
        return self.egg_distribution(destination)
+
    # Report templates used by installation_report(); filled via
    # %-formatting with locals() there.  Runtime strings -- keep verbatim.
    __mv_warning = textwrap.dedent("""
        Because this distribution was installed --multi-version, before you can
        import modules from this package in an application, you will need to
        'import pkg_resources' and then use a 'require()' call similar to one of
        these examples, in order to select the desired version:

            pkg_resources.require("%(name)s")  # latest installed version
            pkg_resources.require("%(name)s==%(version)s")  # this exact version
            pkg_resources.require("%(name)s>=%(version)s")  # this version or higher
        """).lstrip()

    __id_warning = textwrap.dedent("""
        Note also that the installation directory must be on sys.path at runtime for
        this to work.  (e.g. by being the application's script directory, by being on
        PYTHONPATH, or by being added to sys.path by your code.)
        """)
+
    def installation_report(self, req, dist, what="Installed"):
        """Helpful installation message for display to package users"""
        msg = "\n%(what)s %(eggloc)s%(extras)s"
        if self.multi_version and not self.no_report:
            msg += '\n' + self.__mv_warning
            if self.install_dir not in map(normalize_path, sys.path):
                msg += '\n' + self.__id_warning

        # NOTE: the template is filled from locals(); the names what,
        # eggloc, name, version and extras must match the %()s keys.
        eggloc = dist.location
        name = dist.project_name
        version = dist.version
        extras = ''  # TODO: self.report_extras(req, dist)
        return msg % locals()
+
    # Template for report_editable(); filled via %-formatting with
    # locals() (spec, dirname, python).  Runtime string -- keep verbatim.
    __editable_msg = textwrap.dedent("""
        Extracted editable version of %(spec)s to %(dirname)s

        If it uses setuptools in its setup script, you can activate it in
        "development" mode by going to that directory and running::

            %(python)s setup.py develop

        See the setuptools documentation for the "develop" command for more info.
        """).lstrip()
+
    def report_editable(self, spec, setup_script):
        # The local names dirname and python (and the spec parameter) feed
        # the %()s keys of __editable_msg via locals().
        dirname = os.path.dirname(setup_script)
        python = sys.executable
        return '\n' + self.__editable_msg % locals()
+
    def run_setup(self, setup_script, setup_base, args):
        """Run a distribution's setup.py with `args`, mapping our own
        verbosity/dry-run flags onto distutils options."""
        # Make sure the downloaded setup.py gets our patched command classes.
        sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg)
        sys.modules.setdefault('distutils.command.egg_info', egg_info)

        args = list(args)
        if self.verbose > 2:
            v = 'v' * (self.verbose - 1)
            args.insert(0, '-' + v)
        elif self.verbose < 2:
            args.insert(0, '-q')
        if self.dry_run:
            args.insert(0, '-n')
        log.info(
            "Running %s %s", setup_script[len(setup_base) + 1:], ' '.join(args)
        )
        try:
            run_setup(setup_script, args)
        except SystemExit as v:
            # Re-raise a setup.py exit as a distutils error.
            raise DistutilsError("Setup script exited with %s" % (v.args[0],))
+
    def build_and_install(self, setup_script, setup_base):
        """Run bdist_egg on a source tree and install every egg produced.

        Returns the list of installed Distributions.
        """
        args = ['bdist_egg', '--dist-dir']

        # Build into a temp dist dir next to the setup script.
        dist_dir = tempfile.mkdtemp(
            prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script)
        )
        try:
            self._set_fetcher_options(os.path.dirname(setup_script))
            args.append(dist_dir)

            self.run_setup(setup_script, setup_base, args)
            all_eggs = Environment([dist_dir])
            eggs = []
            for key in all_eggs:
                for dist in all_eggs[key]:
                    eggs.append(self.install_egg(dist.location, setup_base))
            if not eggs and not self.dry_run:
                log.warn("No eggs found in %s (setup script problem?)",
                         dist_dir)
            return eggs
        finally:
            rmtree(dist_dir)
            log.set_verbosity(self.verbose)  # restore our log verbosity
+
+ def _set_fetcher_options(self, base):
+ """
+ When easy_install is about to run bdist_egg on a source dist, that
+ source dist might have 'setup_requires' directives, requiring
+ additional fetching. Ensure the fetcher options given to easy_install
+ are available to that command as well.
+ """
+ # find the fetch options from easy_install and write them out
+ # to the setup.cfg file.
+ ei_opts = self.distribution.get_option_dict('easy_install').copy()
+ fetch_directives = (
+ 'find_links', 'site_dirs', 'index_url', 'optimize', 'allow_hosts',
+ )
+ fetch_options = {}
+ for key, val in ei_opts.items():
+ if key not in fetch_directives:
+ continue
+ fetch_options[key.replace('_', '-')] = val[1]
+ # create a settings dictionary suitable for `edit_config`
+ settings = dict(easy_install=fetch_options)
+ cfg_filename = os.path.join(base, 'setup.cfg')
+ setopt.edit_config(cfg_filename, settings)
+
+ def update_pth(self, dist):
+ if self.pth_file is None:
+ return
+
+ for d in self.pth_file[dist.key]: # drop old entries
+ if self.multi_version or d.location != dist.location:
+ log.info("Removing %s from easy-install.pth file", d)
+ self.pth_file.remove(d)
+ if d.location in self.shadow_path:
+ self.shadow_path.remove(d.location)
+
+ if not self.multi_version:
+ if dist.location in self.pth_file.paths:
+ log.info(
+ "%s is already the active version in easy-install.pth",
+ dist,
+ )
+ else:
+ log.info("Adding %s to easy-install.pth file", dist)
+ self.pth_file.add(dist) # add new entry
+ if dist.location not in self.shadow_path:
+ self.shadow_path.append(dist.location)
+
+ if not self.dry_run:
+
+ self.pth_file.save()
+
+ if dist.key == 'setuptools':
+ # Ensure that setuptools itself never becomes unavailable!
+ # XXX should this check for latest version?
+ filename = os.path.join(self.install_dir, 'setuptools.pth')
+ if os.path.islink(filename):
+ os.unlink(filename)
+ f = open(filename, 'wt')
+ f.write(self.pth_file.make_relative(dist.location) + '\n')
+ f.close()
+
+ def unpack_progress(self, src, dst):
+ # Progress filter for unpacking
+ log.debug("Unpacking %s to %s", src, dst)
+ return dst # only unpack-and-compile skips files for dry run
+
    def unpack_and_compile(self, egg_path, destination):
        """Extract an egg to `destination`, byte-compile its .py files and
        make native extensions readable/executable."""
        to_compile = []
        to_chmod = []

        def pf(src, dst):
            # Progress filter: collect files needing post-processing;
            # returning None skips extraction (dry-run mode).
            if dst.endswith('.py') and not src.startswith('EGG-INFO/'):
                to_compile.append(dst)
            elif dst.endswith('.dll') or dst.endswith('.so'):
                to_chmod.append(dst)
            self.unpack_progress(src, dst)
            return not self.dry_run and dst or None

        unpack_archive(egg_path, destination, pf)
        self.byte_compile(to_compile)
        if not self.dry_run:
            for f in to_chmod:
                # Add r-x bits for everyone, drop setuid/other odd bits.
                mode = ((os.stat(f)[stat.ST_MODE]) | 0o555) & 0o7755
                chmod(f, mode)
+
    def byte_compile(self, to_compile):
        """Byte-compile the given .py files (and optimize if requested),
        honoring sys.dont_write_bytecode."""
        if sys.dont_write_bytecode:
            return

        from distutils.util import byte_compile

        try:
            # try to make the byte compile messages quieter
            log.set_verbosity(self.verbose - 1)

            byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run)
            if self.optimize:
                byte_compile(
                    to_compile, optimize=self.optimize, force=1,
                    dry_run=self.dry_run,
                )
        finally:
            log.set_verbosity(self.verbose)  # restore original verbosity
+
    # Template for the "bad install directory" diagnostic; filled with
    # (install_dir, PYTHONPATH) below.  Runtime string -- keep verbatim.
    __no_default_msg = textwrap.dedent("""
        bad install directory or PYTHONPATH

        You are attempting to install a package to a directory that is not
        on PYTHONPATH and which Python does not read ".pth" files from.  The
        installation directory you specified (via --install-dir, --prefix, or
        the distutils default setting) was:

            %s

        and your PYTHONPATH environment variable currently contains:

            %r

        Here are some of your options for correcting the problem:

        * You can choose a different installation directory, i.e., one that is
          on PYTHONPATH or supports .pth files

        * You can add the installation directory to the PYTHONPATH environment
          variable.  (It must then also be on PYTHONPATH whenever you run
          Python and want to use the package(s) you are installing.)

        * You can set up the installation directory to support ".pth" files by
          using one of the approaches described here:

          https://setuptools.readthedocs.io/en/latest/easy_install.html#custom-installation-locations


        Please make the appropriate changes for your system and try again.""").lstrip()

    def no_default_version_msg(self):
        # Render the diagnostic with the current install dir and PYTHONPATH.
        template = self.__no_default_msg
        return template % (self.install_dir, os.environ.get('PYTHONPATH', ''))
+
    def install_site_py(self):
        """Make sure there's a site.py in the target dir, if needed"""

        if self.sitepy_installed:
            return  # already did it, or don't need to

        sitepy = os.path.join(self.install_dir, "site.py")
        source = resource_string("setuptools", "site-patch.py")
        source = source.decode('utf-8')
        current = ""

        if os.path.exists(sitepy):
            log.debug("Checking existing site.py in %s", self.install_dir)
            with io.open(sitepy) as strm:
                current = strm.read()

            # Refuse to overwrite a site.py we didn't generate ourselves.
            if not current.startswith('def __boot():'):
                raise DistutilsError(
                    "%s is not a setuptools-generated site.py; please"
                    " remove it." % sitepy
                )

        if current != source:
            log.info("Creating %s", sitepy)
            if not self.dry_run:
                ensure_directory(sitepy)
                with io.open(sitepy, 'w', encoding='utf-8') as strm:
                    strm.write(source)
            self.byte_compile([sitepy])

        self.sitepy_installed = True
+
    def create_home_path(self):
        """Create directories under ~."""
        # Only relevant for --user installs.
        if not self.user:
            return
        home = convert_path(os.path.expanduser("~"))
        for name, path in six.iteritems(self.config_vars):
            # Only create config-var paths that live under the home dir.
            if path.startswith(home) and not os.path.isdir(path):
                self.debug_print("os.makedirs('%s', 0o700)" % path)
                os.makedirs(path, 0o700)
+
    # Pick the site dir name: virtualenvs and very old Pythons use
    # 'site-packages'; otherwise 'dist-packages' -- presumably the
    # Debian/Ubuntu distro layout patch.  TODO confirm against the patch.
    if sys.version[:3] in ('2.3', '2.4', '2.5') or 'real_prefix' in sys.__dict__:
        sitedir_name = 'site-packages'
    else:
        sitedir_name = 'dist-packages'

    # Directory schemes consulted by _expand(); '$base' is substituted
    # from the configured prefix.
    INSTALL_SCHEMES = dict(
        posix=dict(
            install_dir='$base/lib/python$py_version_short/site-packages',
            script_dir='$base/bin',
        ),
        unix_local = dict(
            install_dir = '$base/local/lib/python$py_version_short/%s' % sitedir_name,
            script_dir = '$base/local/bin',
        ),
        posix_local = dict(
            install_dir = '$base/local/lib/python$py_version_short/%s' % sitedir_name,
            script_dir = '$base/local/bin',
        ),
        deb_system = dict(
            install_dir = '$base/lib/python3/%s' % sitedir_name,
            script_dir = '$base/bin',
        ),
    )

    # Fallback scheme (Windows-style layout) when os.name has no entry.
    DEFAULT_SCHEME = dict(
        install_dir='$base/Lib/site-packages',
        script_dir='$base/Scripts',
    )
+
    def _expand(self, *attrs):
        """Fill in default install_dir/script_dir from the selected scheme,
        then expand config vars (and '~' on posix) in the named attributes.

        NOTE(review): the deb_system / posix_local branches look like the
        Debian packaging patch -- confirm against the distro patch series.
        """
        config_vars = self.get_finalized_command('install').config_vars

        if self.prefix or self.install_layout:
            if self.install_layout and self.install_layout in ['deb']:
                scheme_name = "deb_system"
                self.prefix = '/usr'
            elif self.prefix or 'real_prefix' in sys.__dict__:
                scheme_name = os.name
            else:
                scheme_name = "posix_local"
            # Set default install_dir/scripts from --prefix
            config_vars = config_vars.copy()
            config_vars['base'] = self.prefix
            scheme = self.INSTALL_SCHEMES.get(scheme_name, self.DEFAULT_SCHEME)
            for attr, val in scheme.items():
                if getattr(self, attr, None) is None:
                    setattr(self, attr, val)

        from distutils.util import subst_vars

        for attr in attrs:
            val = getattr(self, attr)
            if val is not None:
                val = subst_vars(val, config_vars)
                if os.name == 'posix':
                    val = os.path.expanduser(val)
                setattr(self, attr, val)
+
+
+def _pythonpath():
+ items = os.environ.get('PYTHONPATH', '').split(os.pathsep)
+ return filter(None, items)
+
+
def get_site_dirs():
    """
    Return a list of 'site' dirs
    """

    sitedirs = []

    # start with PYTHONPATH
    sitedirs.extend(_pythonpath())

    prefixes = [sys.prefix]
    if sys.exec_prefix != sys.prefix:
        prefixes.append(sys.exec_prefix)
    for prefix in prefixes:
        if prefix:
            if sys.platform in ('os2emx', 'riscos'):
                sitedirs.append(os.path.join(prefix, "Lib", "site-packages"))
            elif os.sep == '/':
                # Unix-like layout; the dist-packages entries presumably
                # come from the Debian layout patch.
                sitedirs.extend([
                    os.path.join(
                        prefix,
                        "local/lib",
                        "python" + sys.version[:3],
                        "dist-packages",
                    ),
                    os.path.join(
                        prefix,
                        "lib",
                        "python{}.{}".format(*sys.version_info),
                        "dist-packages",
                    ),
                    os.path.join(prefix, "lib", "site-python"),
                ])
            else:
                # Windows-style layout.
                sitedirs.extend([
                    prefix,
                    os.path.join(prefix, "lib", "site-packages"),
                ])
            if sys.platform == 'darwin':
                # for framework builds *only* we add the standard Apple
                # locations. Currently only per-user, but /Library and
                # /Network/Library could be added too
                if 'Python.framework' in prefix:
                    home = os.environ.get('HOME')
                    if home:
                        home_sp = os.path.join(
                            home,
                            'Library',
                            'Python',
                            '{}.{}'.format(*sys.version_info),
                            'site-packages',
                        )
                        sitedirs.append(home_sp)
    lib_paths = get_path('purelib'), get_path('platlib')
    for site_lib in lib_paths:
        if site_lib not in sitedirs:
            sitedirs.append(site_lib)

    if site.ENABLE_USER_SITE:
        sitedirs.append(site.USER_SITE)

    try:
        sitedirs.extend(site.getsitepackages())
    except AttributeError:
        # site.getsitepackages is missing on some platforms/venvs.
        pass

    sitedirs = list(map(normalize_path, sitedirs))

    return sitedirs
+
+
def expand_paths(inputs):
    """Yield sys.path directories that might contain "old-style" packages.

    For each unique, existing directory in `inputs`, yield ``(dirname,
    listing)``.  Additionally, follow non-``import`` lines found in any
    .pth files (other than the ones setuptools itself controls) and yield
    those referenced directories too, skipping duplicates and paths that
    no longer exist.
    """

    seen = {}

    for dirname in inputs:
        dirname = normalize_path(dirname)
        if dirname in seen:
            continue

        seen[dirname] = 1
        if not os.path.isdir(dirname):
            continue

        files = os.listdir(dirname)
        yield dirname, files

        for name in files:
            if not name.endswith('.pth'):
                # We only care about the .pth files
                continue
            if name in ('easy-install.pth', 'setuptools.pth'):
                # Ignore .pth files that we control
                continue

            # Read the .pth file.  A context manager guarantees the handle
            # is closed even if yield_lines raises; the original
            # open()/close() pair leaked the descriptor on error.
            with open(os.path.join(dirname, name)) as f:
                lines = list(yield_lines(f))

            # Yield existing non-dupe, non-import directory lines from it
            for line in lines:
                if not line.startswith("import"):
                    line = normalize_path(line.rstrip())
                    if line not in seen:
                        seen[line] = 1
                        if not os.path.isdir(line):
                            continue
                        yield line, os.listdir(line)
+
+
def extract_wininst_cfg(dist_filename):
    """Extract configuration data from a bdist_wininst .exe

    Returns a configparser.RawConfigParser, or None
    """
    f = open(dist_filename, 'rb')
    try:
        # Locate the zip end-of-central-directory record; a wininst .exe is
        # an executable stub with a zip archive appended.
        endrec = zipfile._EndRecData(f)
        if endrec is None:
            return None

        # Bytes prepended before the zip data (the stub + config + metadata).
        prepended = (endrec[9] - endrec[5]) - endrec[6]
        if prepended < 12:  # no wininst data here
            return None
        # The last 12 prepended bytes are: tag, config length, bitmap length.
        f.seek(prepended - 12)

        tag, cfglen, bmlen = struct.unpack("<iii", f.read(12))
        if tag not in (0x1234567A, 0x1234567B):
            return None  # not a valid tag

        # The config block sits immediately before the 12-byte trailer.
        f.seek(prepended - (12 + cfglen))
        init = {'version': '', 'target_version': ''}
        cfg = configparser.RawConfigParser(init)
        try:
            part = f.read(cfglen)
            # Read up to the first null byte.
            config = part.split(b'\0', 1)[0]
            # Now the config is in bytes, but for RawConfigParser, it should
            # be text, so decode it.
            config = config.decode(sys.getfilesystemencoding())
            cfg.readfp(six.StringIO(config))
        except configparser.Error:
            return None
        # A valid wininst config always carries these two sections.
        if not cfg.has_section('metadata') or not cfg.has_section('Setup'):
            return None
        return cfg

    finally:
        f.close()
+
+
def get_exe_prefixes(exe_filename):
    """Get exe->egg path translations for a given .exe file"""

    # Default translations; sorted longest-first below so the most
    # specific prefix wins.
    prefixes = [
        ('PURELIB/', ''),
        ('PLATLIB/pywin32_system32', ''),
        ('PLATLIB/', ''),
        ('SCRIPTS/', 'EGG-INFO/scripts/'),
        ('DATA/lib/site-packages', ''),
    ]
    archive = zipfile.ZipFile(exe_filename)
    try:
        for info in archive.infolist():
            path = info.filename
            parts = path.split('/')
            # An embedded <dir>/<name>.egg-info/PKG-INFO marks the egg
            # metadata root; map it to EGG-INFO/ and stop scanning.
            if len(parts) == 3 and parts[2] == 'PKG-INFO':
                if parts[1].endswith('.egg-info'):
                    prefixes.insert(0, ('/'.join(parts[:2]), 'EGG-INFO/'))
                    break
            if len(parts) != 2 or not path.endswith('.pth'):
                continue
            if path.endswith('-nspkg.pth'):
                continue
            if parts[0].upper() in ('PURELIB', 'PLATLIB'):
                contents = archive.read(path)
                if six.PY3:
                    contents = contents.decode()
                # Each non-import .pth line names a directory that should
                # be flattened out of the egg layout.
                for pth in yield_lines(contents):
                    pth = pth.strip().replace('\\', '/')
                    if not pth.startswith('import'):
                        prefixes.append((('%s/%s/' % (parts[0], pth)), ''))
    finally:
        archive.close()
    prefixes = [(x.lower(), y) for x, y in prefixes]
    prefixes.sort()
    prefixes.reverse()
    return prefixes
+
+
class PthDistributions(Environment):
    """A .pth file with Distribution paths in it"""

    # True when the in-memory path list diverges from the on-disk file.
    dirty = False

    def __init__(self, filename, sitedirs=()):
        self.filename = filename
        self.sitedirs = list(map(normalize_path, sitedirs))
        self.basedir = normalize_path(os.path.dirname(self.filename))
        self._load()
        Environment.__init__(self, [], None, None)
        # Register every distribution found on the loaded paths.
        for path in yield_lines(self.paths):
            list(map(self.add, find_distributions(path, True)))

    def _load(self):
        # Parse self.filename into self.paths, pruning duplicate and
        # no-longer-existing entries (pruning marks the object dirty so
        # the cleaned file gets rewritten on save()).
        self.paths = []
        saw_import = False
        seen = dict.fromkeys(self.sitedirs)
        if os.path.isfile(self.filename):
            f = open(self.filename, 'rt')
            for line in f:
                if line.startswith('import'):
                    saw_import = True
                    continue
                path = line.rstrip()
                self.paths.append(path)
                # Blank lines and comments are kept verbatim but not
                # normalized or deduplicated.
                if not path.strip() or path.strip().startswith('#'):
                    continue
                # skip non-existent paths, in case somebody deleted a package
                # manually, and duplicate paths as well
                path = self.paths[-1] = normalize_path(
                    os.path.join(self.basedir, path)
                )
                if not os.path.exists(path) or path in seen:
                    self.paths.pop()  # skip it
                    self.dirty = True  # we cleaned up, so we're dirty now :)
                    continue
                seen[path] = 1
            f.close()

        if self.paths and not saw_import:
            self.dirty = True  # ensure anything we touch has import wrappers
            while self.paths and not self.paths[-1].strip():
                self.paths.pop()

    def save(self):
        """Write changed .pth file back to disk"""
        if not self.dirty:
            return

        rel_paths = list(map(self.make_relative, self.paths))
        if rel_paths:
            log.debug("Saving %s", self.filename)
            lines = self._wrap_lines(rel_paths)
            data = '\n'.join(lines) + '\n'

            # Replace a symlinked .pth with a real file before writing.
            if os.path.islink(self.filename):
                os.unlink(self.filename)
            with open(self.filename, 'wt') as f:
                f.write(data)

        elif os.path.exists(self.filename):
            # No paths left: remove the file rather than leave it empty.
            log.debug("Deleting empty %s", self.filename)
            os.unlink(self.filename)

        self.dirty = False

    @staticmethod
    def _wrap_lines(lines):
        # Hook point: subclasses may decorate the saved lines (see
        # RewritePthDistributions).
        return lines

    def add(self, dist):
        """Add `dist` to the distribution map"""
        # Only record a new path when it isn't already tracked and isn't a
        # site dir (except the cwd, which may legitimately be on PYTHONPATH).
        new_path = (
            dist.location not in self.paths and (
                dist.location not in self.sitedirs or
                # account for '.' being in PYTHONPATH
                dist.location == os.getcwd()
            )
        )
        if new_path:
            self.paths.append(dist.location)
            self.dirty = True
        Environment.add(self, dist)

    def remove(self, dist):
        """Remove `dist` from the distribution map"""
        while dist.location in self.paths:
            self.paths.remove(dist.location)
            self.dirty = True
        Environment.remove(self, dist)

    def make_relative(self, path):
        # Rewrite `path` relative to self.basedir when it lives beneath it;
        # otherwise return it unchanged.
        npath, last = os.path.split(normalize_path(path))
        baselen = len(self.basedir)
        parts = [last]
        sep = os.altsep == '/' and '/' or os.sep
        while len(npath) >= baselen:
            if npath == self.basedir:
                parts.append(os.curdir)
                parts.reverse()
                return sep.join(parts)
            npath, last = os.path.split(npath)
            parts.append(last)
        else:
            return path
+
+
class RewritePthDistributions(PthDistributions):
    # Variant of PthDistributions that brackets the saved paths with code
    # that moves the newly-added entries to a fixed insertion point in
    # sys.path instead of leaving them appended at the end.

    @classmethod
    def _wrap_lines(cls, lines):
        yield cls.prelude
        for line in lines:
            yield line
        yield cls.postlude

    # Executed by site.py when the .pth is processed: remember the length
    # of sys.path before our entries are appended...
    prelude = _one_liner("""
        import sys
        sys.__plen = len(sys.path)
        """)
    # ...then splice everything appended since the prelude into position
    # sys.__egginsert, keeping eggs ahead of later site entries.
    postlude = _one_liner("""
        import sys
        new = sys.path[sys.__plen:]
        del sys.path[sys.__plen:]
        p = getattr(sys, '__egginsert', 0)
        sys.path[p:p] = new
        sys.__egginsert = p + len(new)
        """)
+
+
# Opt-in switch: setting SETUPTOOLS_SYS_PATH_TECHNIQUE=rewrite selects the
# sys.path-reordering .pth writer; the default ('raw') writes plain paths.
if os.environ.get('SETUPTOOLS_SYS_PATH_TECHNIQUE', 'raw') == 'rewrite':
    PthDistributions = RewritePthDistributions
+
+
def _first_line_re():
    """
    Return a regular expression based on first_line_re suitable for matching
    strings.
    """
    pattern = first_line_re.pattern
    if isinstance(pattern, str):
        return first_line_re

    # first_line_re in Python >=3.1.4 and >=3.2.1 is a bytes pattern.
    return re.compile(pattern.decode())
+
+
def auto_chmod(func, arg, exc):
    """shutil.rmtree onerror handler.

    On Windows, clear the read-only bit and retry the failed unlink.
    Otherwise re-raise the original exception with the failing function
    and path appended for context.
    """
    if func in [os.unlink, os.remove] and os.name == 'nt':
        chmod(arg, stat.S_IWRITE)
        return func(arg)
    et, ev, tb = sys.exc_info()
    # Re-raise with an exception *instance* of the original type.  The
    # historical ``(ev[0], ev[1] + ...)`` tuple form indexed the exception
    # object, which on Python 3 raises ``TypeError: 'OSError' object is
    # not subscriptable`` and masks the real error.
    six.reraise(et, et("%s: %s %s" % (ev, func, arg)), tb)
+
+
def update_dist_caches(dist_path, fix_zipimporter_caches):
    """
    Fix any globally cached `dist_path` related data

    `dist_path` should be a path of a newly installed egg distribution (zipped
    or unzipped).

    sys.path_importer_cache contains finder objects that have been cached when
    importing data from the original distribution. Any such finders need to be
    cleared since the replacement distribution might be packaged differently,
    e.g. a zipped egg distribution might get replaced with an unzipped egg
    folder or vice versa. Having the old finders cached may then cause Python
    to attempt loading modules from the replacement distribution using an
    incorrect loader.

    zipimport.zipimporter objects are Python loaders charged with importing
    data packaged inside zip archives. If stale loaders referencing the
    original distribution, are left behind, they can fail to load modules from
    the replacement distribution. E.g. if an old zipimport.zipimporter instance
    is used to load data from a new zipped egg archive, it may cause the
    operation to attempt to locate the requested data in the wrong location -
    one indicated by the original distribution's zip archive directory
    information. Such an operation may then fail outright, e.g. report having
    read a 'bad local file header', or even worse, it may fail silently &
    return invalid data.

    zipimport._zip_directory_cache contains cached zip archive directory
    information for all existing zipimport.zipimporter instances and all such
    instances connected to the same archive share the same cached directory
    information.

    If asked, and the underlying Python implementation allows it, we can fix
    all existing zipimport.zipimporter instances instead of having to track
    them down and remove them one by one, by updating their shared cached zip
    archive directory information. This, of course, assumes that the
    replacement distribution is packaged as a zipped egg.

    If not asked to fix existing zipimport.zipimporter instances, we still do
    our best to clear any remaining zipimport.zipimporter related cached data
    that might somehow later get used when attempting to load data from the new
    distribution and thus cause such load operations to fail. Note that when
    tracking down such remaining stale data, we can not catch every conceivable
    usage from here, and we clear only those that we know of and have found to
    cause problems if left alive. Any remaining caches should be updated by
    whomever is in charge of maintaining them, i.e. they should be ready to
    handle us replacing their zip archives with new distributions at runtime.

    """
    # There are several other known sources of stale zipimport.zipimporter
    # instances that we do not clear here, but might if ever given a reason to
    # do so:
    # * Global setuptools pkg_resources.working_set (a.k.a. 'master working
    #   set') may contain distributions which may in turn contain their
    #   zipimport.zipimporter loaders.
    # * Several zipimport.zipimporter loaders held by local variables further
    #   up the function call stack when running the setuptools installation.
    # * Already loaded modules may have their __loader__ attribute set to the
    #   exact loader instance used when importing them. Python 3.4 docs state
    #   that this information is intended mostly for introspection and so is
    #   not expected to cause us problems.
    normalized_path = normalize_path(dist_path)
    # Always drop stale finders for this path.
    _uncache(normalized_path, sys.path_importer_cache)
    if fix_zipimporter_caches:
        # Repair shared zip directory data in place (zipped-egg replacement).
        _replace_zip_directory_cache_data(normalized_path)
    else:
        # Here, even though we do not want to fix existing and now stale
        # zipimporter cache information, we still want to remove it. Related to
        # Python's zip archive directory information cache, we clear each of
        # its stale entries in two phases:
        #   1. Clear the entry so attempting to access zip archive information
        #      via any existing stale zipimport.zipimporter instances fails.
        #   2. Remove the entry from the cache so any newly constructed
        #      zipimport.zipimporter instances do not end up using old stale
        #      zip archive directory information.
        # This whole stale data removal step does not seem strictly necessary,
        # but has been left in because it was done before we started replacing
        # the zip archive directory information cache content if possible, and
        # there are no relevant unit tests that we can depend on to tell us if
        # this is really needed.
        _remove_and_clear_zip_directory_cache_data(normalized_path)
+
+
def _collect_zipimporter_cache_entries(normalized_path, cache):
    """
    Return zipimporter cache entry keys related to a given normalized path.

    Alternative path spellings (e.g. those using different character case or
    those using alternative path separators) related to the same path are
    included. Any sub-path entries are included as well, i.e. those
    corresponding to zip archives embedded in other zip archives.

    """
    matches = []
    prefix_len = len(normalized_path)
    for key in cache:
        candidate = normalize_path(key)
        # Accept exact matches and sub-paths (next char is a separator).
        boundary = candidate[prefix_len:prefix_len + 1]
        if candidate.startswith(normalized_path) and boundary in (os.sep, ''):
            matches.append(key)
    return matches
+
+
def _update_zipimporter_cache(normalized_path, cache, updater=None):
    """
    Update zipimporter cache data for a given normalized path.

    Any sub-path entries are processed as well, i.e. those corresponding to zip
    archives embedded in other zip archives.

    Given updater is a callable taking a cache entry key and the original entry
    (after already removing the entry from the cache), and expected to update
    the entry and possibly return a new one to be inserted in its place.
    Returning None indicates that the entry should not be replaced with a new
    one. If no updater is given, the cache entries are simply removed without
    any additional processing, the same as if the updater simply returned None.

    """
    for p in _collect_zipimporter_cache_entries(normalized_path, cache):
        # N.B. pypy's custom zipimport._zip_directory_cache implementation does
        # not support the complete dict interface:
        # * Does not support item assignment, thus not allowing this function
        #   to be used only for removing existing cache entries.
        #  * Does not support the dict.pop() method, forcing us to use the
        #    get/del patterns instead. For more detailed information see the
        #    following links:
        # https://github.com/pypa/setuptools/issues/202#issuecomment-202913420
        # http://bit.ly/2h9itJX
        old_entry = cache[p]
        del cache[p]
        # `updater and ...` short-circuits: no updater means plain removal.
        new_entry = updater and updater(p, old_entry)
        if new_entry is not None:
            cache[p] = new_entry
+
+
def _uncache(normalized_path, cache):
    """Drop all cache entries related to `normalized_path` (no replacement)."""
    _update_zipimporter_cache(normalized_path, cache, updater=None)
+
+
def _remove_and_clear_zip_directory_cache_data(normalized_path):
    """Clear, then drop, stale zip directory cache data for the given path."""
    def clear_and_remove_cached_zip_archive_directory_data(path, old_entry):
        # Empty the shared directory dict so stale zipimporter instances
        # cannot keep using it; returning None leaves the entry removed.
        old_entry.clear()

    _update_zipimporter_cache(
        normalized_path, zipimport._zip_directory_cache,
        updater=clear_and_remove_cached_zip_archive_directory_data)
+
+
# PyPy Python implementation does not allow directly writing to the
# zipimport._zip_directory_cache and so prevents us from attempting to correct
# its content. The best we can do there is clear the problematic cache content
# and have PyPy repopulate it as needed. The downside is that if there are any
# stale zipimport.zipimporter instances laying around, attempting to use them
# will fail due to not having its zip archive directory information available
# instead of being automatically corrected to use the new correct zip archive
# directory information.
if '__pypy__' in sys.builtin_module_names:
    _replace_zip_directory_cache_data = \
        _remove_and_clear_zip_directory_cache_data
else:

    def _replace_zip_directory_cache_data(normalized_path):
        def replace_cached_zip_archive_directory_data(path, old_entry):
            # N.B. In theory, we could load the zip directory information just
            # once for all updated path spellings, and then copy it locally and
            # update its contained path strings to contain the correct
            # spelling, but that seems like a way too invasive move (this cache
            # structure is not officially documented anywhere and could in
            # theory change with new Python releases) for no significant
            # benefit.
            old_entry.clear()
            # Constructing a fresh zipimporter repopulates the cache entry
            # for `path`; copy that data into the old (shared) dict so any
            # existing zipimporter instances pick it up too.
            zipimport.zipimporter(path)
            old_entry.update(zipimport._zip_directory_cache[path])
            return old_entry

        _update_zipimporter_cache(
            normalized_path, zipimport._zip_directory_cache,
            updater=replace_cached_zip_archive_directory_data)
+
+
def is_python(text, filename='<string>'):
    """Return True when `text` compiles cleanly as a Python script."""
    try:
        compile(text, filename, 'exec')
    except (SyntaxError, TypeError):
        return False
    return True
+
+
def is_sh(executable):
    """Determine if the specified executable is a .sh (contains a #! line)"""
    try:
        with io.open(executable, encoding='latin-1') as stream:
            head = stream.read(2)
    except (OSError, IOError):
        # Historical quirk preserved: an unreadable path yields the path
        # itself (a truthy value) rather than False.
        return executable
    return head == '#!'
+
+
def nt_quote_arg(arg):
    """Quote a command line argument according to Windows parsing rules"""
    # The stdlib already implements the MS C runtime quoting algorithm.
    quoted = subprocess.list2cmdline([arg])
    return quoted
+
+
def is_python_script(script_text, filename):
    """Is this text, as a whole, a Python script? (as opposed to shell/bat/etc.
    """
    # A Python extension is decisive on its own.
    if filename.endswith(('.py', '.pyw')):
        return True
    # Otherwise accept anything that actually compiles as Python...
    if is_python(script_text, filename):
        return True
    # ...or whose shebang line mentions python.
    if script_text.startswith('#!'):
        shebang = script_text.splitlines()[0].lower()
        return 'python' in shebang

    return False
+
+
# Use the real os.chmod when the platform provides one; fall back to a
# no-op on platforms without it.
try:
    from os import chmod as _chmod
except ImportError:
    # Jython compatibility
    def _chmod(*args):
        pass
+
+
def chmod(path, mode):
    # Best-effort chmod: failures are logged at debug level and swallowed
    # rather than raised (e.g. on filesystems that reject mode changes).
    log.debug("changing mode of %s to %o", path, mode)
    try:
        _chmod(path, mode)
    except os.error as e:
        log.debug("chmod failed: %s", e)
+
+
class CommandSpec(list):
    """
    A command spec for a #! header, specified as a list of arguments akin to
    those passed to Popen.
    """

    # Extra interpreter options appended when rendering the header.
    options = []
    # Keyword arguments forwarded to shlex.split (overridden on Windows).
    split_args = dict()

    @classmethod
    def best(cls):
        """
        Choose the best CommandSpec class based on environmental conditions.
        """
        return cls

    @classmethod
    def _sys_executable(cls):
        # Honor the macOS venv launcher override when present.
        _default = os.path.normpath(sys.executable)
        return os.environ.get('__PYVENV_LAUNCHER__', _default)

    @classmethod
    def from_param(cls, param):
        """
        Construct a CommandSpec from a parameter to build_scripts, which may
        be None.
        """
        if isinstance(param, cls):
            return param
        if isinstance(param, list):
            return cls(param)
        if param is None:
            return cls.from_environment()
        # otherwise, assume it's a string.
        return cls.from_string(param)

    @classmethod
    def from_environment(cls):
        # Spec containing just the current interpreter's executable.
        return cls([cls._sys_executable()])

    @classmethod
    def from_string(cls, string):
        """
        Construct a command spec from a simple string representing a command
        line parseable by shlex.split.
        """
        items = shlex.split(string, **cls.split_args)
        return cls(items)

    def install_options(self, script_text):
        # Capture interpreter options from the script's #! line; add -x
        # when the command line is not pure ASCII (historical workaround).
        self.options = shlex.split(self._extract_options(script_text))
        cmdline = subprocess.list2cmdline(self)
        if not isascii(cmdline):
            self.options[:0] = ['-x']

    @staticmethod
    def _extract_options(orig_script):
        """
        Extract any options from the first line of the script.
        """
        first = (orig_script + '\n').splitlines()[0]
        match = _first_line_re().match(first)
        options = match.group(1) or '' if match else ''
        return options.strip()

    def as_header(self):
        # Render "#!<command line>\n" including any captured options.
        return self._render(self + list(self.options))

    @staticmethod
    def _strip_quotes(item):
        # Remove one level of matching surrounding quotes, if any.
        _QUOTES = '"\''
        for q in _QUOTES:
            if item.startswith(q) and item.endswith(q):
                return item[1:-1]
        return item

    @staticmethod
    def _render(items):
        cmdline = subprocess.list2cmdline(
            CommandSpec._strip_quotes(item.strip()) for item in items)
        return '#!' + cmdline + '\n'
+
+
# For pbr compat; will be removed in a future version.
# Resolved once at import time from the current interpreter / venv launcher.
sys_executable = CommandSpec._sys_executable()
+
+
class WindowsCommandSpec(CommandSpec):
    """Command spec that splits strings with Windows (non-POSIX) rules."""
    split_args = dict(posix=False)
+
+
class ScriptWriter:
    """
    Encapsulates behavior around writing entry point scripts for console and
    gui apps.
    """

    # Stub script body; interpolated with spec/group/name via locals() in
    # get_args.  Must stay a %-format template.
    template = textwrap.dedent(r"""
        # EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r
        __requires__ = %(spec)r
        import re
        import sys
        from pkg_resources import load_entry_point

        if __name__ == '__main__':
            sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
            sys.exit(
                load_entry_point(%(spec)r, %(group)r, %(name)r)()
            )
        """).lstrip()

    command_spec_class = CommandSpec

    @classmethod
    def get_script_args(cls, dist, executable=None, wininst=False):
        # for backward compatibility
        warnings.warn("Use get_args", EasyInstallDeprecationWarning)
        writer = (WindowsScriptWriter if wininst else ScriptWriter).best()
        header = cls.get_script_header("", executable, wininst)
        return writer.get_args(dist, header)

    @classmethod
    def get_script_header(cls, script_text, executable=None, wininst=False):
        # for backward compatibility
        warnings.warn("Use get_header", EasyInstallDeprecationWarning, stacklevel=2)
        if wininst:
            executable = "python.exe"
        return cls.get_header(script_text, executable)

    @classmethod
    def get_args(cls, dist, header=None):
        """
        Yield write_script() argument tuples for a distribution's
        console_scripts and gui_scripts entry points.
        """
        if header is None:
            header = cls.get_header()
        spec = str(dist.as_requirement())
        for type_ in 'console', 'gui':
            group = type_ + '_scripts'
            for name, ep in dist.get_entry_map(group).items():
                cls._ensure_safe_name(name)
                # `spec`, `group` and `name` feed the template via locals().
                script_text = cls.template % locals()
                args = cls._get_script_args(type_, name, header, script_text)
                for res in args:
                    yield res

    @staticmethod
    def _ensure_safe_name(name):
        """
        Prevent paths in *_scripts entry point names.
        """
        has_path_sep = re.search(r'[\\/]', name)
        if has_path_sep:
            raise ValueError("Path separators not allowed in script names")

    @classmethod
    def get_writer(cls, force_windows):
        # for backward compatibility
        warnings.warn("Use best", EasyInstallDeprecationWarning)
        return WindowsScriptWriter.best() if force_windows else cls.best()

    @classmethod
    def best(cls):
        """
        Select the best ScriptWriter for this environment.
        """
        # os._name == 'nt' covers Jython running on Windows.
        if sys.platform == 'win32' or (os.name == 'java' and os._name == 'nt'):
            return WindowsScriptWriter.best()
        else:
            return cls

    @classmethod
    def _get_script_args(cls, type_, name, header, script_text):
        # Simply write the stub with no extension.
        yield (name, header + script_text)

    @classmethod
    def get_header(cls, script_text="", executable=None):
        """Create a #! line, getting options (if any) from script_text"""
        cmd = cls.command_spec_class.best().from_param(executable)
        cmd.install_options(script_text)
        return cmd.as_header()
+
+
class WindowsScriptWriter(ScriptWriter):
    # ScriptWriter variant producing Windows-runnable scripts.
    command_spec_class = WindowsCommandSpec

    @classmethod
    def get_writer(cls):
        # for backward compatibility
        warnings.warn("Use best", EasyInstallDeprecationWarning)
        return cls.best()

    @classmethod
    def best(cls):
        """
        Select the best ScriptWriter suitable for Windows
        """
        writer_lookup = dict(
            executable=WindowsExecutableLauncherWriter,
            natural=cls,
        )
        # for compatibility, use the executable launcher by default
        launcher = os.environ.get('SETUPTOOLS_LAUNCHER', 'executable')
        return writer_lookup[launcher]

    @classmethod
    def _get_script_args(cls, type_, name, header, script_text):
        "For Windows, add a .py extension"
        # NOTE(review): '.pya' (not '.py') appears deliberate here, likely to
        # avoid clashing with importable modules — confirm before changing.
        ext = dict(console='.pya', gui='.pyw')[type_]
        if ext not in os.environ['PATHEXT'].lower().split(';'):
            msg = (
                "{ext} not listed in PATHEXT; scripts will not be "
                "recognized as executables."
            ).format(**locals())
            warnings.warn(msg, UserWarning)
        # Block older spellings of the same script so stale copies cannot
        # shadow the one we are about to write.
        old = ['.pya', '.py', '-script.py', '.pyc', '.pyo', '.pyw', '.exe']
        old.remove(ext)
        header = cls._adjust_header(type_, header)
        blockers = [name + x for x in old]
        yield name + ext, header + script_text, 't', blockers

    @classmethod
    def _adjust_header(cls, type_, orig_header):
        """
        Make sure 'pythonw' is used for gui and 'python' is used for
        console (regardless of what sys.executable is).
        """
        pattern = 'pythonw.exe'
        repl = 'python.exe'
        if type_ == 'gui':
            pattern, repl = repl, pattern
        pattern_ob = re.compile(re.escape(pattern), re.IGNORECASE)
        new_header = pattern_ob.sub(string=orig_header, repl=repl)
        return new_header if cls._use_header(new_header) else orig_header

    @staticmethod
    def _use_header(new_header):
        """
        Should _adjust_header use the replaced header?

        On non-windows systems, always use. On
        Windows systems, only use the replaced header if it resolves
        to an executable on the system.
        """
        # Strip the '#!' prefix, trailing newline, and surrounding quotes.
        clean_header = new_header[2:-1].strip('"')
        return sys.platform != 'win32' or find_executable(clean_header)
+
+
class WindowsExecutableLauncherWriter(WindowsScriptWriter):
    # Writes a '<name>-script.py[w]' stub plus a '<name>.exe' launcher.
    @classmethod
    def _get_script_args(cls, type_, name, header, script_text):
        """
        For Windows, add a .py extension and an .exe launcher
        """
        if type_ == 'gui':
            launcher_type = 'gui'
            ext = '-script.pyw'
            old = ['.pyw']
        else:
            launcher_type = 'cli'
            ext = '-script.py'
            old = ['.py', '.pyc', '.pyo']
        hdr = cls._adjust_header(type_, header)
        blockers = [name + x for x in old]
        yield (name + ext, hdr + script_text, 't', blockers)
        yield (
            name + '.exe', get_win_launcher(launcher_type),
            'b'  # write in binary mode
        )
        if not is_64bit():
            # install a manifest for the launcher to prevent Windows
            # from detecting it as an installer (which it will for
            # launchers like easy_install.exe). Consider only
            # adding a manifest for launchers detected as installers.
            # See Distribute #143 for details.
            m_name = name + '.exe.manifest'
            yield (m_name, load_launcher_manifest(name), 't')
+
+
# for backward-compatibility
# Module-level aliases kept for callers that predate the classmethod API;
# both emit EasyInstallDeprecationWarning when invoked.
get_script_args = ScriptWriter.get_script_args
get_script_header = ScriptWriter.get_script_header
+
+
def get_win_launcher(type):
    """
    Load the Windows launcher (executable) suitable for launching a script.

    `type` should be either 'cli' or 'gui'

    Returns the executable as a byte string.
    """
    # Pick the launcher matching the interpreter's bitness, e.g. 'cli-64.exe'.
    bitness = '64' if is_64bit() else '32'
    launcher_fn = '%s-%s.exe' % (type, bitness)
    return resource_string('setuptools', launcher_fn)
+
+
def load_launcher_manifest(name):
    """Return launcher manifest.xml contents with `name` interpolated."""
    raw = pkg_resources.resource_string(__name__, 'launcher manifest.xml')
    if six.PY2:
        return raw % vars()
    # On Python 3 the resource is bytes; decode before %-interpolation.
    return raw.decode('utf-8') % vars()
+
+
def rmtree(path, ignore_errors=False, onerror=auto_chmod):
    # shutil.rmtree whose default error handler clears the Windows
    # read-only attribute and retries the delete (see auto_chmod).
    return shutil.rmtree(path, ignore_errors, onerror)
+
+
def current_umask():
    """Return the process umask without permanently changing it."""
    # The umask can only be read by setting it, so set a throwaway value
    # and immediately restore the original.
    original = os.umask(0o022)
    os.umask(original)
    return original
+
+
def bootstrap():
    # This function is called when setuptools*.egg is run using /bin/sh
    import setuptools

    # Install the egg the script itself lives in: use the egg's own path
    # both as the script name and as the sole requirement argument.
    argv0 = os.path.dirname(setuptools.__path__[0])
    sys.argv[0] = argv0
    sys.argv.append(argv0)
    main()
+
+
def main(argv=None, **kw):
    """Run the easy_install command with `argv` (defaults to sys.argv[1:])."""
    from setuptools import setup
    from setuptools.dist import Distribution

    class DistributionWithoutHelpCommands(Distribution):
        # Suppress the generic distutils usage banner; _patch_usage
        # substitutes an easy_install-specific one instead.
        common_usage = ""

        def _show_help(self, *args, **kw):
            with _patch_usage():
                Distribution._show_help(self, *args, **kw)

    if argv is None:
        argv = sys.argv[1:]

    with _patch_usage():
        # '-q' quiets the outer setup(); '-v' restores easy_install verbosity.
        setup(
            script_args=['-q', 'easy_install', '-v'] + argv,
            script_name=sys.argv[0] or 'easy_install',
            distclass=DistributionWithoutHelpCommands,
            **kw
        )
+
+
@contextlib.contextmanager
def _patch_usage():
    # Temporarily replace distutils' usage banner with an easy_install
    # specific one; always restores the original on exit.
    import distutils.core
    USAGE = textwrap.dedent("""
        usage: %(script)s [options] requirement_or_url ...
           or: %(script)s --help
        """).lstrip()

    def gen_usage(script_name):
        return USAGE % dict(
            script=os.path.basename(script_name),
        )

    saved = distutils.core.gen_usage
    distutils.core.gen_usage = gen_usage
    try:
        yield
    finally:
        distutils.core.gen_usage = saved
+
class EasyInstallDeprecationWarning(SetuptoolsDeprecationWarning):
    """Class for warning about deprecations in EasyInstall in SetupTools.

    Not ignored by default, unlike DeprecationWarning.
    """
+
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/egg_info.py b/monEnvTP/lib/python3.8/site-packages/setuptools/command/egg_info.py
new file mode 100644
index 0000000000000000000000000000000000000000..b767ef31d3155dd0292f748f8749c405fd1d3258
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/command/egg_info.py
@@ -0,0 +1,717 @@
+"""setuptools.command.egg_info
+
+Create a distribution's .egg-info directory and contents"""
+
+from distutils.filelist import FileList as _FileList
+from distutils.errors import DistutilsInternalError
+from distutils.util import convert_path
+from distutils import log
+import distutils.errors
+import distutils.filelist
+import os
+import re
+import sys
+import io
+import warnings
+import time
+import collections
+
+from setuptools.extern import six
+from setuptools.extern.six.moves import map
+
+from setuptools import Command
+from setuptools.command.sdist import sdist
+from setuptools.command.sdist import walk_revctrl
+from setuptools.command.setopt import edit_config
+from setuptools.command import bdist_egg
+from pkg_resources import (
+ parse_requirements, safe_name, parse_version,
+ safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename)
+import setuptools.unicode_utils as unicode_utils
+from setuptools.glob import glob
+
+from setuptools.extern import packaging
+from setuptools import SetuptoolsDeprecationWarning
+
+def translate_pattern(glob):
+ """
+ Translate a file path glob like '*.txt' in to a regular expression.
+ This differs from fnmatch.translate which allows wildcards to match
+ directory separators. It also knows about '**/' which matches any number of
+ directories.
+ """
+ pat = ''
+
+ # This will split on '/' within [character classes]. This is deliberate.
+ chunks = glob.split(os.path.sep)
+
+ sep = re.escape(os.sep)
+ valid_char = '[^%s]' % (sep,)
+
+ for c, chunk in enumerate(chunks):
+ last_chunk = c == len(chunks) - 1
+
+ # Chunks that are a literal ** are globstars. They match anything.
+ if chunk == '**':
+ if last_chunk:
+ # Match anything if this is the last component
+ pat += '.*'
+ else:
+ # Match '(name/)*'
+ pat += '(?:%s+%s)*' % (valid_char, sep)
+ continue # Break here as the whole path component has been handled
+
+ # Find any special characters in the remainder
+ i = 0
+ chunk_len = len(chunk)
+ while i < chunk_len:
+ char = chunk[i]
+ if char == '*':
+ # Match any number of name characters
+ pat += valid_char + '*'
+ elif char == '?':
+ # Match a name character
+ pat += valid_char
+ elif char == '[':
+ # Character class
+ inner_i = i + 1
+ # Skip initial !/] chars
+ if inner_i < chunk_len and chunk[inner_i] == '!':
+ inner_i = inner_i + 1
+ if inner_i < chunk_len and chunk[inner_i] == ']':
+ inner_i = inner_i + 1
+
+ # Loop till the closing ] is found
+ while inner_i < chunk_len and chunk[inner_i] != ']':
+ inner_i = inner_i + 1
+
+ if inner_i >= chunk_len:
+ # Got to the end of the string without finding a closing ]
+ # Do not treat this as a matching group, but as a literal [
+ pat += re.escape(char)
+ else:
+ # Grab the insides of the [brackets]
+ inner = chunk[i + 1:inner_i]
+ char_class = ''
+
+ # Class negation
+ if inner[0] == '!':
+ char_class = '^'
+ inner = inner[1:]
+
+ char_class += re.escape(inner)
+ pat += '[%s]' % (char_class,)
+
+ # Skip to the end ]
+ i = inner_i
+ else:
+ pat += re.escape(char)
+ i += 1
+
+ # Join each chunk with the dir separator
+ if not last_chunk:
+ pat += sep
+
+ pat += r'\Z'
+ return re.compile(pat, flags=re.MULTILINE|re.DOTALL)
+
+
+class InfoCommon:
+ tag_build = None
+ tag_date = None
+
+ @property
+ def name(self):
+ return safe_name(self.distribution.get_name())
+
+ def tagged_version(self):
+ version = self.distribution.get_version()
+ # egg_info may be called more than once for a distribution,
+ # in which case the version string already contains all tags.
+ if self.vtags and version.endswith(self.vtags):
+ return safe_version(version)
+ return safe_version(version + self.vtags)
+
+ def tags(self):
+ version = ''
+ if self.tag_build:
+ version += self.tag_build
+ if self.tag_date:
+ version += time.strftime("-%Y%m%d")
+ return version
+ vtags = property(tags)
+
+
+class egg_info(InfoCommon, Command):
+ description = "create a distribution's .egg-info directory"
+
+ user_options = [
+ ('egg-base=', 'e', "directory containing .egg-info directories"
+ " (default: top of the source tree)"),
+ ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
+ ('tag-build=', 'b', "Specify explicit tag to add to version number"),
+ ('no-date', 'D', "Don't include date stamp [default]"),
+ ]
+
+ boolean_options = ['tag-date']
+ negative_opt = {
+ 'no-date': 'tag-date',
+ }
+
+ def initialize_options(self):
+ self.egg_base = None
+ self.egg_name = None
+ self.egg_info = None
+ self.egg_version = None
+ self.broken_egg_info = False
+
+ ####################################
+ # allow the 'tag_svn_revision' to be detected and
+ # set, supporting sdists built on older Setuptools.
+ @property
+ def tag_svn_revision(self):
+ pass
+
+ @tag_svn_revision.setter
+ def tag_svn_revision(self, value):
+ pass
+ ####################################
+
+ def save_version_info(self, filename):
+ """
+ Materialize the value of date into the
+ build tag. Install build keys in a deterministic order
+ to avoid arbitrary reordering on subsequent builds.
+ """
+ egg_info = collections.OrderedDict()
+ # follow the order these keys would have been added
+ # when PYTHONHASHSEED=0
+ egg_info['tag_build'] = self.tags()
+ egg_info['tag_date'] = 0
+ edit_config(filename, dict(egg_info=egg_info))
+
+ def finalize_options(self):
+ # Note: we need to capture the current value returned
+ # by `self.tagged_version()`, so we can later update
+ # `self.distribution.metadata.version` without
+ # repercussions.
+ self.egg_name = self.name
+ self.egg_version = self.tagged_version()
+ parsed_version = parse_version(self.egg_version)
+
+ try:
+ is_version = isinstance(parsed_version, packaging.version.Version)
+ spec = (
+ "%s==%s" if is_version else "%s===%s"
+ )
+ list(
+ parse_requirements(spec % (self.egg_name, self.egg_version))
+ )
+ except ValueError:
+ raise distutils.errors.DistutilsOptionError(
+ "Invalid distribution name or version syntax: %s-%s" %
+ (self.egg_name, self.egg_version)
+ )
+
+ if self.egg_base is None:
+ dirs = self.distribution.package_dir
+ self.egg_base = (dirs or {}).get('', os.curdir)
+
+ self.ensure_dirname('egg_base')
+ self.egg_info = to_filename(self.egg_name) + '.egg-info'
+ if self.egg_base != os.curdir:
+ self.egg_info = os.path.join(self.egg_base, self.egg_info)
+ if '-' in self.egg_name:
+ self.check_broken_egg_info()
+
+ # Set package version for the benefit of dumber commands
+ # (e.g. sdist, bdist_wininst, etc.)
+ #
+ self.distribution.metadata.version = self.egg_version
+
+ # If we bootstrapped around the lack of a PKG-INFO, as might be the
+ # case in a fresh checkout, make sure that any special tags get added
+ # to the version info
+ #
+ pd = self.distribution._patched_dist
+ if pd is not None and pd.key == self.egg_name.lower():
+ pd._version = self.egg_version
+ pd._parsed_version = parse_version(self.egg_version)
+ self.distribution._patched_dist = None
+
+ def write_or_delete_file(self, what, filename, data, force=False):
+ """Write `data` to `filename` or delete if empty
+
+ If `data` is non-empty, this routine is the same as ``write_file()``.
+ If `data` is empty but not ``None``, this is the same as calling
+ ``delete_file(filename)`. If `data` is ``None``, then this is a no-op
+ unless `filename` exists, in which case a warning is issued about the
+ orphaned file (if `force` is false), or deleted (if `force` is true).
+ """
+ if data:
+ self.write_file(what, filename, data)
+ elif os.path.exists(filename):
+ if data is None and not force:
+ log.warn(
+ "%s not set in setup(), but %s exists", what, filename
+ )
+ return
+ else:
+ self.delete_file(filename)
+
+ def write_file(self, what, filename, data):
+ """Write `data` to `filename` (if not a dry run) after announcing it
+
+ `what` is used in a log message to identify what is being written
+ to the file.
+ """
+ log.info("writing %s to %s", what, filename)
+ if six.PY3:
+ data = data.encode("utf-8")
+ if not self.dry_run:
+ f = open(filename, 'wb')
+ f.write(data)
+ f.close()
+
+ def delete_file(self, filename):
+ """Delete `filename` (if not a dry run) after announcing it"""
+ log.info("deleting %s", filename)
+ if not self.dry_run:
+ os.unlink(filename)
+
+ def run(self):
+ self.mkpath(self.egg_info)
+ os.utime(self.egg_info, None)
+ installer = self.distribution.fetch_build_egg
+ for ep in iter_entry_points('egg_info.writers'):
+ ep.require(installer=installer)
+ writer = ep.resolve()
+ writer(self, ep.name, os.path.join(self.egg_info, ep.name))
+
+ # Get rid of native_libs.txt if it was put there by older bdist_egg
+ nl = os.path.join(self.egg_info, "native_libs.txt")
+ if os.path.exists(nl):
+ self.delete_file(nl)
+
+ self.find_sources()
+
+ def find_sources(self):
+ """Generate SOURCES.txt manifest file"""
+ manifest_filename = os.path.join(self.egg_info, "SOURCES.txt")
+ mm = manifest_maker(self.distribution)
+ mm.manifest = manifest_filename
+ mm.run()
+ self.filelist = mm.filelist
+
+ def check_broken_egg_info(self):
+ bei = self.egg_name + '.egg-info'
+ if self.egg_base != os.curdir:
+ bei = os.path.join(self.egg_base, bei)
+ if os.path.exists(bei):
+ log.warn(
+ "-" * 78 + '\n'
+ "Note: Your current .egg-info directory has a '-' in its name;"
+ '\nthis will not work correctly with "setup.py develop".\n\n'
+ 'Please rename %s to %s to correct this problem.\n' + '-' * 78,
+ bei, self.egg_info
+ )
+ self.broken_egg_info = self.egg_info
+ self.egg_info = bei # make it work for now
+
+
+class FileList(_FileList):
+ # Implementations of the various MANIFEST.in commands
+
+ def process_template_line(self, line):
+ # Parse the line: split it up, make sure the right number of words
+ # is there, and return the relevant words. 'action' is always
+ # defined: it's the first word of the line. Which of the other
+ # three are defined depends on the action; it'll be either
+ # patterns, (dir and patterns), or (dir_pattern).
+ (action, patterns, dir, dir_pattern) = self._parse_template_line(line)
+
+ # OK, now we know that the action is valid and we have the
+ # right number of words on the line for that action -- so we
+ # can proceed with minimal error-checking.
+ if action == 'include':
+ self.debug_print("include " + ' '.join(patterns))
+ for pattern in patterns:
+ if not self.include(pattern):
+ log.warn("warning: no files found matching '%s'", pattern)
+
+ elif action == 'exclude':
+ self.debug_print("exclude " + ' '.join(patterns))
+ for pattern in patterns:
+ if not self.exclude(pattern):
+ log.warn(("warning: no previously-included files "
+ "found matching '%s'"), pattern)
+
+ elif action == 'global-include':
+ self.debug_print("global-include " + ' '.join(patterns))
+ for pattern in patterns:
+ if not self.global_include(pattern):
+ log.warn(("warning: no files found matching '%s' "
+ "anywhere in distribution"), pattern)
+
+ elif action == 'global-exclude':
+ self.debug_print("global-exclude " + ' '.join(patterns))
+ for pattern in patterns:
+ if not self.global_exclude(pattern):
+ log.warn(("warning: no previously-included files matching "
+ "'%s' found anywhere in distribution"),
+ pattern)
+
+ elif action == 'recursive-include':
+ self.debug_print("recursive-include %s %s" %
+ (dir, ' '.join(patterns)))
+ for pattern in patterns:
+ if not self.recursive_include(dir, pattern):
+ log.warn(("warning: no files found matching '%s' "
+ "under directory '%s'"),
+ pattern, dir)
+
+ elif action == 'recursive-exclude':
+ self.debug_print("recursive-exclude %s %s" %
+ (dir, ' '.join(patterns)))
+ for pattern in patterns:
+ if not self.recursive_exclude(dir, pattern):
+ log.warn(("warning: no previously-included files matching "
+ "'%s' found under directory '%s'"),
+ pattern, dir)
+
+ elif action == 'graft':
+ self.debug_print("graft " + dir_pattern)
+ if not self.graft(dir_pattern):
+ log.warn("warning: no directories found matching '%s'",
+ dir_pattern)
+
+ elif action == 'prune':
+ self.debug_print("prune " + dir_pattern)
+ if not self.prune(dir_pattern):
+ log.warn(("no previously-included directories found "
+ "matching '%s'"), dir_pattern)
+
+ else:
+ raise DistutilsInternalError(
+ "this cannot happen: invalid action '%s'" % action)
+
+ def _remove_files(self, predicate):
+ """
+ Remove all files from the file list that match the predicate.
+ Return True if any matching files were removed
+ """
+ found = False
+ for i in range(len(self.files) - 1, -1, -1):
+ if predicate(self.files[i]):
+ self.debug_print(" removing " + self.files[i])
+ del self.files[i]
+ found = True
+ return found
+
+ def include(self, pattern):
+ """Include files that match 'pattern'."""
+ found = [f for f in glob(pattern) if not os.path.isdir(f)]
+ self.extend(found)
+ return bool(found)
+
+ def exclude(self, pattern):
+ """Exclude files that match 'pattern'."""
+ match = translate_pattern(pattern)
+ return self._remove_files(match.match)
+
+ def recursive_include(self, dir, pattern):
+ """
+ Include all files anywhere in 'dir/' that match the pattern.
+ """
+ full_pattern = os.path.join(dir, '**', pattern)
+ found = [f for f in glob(full_pattern, recursive=True)
+ if not os.path.isdir(f)]
+ self.extend(found)
+ return bool(found)
+
+ def recursive_exclude(self, dir, pattern):
+ """
+ Exclude any file anywhere in 'dir/' that match the pattern.
+ """
+ match = translate_pattern(os.path.join(dir, '**', pattern))
+ return self._remove_files(match.match)
+
+ def graft(self, dir):
+ """Include all files from 'dir/'."""
+ found = [
+ item
+ for match_dir in glob(dir)
+ for item in distutils.filelist.findall(match_dir)
+ ]
+ self.extend(found)
+ return bool(found)
+
+ def prune(self, dir):
+ """Filter out files from 'dir/'."""
+ match = translate_pattern(os.path.join(dir, '**'))
+ return self._remove_files(match.match)
+
+ def global_include(self, pattern):
+ """
+ Include all files anywhere in the current directory that match the
+ pattern. This is very inefficient on large file trees.
+ """
+ if self.allfiles is None:
+ self.findall()
+ match = translate_pattern(os.path.join('**', pattern))
+ found = [f for f in self.allfiles if match.match(f)]
+ self.extend(found)
+ return bool(found)
+
+ def global_exclude(self, pattern):
+ """
+ Exclude all files anywhere that match the pattern.
+ """
+ match = translate_pattern(os.path.join('**', pattern))
+ return self._remove_files(match.match)
+
+ def append(self, item):
+ if item.endswith('\r'): # Fix older sdists built on Windows
+ item = item[:-1]
+ path = convert_path(item)
+
+ if self._safe_path(path):
+ self.files.append(path)
+
+ def extend(self, paths):
+ self.files.extend(filter(self._safe_path, paths))
+
+ def _repair(self):
+ """
+ Replace self.files with only safe paths
+
+ Because some owners of FileList manipulate the underlying
+ ``files`` attribute directly, this method must be called to
+ repair those paths.
+ """
+ self.files = list(filter(self._safe_path, self.files))
+
+ def _safe_path(self, path):
+ enc_warn = "'%s' not %s encodable -- skipping"
+
+ # To avoid accidental trans-codings errors, first to unicode
+ u_path = unicode_utils.filesys_decode(path)
+ if u_path is None:
+ log.warn("'%s' in unexpected encoding -- skipping" % path)
+ return False
+
+ # Must ensure utf-8 encodability
+ utf8_path = unicode_utils.try_encode(u_path, "utf-8")
+ if utf8_path is None:
+ log.warn(enc_warn, path, 'utf-8')
+ return False
+
+ try:
+ # accept is either way checks out
+ if os.path.exists(u_path) or os.path.exists(utf8_path):
+ return True
+ # this will catch any encode errors decoding u_path
+ except UnicodeEncodeError:
+ log.warn(enc_warn, path, sys.getfilesystemencoding())
+
+
+class manifest_maker(sdist):
+ template = "MANIFEST.in"
+
+ def initialize_options(self):
+ self.use_defaults = 1
+ self.prune = 1
+ self.manifest_only = 1
+ self.force_manifest = 1
+
+ def finalize_options(self):
+ pass
+
+ def run(self):
+ self.filelist = FileList()
+ if not os.path.exists(self.manifest):
+ self.write_manifest() # it must exist so it'll get in the list
+ self.add_defaults()
+ if os.path.exists(self.template):
+ self.read_template()
+ self.prune_file_list()
+ self.filelist.sort()
+ self.filelist.remove_duplicates()
+ self.write_manifest()
+
+ def _manifest_normalize(self, path):
+ path = unicode_utils.filesys_decode(path)
+ return path.replace(os.sep, '/')
+
+ def write_manifest(self):
+ """
+ Write the file list in 'self.filelist' to the manifest file
+ named by 'self.manifest'.
+ """
+ self.filelist._repair()
+
+ # Now _repairs should encodability, but not unicode
+ files = [self._manifest_normalize(f) for f in self.filelist.files]
+ msg = "writing manifest file '%s'" % self.manifest
+ self.execute(write_file, (self.manifest, files), msg)
+
+ def warn(self, msg):
+ if not self._should_suppress_warning(msg):
+ sdist.warn(self, msg)
+
+ @staticmethod
+ def _should_suppress_warning(msg):
+ """
+ suppress missing-file warnings from sdist
+ """
+ return re.match(r"standard file .*not found", msg)
+
+ def add_defaults(self):
+ sdist.add_defaults(self)
+ self.check_license()
+ self.filelist.append(self.template)
+ self.filelist.append(self.manifest)
+ rcfiles = list(walk_revctrl())
+ if rcfiles:
+ self.filelist.extend(rcfiles)
+ elif os.path.exists(self.manifest):
+ self.read_manifest()
+
+ if os.path.exists("setup.py"):
+ # setup.py should be included by default, even if it's not
+ # the script called to create the sdist
+ self.filelist.append("setup.py")
+
+ ei_cmd = self.get_finalized_command('egg_info')
+ self.filelist.graft(ei_cmd.egg_info)
+
+ def prune_file_list(self):
+ build = self.get_finalized_command('build')
+ base_dir = self.distribution.get_fullname()
+ self.filelist.prune(build.build_base)
+ self.filelist.prune(base_dir)
+ sep = re.escape(os.sep)
+ self.filelist.exclude_pattern(r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep,
+ is_regex=1)
+
+
+def write_file(filename, contents):
+ """Create a file with the specified name and write 'contents' (a
+ sequence of strings without line terminators) to it.
+ """
+ contents = "\n".join(contents)
+
+ # assuming the contents has been vetted for utf-8 encoding
+ contents = contents.encode("utf-8")
+
+ with open(filename, "wb") as f: # always write POSIX-style manifest
+ f.write(contents)
+
+
+def write_pkg_info(cmd, basename, filename):
+ log.info("writing %s", filename)
+ if not cmd.dry_run:
+ metadata = cmd.distribution.metadata
+ metadata.version, oldver = cmd.egg_version, metadata.version
+ metadata.name, oldname = cmd.egg_name, metadata.name
+
+ try:
+ # write unescaped data to PKG-INFO, so older pkg_resources
+ # can still parse it
+ metadata.write_pkg_info(cmd.egg_info)
+ finally:
+ metadata.name, metadata.version = oldname, oldver
+
+ safe = getattr(cmd.distribution, 'zip_safe', None)
+
+ bdist_egg.write_safety_flag(cmd.egg_info, safe)
+
+
+def warn_depends_obsolete(cmd, basename, filename):
+ if os.path.exists(filename):
+ log.warn(
+ "WARNING: 'depends.txt' is not used by setuptools 0.6!\n"
+ "Use the install_requires/extras_require setup() args instead."
+ )
+
+
+def _write_requirements(stream, reqs):
+ lines = yield_lines(reqs or ())
+ append_cr = lambda line: line + '\n'
+ lines = map(append_cr, sorted(lines))
+ stream.writelines(lines)
+
+
+def write_requirements(cmd, basename, filename):
+ dist = cmd.distribution
+ data = six.StringIO()
+ _write_requirements(data, dist.install_requires)
+ extras_require = dist.extras_require or {}
+ for extra in sorted(extras_require):
+ data.write('\n[{extra}]\n'.format(**vars()))
+ _write_requirements(data, extras_require[extra])
+ cmd.write_or_delete_file("requirements", filename, data.getvalue())
+
+
+def write_setup_requirements(cmd, basename, filename):
+ data = io.StringIO()
+ _write_requirements(data, cmd.distribution.setup_requires)
+ cmd.write_or_delete_file("setup-requirements", filename, data.getvalue())
+
+
+def write_toplevel_names(cmd, basename, filename):
+ pkgs = dict.fromkeys(
+ [
+ k.split('.', 1)[0]
+ for k in cmd.distribution.iter_distribution_names()
+ ]
+ )
+ cmd.write_file("top-level names", filename, '\n'.join(sorted(pkgs)) + '\n')
+
+
+def overwrite_arg(cmd, basename, filename):
+ write_arg(cmd, basename, filename, True)
+
+
+def write_arg(cmd, basename, filename, force=False):
+ argname = os.path.splitext(basename)[0]
+ value = getattr(cmd.distribution, argname, None)
+ if value is not None:
+ value = '\n'.join(value) + '\n'
+ cmd.write_or_delete_file(argname, filename, value, force)
+
+
+def write_entries(cmd, basename, filename):
+ ep = cmd.distribution.entry_points
+
+ if isinstance(ep, six.string_types) or ep is None:
+ data = ep
+ elif ep is not None:
+ data = []
+ for section, contents in sorted(ep.items()):
+ if not isinstance(contents, six.string_types):
+ contents = EntryPoint.parse_group(section, contents)
+ contents = '\n'.join(sorted(map(str, contents.values())))
+ data.append('[%s]\n%s\n\n' % (section, contents))
+ data = ''.join(data)
+
+ cmd.write_or_delete_file('entry points', filename, data, True)
+
+
+def get_pkg_info_revision():
+ """
+ Get a -r### off of PKG-INFO Version in case this is an sdist of
+ a subversion revision.
+ """
+ warnings.warn("get_pkg_info_revision is deprecated.", EggInfoDeprecationWarning)
+ if os.path.exists('PKG-INFO'):
+ with io.open('PKG-INFO') as f:
+ for line in f:
+ match = re.match(r"Version:.*-r(\d+)\s*$", line)
+ if match:
+ return int(match.group(1))
+ return 0
+
+
+class EggInfoDeprecationWarning(SetuptoolsDeprecationWarning):
+ """Class for warning about deprecations in eggInfo in setupTools. Not ignored by default, unlike DeprecationWarning."""
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/install.py b/monEnvTP/lib/python3.8/site-packages/setuptools/command/install.py
new file mode 100644
index 0000000000000000000000000000000000000000..72b9a3e424707633c7e31a347170f358cfa3f87a
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/command/install.py
@@ -0,0 +1,125 @@
+from distutils.errors import DistutilsArgError
+import inspect
+import glob
+import warnings
+import platform
+import distutils.command.install as orig
+
+import setuptools
+
+# Prior to numpy 1.9, NumPy relies on the '_install' name, so provide it for
+# now. See https://github.com/pypa/setuptools/issues/199/
+_install = orig.install
+
+
+class install(orig.install):
+ """Use easy_install to install the package, w/dependencies"""
+
+ user_options = orig.install.user_options + [
+ ('old-and-unmanageable', None, "Try not to use this!"),
+ ('single-version-externally-managed', None,
+ "used by system package builders to create 'flat' eggs"),
+ ]
+ boolean_options = orig.install.boolean_options + [
+ 'old-and-unmanageable', 'single-version-externally-managed',
+ ]
+ new_commands = [
+ ('install_egg_info', lambda self: True),
+ ('install_scripts', lambda self: True),
+ ]
+ _nc = dict(new_commands)
+
+ def initialize_options(self):
+ orig.install.initialize_options(self)
+ self.old_and_unmanageable = None
+ self.single_version_externally_managed = None
+
+ def finalize_options(self):
+ orig.install.finalize_options(self)
+ if self.root:
+ self.single_version_externally_managed = True
+ elif self.single_version_externally_managed:
+ if not self.root and not self.record:
+ raise DistutilsArgError(
+ "You must specify --record or --root when building system"
+ " packages"
+ )
+
+ def handle_extra_path(self):
+ if self.root or self.single_version_externally_managed:
+ # explicit backward-compatibility mode, allow extra_path to work
+ return orig.install.handle_extra_path(self)
+
+ # Ignore extra_path when installing an egg (or being run by another
+ # command without --root or --single-version-externally-managed
+ self.path_file = None
+ self.extra_dirs = ''
+
+ def run(self):
+ # Explicit request for old-style install? Just do it
+ if self.old_and_unmanageable or self.single_version_externally_managed:
+ return orig.install.run(self)
+
+ if not self._called_from_setup(inspect.currentframe()):
+ # Run in backward-compatibility mode to support bdist_* commands.
+ orig.install.run(self)
+ else:
+ self.do_egg_install()
+
+ @staticmethod
+ def _called_from_setup(run_frame):
+ """
+ Attempt to detect whether run() was called from setup() or by another
+ command. If called by setup(), the parent caller will be the
+ 'run_command' method in 'distutils.dist', and *its* caller will be
+ the 'run_commands' method. If called any other way, the
+ immediate caller *might* be 'run_command', but it won't have been
+ called by 'run_commands'. Return True in that case or if a call stack
+ is unavailable. Return False otherwise.
+ """
+ if run_frame is None:
+ msg = "Call stack not available. bdist_* commands may fail."
+ warnings.warn(msg)
+ if platform.python_implementation() == 'IronPython':
+ msg = "For best results, pass -X:Frames to enable call stack."
+ warnings.warn(msg)
+ return True
+ res = inspect.getouterframes(run_frame)[2]
+ caller, = res[:1]
+ info = inspect.getframeinfo(caller)
+ caller_module = caller.f_globals.get('__name__', '')
+ return (
+ caller_module == 'distutils.dist'
+ and info.function == 'run_commands'
+ )
+
+ def do_egg_install(self):
+
+ easy_install = self.distribution.get_command_class('easy_install')
+
+ cmd = easy_install(
+ self.distribution, args="x", root=self.root, record=self.record,
+ )
+ cmd.ensure_finalized() # finalize before bdist_egg munges install cmd
+ cmd.always_copy_from = '.' # make sure local-dir eggs get installed
+
+ # pick up setup-dir .egg files only: no .egg-info
+ cmd.package_index.scan(glob.glob('*.egg'))
+
+ self.run_command('bdist_egg')
+ args = [self.distribution.get_command_obj('bdist_egg').egg_output]
+
+ if setuptools.bootstrap_install_from:
+ # Bootstrap self-installation of setuptools
+ args.insert(0, setuptools.bootstrap_install_from)
+
+ cmd.args = args
+ cmd.run(show_deprecation=False)
+ setuptools.bootstrap_install_from = None
+
+
+# XXX Python 3.1 doesn't see _nc if this is inside the class
+install.sub_commands = (
+ [cmd for cmd in orig.install.sub_commands if cmd[0] not in install._nc] +
+ install.new_commands
+)
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/install_egg_info.py b/monEnvTP/lib/python3.8/site-packages/setuptools/command/install_egg_info.py
new file mode 100644
index 0000000000000000000000000000000000000000..5f405bcad743bac704e90c5489713a5cd4404497
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/command/install_egg_info.py
@@ -0,0 +1,82 @@
+from distutils import log, dir_util
+import os, sys
+
+from setuptools import Command
+from setuptools import namespaces
+from setuptools.archive_util import unpack_archive
+import pkg_resources
+
+
+class install_egg_info(namespaces.Installer, Command):
+ """Install an .egg-info directory for the package"""
+
+ description = "Install an .egg-info directory for the package"
+
+ user_options = [
+ ('install-dir=', 'd', "directory to install to"),
+ ]
+
+ def initialize_options(self):
+ self.install_dir = None
+ self.install_layout = None
+ self.prefix_option = None
+
+ def finalize_options(self):
+ self.set_undefined_options('install_lib',
+ ('install_dir', 'install_dir'))
+ self.set_undefined_options('install',('install_layout','install_layout'))
+ if sys.hexversion > 0x2060000:
+ self.set_undefined_options('install',('prefix_option','prefix_option'))
+ ei_cmd = self.get_finalized_command("egg_info")
+ basename = pkg_resources.Distribution(
+ None, None, ei_cmd.egg_name, ei_cmd.egg_version
+ ).egg_name() + '.egg-info'
+
+ if self.install_layout:
+ if not self.install_layout.lower() in ['deb']:
+ raise DistutilsOptionError("unknown value for --install-layout")
+ self.install_layout = self.install_layout.lower()
+ basename = basename.replace('-py%s' % pkg_resources.PY_MAJOR, '')
+ elif self.prefix_option or 'real_prefix' in sys.__dict__:
+ # don't modify for virtualenv
+ pass
+ else:
+ basename = basename.replace('-py%s' % pkg_resources.PY_MAJOR, '')
+
+ self.source = ei_cmd.egg_info
+ self.target = os.path.join(self.install_dir, basename)
+ self.outputs = []
+
+ def run(self):
+ self.run_command('egg_info')
+ if os.path.isdir(self.target) and not os.path.islink(self.target):
+ dir_util.remove_tree(self.target, dry_run=self.dry_run)
+ elif os.path.exists(self.target):
+ self.execute(os.unlink, (self.target,), "Removing " + self.target)
+ if not self.dry_run:
+ pkg_resources.ensure_directory(self.target)
+ self.execute(
+ self.copytree, (), "Copying %s to %s" % (self.source, self.target)
+ )
+ self.install_namespaces()
+
+ def get_outputs(self):
+ return self.outputs
+
+ def copytree(self):
+ # Copy the .egg-info tree to site-packages
+ def skimmer(src, dst):
+ # filter out source-control directories; note that 'src' is always
+ # a '/'-separated path, regardless of platform. 'dst' is a
+ # platform-specific path.
+ for skip in '.svn/', 'CVS/':
+ if src.startswith(skip) or '/' + skip in src:
+ return None
+ if self.install_layout and self.install_layout in ['deb'] and src.startswith('SOURCES.txt'):
+ log.info("Skipping SOURCES.txt")
+ return None
+ self.outputs.append(dst)
+ log.debug("Copying %s to %s", src, dst)
+ return dst
+
+ unpack_archive(self.source, self.target, skimmer)
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/install_lib.py b/monEnvTP/lib/python3.8/site-packages/setuptools/command/install_lib.py
new file mode 100644
index 0000000000000000000000000000000000000000..bf81519d98e8221707f45c1a3901b8d836095d30
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/command/install_lib.py
@@ -0,0 +1,147 @@
+import os
+import sys
+from itertools import product, starmap
+import distutils.command.install_lib as orig
+
+
+class install_lib(orig.install_lib):
+ """Don't add compiled flags to filenames of non-Python files"""
+
+ def initialize_options(self):
+ orig.install_lib.initialize_options(self)
+ self.multiarch = None
+ self.install_layout = None
+
+ def finalize_options(self):
+ orig.install_lib.finalize_options(self)
+ self.set_undefined_options('install',('install_layout','install_layout'))
+ if self.install_layout == 'deb' and sys.version_info[:2] >= (3, 3):
+ import sysconfig
+ self.multiarch = sysconfig.get_config_var('MULTIARCH')
+
+ def run(self):
+ self.build()
+ outfiles = self.install()
+ if outfiles is not None:
+ # always compile, in case we have any extension stubs to deal with
+ self.byte_compile(outfiles)
+
+ def get_exclusions(self):
+ """
+ Return a collections.Sized collections.Container of paths to be
+ excluded for single_version_externally_managed installations.
+ """
+ all_packages = (
+ pkg
+ for ns_pkg in self._get_SVEM_NSPs()
+ for pkg in self._all_packages(ns_pkg)
+ )
+
+ excl_specs = product(all_packages, self._gen_exclusion_paths())
+ return set(starmap(self._exclude_pkg_path, excl_specs))
+
+ def _exclude_pkg_path(self, pkg, exclusion_path):
+ """
+ Given a package name and exclusion path within that package,
+ compute the full exclusion path.
+ """
+ parts = pkg.split('.') + [exclusion_path]
+ return os.path.join(self.install_dir, *parts)
+
+ @staticmethod
+ def _all_packages(pkg_name):
+ """
+ >>> list(install_lib._all_packages('foo.bar.baz'))
+ ['foo.bar.baz', 'foo.bar', 'foo']
+ """
+ while pkg_name:
+ yield pkg_name
+ pkg_name, sep, child = pkg_name.rpartition('.')
+
+ def _get_SVEM_NSPs(self):
+ """
+ Get namespace packages (list) but only for
+ single_version_externally_managed installations and empty otherwise.
+ """
+ # TODO: is it necessary to short-circuit here? i.e. what's the cost
+ # if get_finalized_command is called even when namespace_packages is
+ # False?
+ if not self.distribution.namespace_packages:
+ return []
+
+ install_cmd = self.get_finalized_command('install')
+ svem = install_cmd.single_version_externally_managed
+
+ return self.distribution.namespace_packages if svem else []
+
+ @staticmethod
+ def _gen_exclusion_paths():
+ """
+ Generate file paths to be excluded for namespace packages (bytecode
+ cache files).
+ """
+ # always exclude the package module itself
+ yield '__init__.py'
+
+ yield '__init__.pyc'
+ yield '__init__.pyo'
+
+ if not hasattr(sys, 'implementation'):
+ return
+
+ base = os.path.join('__pycache__', '__init__.' + sys.implementation.cache_tag)
+ yield base + '.pyc'
+ yield base + '.pyo'
+ yield base + '.opt-1.pyc'
+ yield base + '.opt-2.pyc'
+
+ def copy_tree(
+ self, infile, outfile,
+ preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
+ ):
+ assert preserve_mode and preserve_times and not preserve_symlinks
+ exclude = self.get_exclusions()
+
+ if not exclude:
+ import distutils.dir_util
+ distutils.dir_util._multiarch = self.multiarch
+ return orig.install_lib.copy_tree(self, infile, outfile)
+
+ # Exclude namespace package __init__.py* files from the output
+
+ from setuptools.archive_util import unpack_directory
+ from distutils import log
+
+ outfiles = []
+
+ if self.multiarch:
+ import sysconfig
+ ext_suffix = sysconfig.get_config_var ('EXT_SUFFIX')
+ if ext_suffix.endswith(self.multiarch + ext_suffix[-3:]):
+ new_suffix = None
+ else:
+ new_suffix = "%s-%s%s" % (ext_suffix[:-3], self.multiarch, ext_suffix[-3:])
+
+ def pf(src, dst):
+ if dst in exclude:
+ log.warn("Skipping installation of %s (namespace package)",
+ dst)
+ return False
+
+ if self.multiarch and new_suffix and dst.endswith(ext_suffix) and not dst.endswith(new_suffix):
+ dst = dst.replace(ext_suffix, new_suffix)
+ log.info("renaming extension to %s", os.path.basename(dst))
+
+ log.info("copying %s -> %s", src, os.path.dirname(dst))
+ outfiles.append(dst)
+ return dst
+
+ unpack_directory(infile, outfile, pf)
+ return outfiles
+
+ def get_outputs(self):
+ outputs = orig.install_lib.get_outputs(self)
+ exclude = self.get_exclusions()
+ if exclude:
+ return [f for f in outputs if f not in exclude]
+ return outputs
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/install_scripts.py b/monEnvTP/lib/python3.8/site-packages/setuptools/command/install_scripts.py
new file mode 100644
index 0000000000000000000000000000000000000000..16234273a2d36b0b3d821a7a97bf8f03cf3f2948
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/command/install_scripts.py
@@ -0,0 +1,65 @@
+from distutils import log
+import distutils.command.install_scripts as orig
+import os
+import sys
+
+from pkg_resources import Distribution, PathMetadata, ensure_directory
+
+
+class install_scripts(orig.install_scripts):
+ """Do normal script install, plus any egg_info wrapper scripts"""
+
+ def initialize_options(self):
+ orig.install_scripts.initialize_options(self)
+ self.no_ep = False
+
+ def run(self):
+ import setuptools.command.easy_install as ei
+
+ self.run_command("egg_info")
+ if self.distribution.scripts:
+ orig.install_scripts.run(self) # run first to set up self.outfiles
+ else:
+ self.outfiles = []
+ if self.no_ep:
+ # don't install entry point scripts into .egg file!
+ return
+
+ ei_cmd = self.get_finalized_command("egg_info")
+ dist = Distribution(
+ ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
+ ei_cmd.egg_name, ei_cmd.egg_version,
+ )
+ bs_cmd = self.get_finalized_command('build_scripts')
+ exec_param = getattr(bs_cmd, 'executable', None)
+ bw_cmd = self.get_finalized_command("bdist_wininst")
+ is_wininst = getattr(bw_cmd, '_is_running', False)
+ writer = ei.ScriptWriter
+ if is_wininst:
+ exec_param = "python.exe"
+ writer = ei.WindowsScriptWriter
+ if exec_param == sys.executable:
+ # In case the path to the Python executable contains a space, wrap
+ # it so it's not split up.
+ exec_param = [exec_param]
+ # resolve the writer to the environment
+ writer = writer.best()
+ cmd = writer.command_spec_class.best().from_param(exec_param)
+ for args in writer.get_args(dist, cmd.as_header()):
+ self.write_script(*args)
+
+ def write_script(self, script_name, contents, mode="t", *ignored):
+ """Write an executable file to the scripts directory"""
+ from setuptools.command.easy_install import chmod, current_umask
+
+ log.info("Installing %s script to %s", script_name, self.install_dir)
+ target = os.path.join(self.install_dir, script_name)
+ self.outfiles.append(target)
+
+ mask = current_umask()
+ if not self.dry_run:
+ ensure_directory(target)
+ f = open(target, "w" + mode)
+ f.write(contents)
+ f.close()
+ chmod(target, 0o777 - mask)
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/launcher manifest.xml b/monEnvTP/lib/python3.8/site-packages/setuptools/command/launcher manifest.xml
new file mode 100644
index 0000000000000000000000000000000000000000..5972a96d8ded85cc14147ffc1400ec67c3b5a578
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/command/launcher manifest.xml
@@ -0,0 +1,15 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
+ <assemblyIdentity version="1.0.0.0"
+ processorArchitecture="X86"
+ name="%(name)s"
+ type="win32"/>
+ <!-- Identify the application security requirements. -->
+ <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
+ <security>
+ <requestedPrivileges>
+ <requestedExecutionLevel level="asInvoker" uiAccess="false"/>
+ </requestedPrivileges>
+ </security>
+ </trustInfo>
+</assembly>
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/py36compat.py b/monEnvTP/lib/python3.8/site-packages/setuptools/command/py36compat.py
new file mode 100644
index 0000000000000000000000000000000000000000..61063e7542586c05c3af21d31cd917ebd1118272
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/command/py36compat.py
@@ -0,0 +1,136 @@
+import os
+from glob import glob
+from distutils.util import convert_path
+from distutils.command import sdist
+
+from setuptools.extern.six.moves import filter
+
+
+class sdist_add_defaults:
+ """
+ Mix-in providing forward-compatibility for functionality as found in
+ distutils on Python 3.7.
+
+ Do not edit the code in this class except to update functionality
+ as implemented in distutils. Instead, override in the subclass.
+ """
+
+ def add_defaults(self):
+ """Add all the default files to self.filelist:
+ - README or README.txt
+ - setup.py
+ - test/test*.py
+ - all pure Python modules mentioned in setup script
+ - all files pointed by package_data (build_py)
+ - all files defined in data_files.
+ - all files defined as scripts.
+ - all C sources listed as part of extensions or C libraries
+ in the setup script (doesn't catch C headers!)
+ Warns if (README or README.txt) or setup.py are missing; everything
+ else is optional.
+ """
+ self._add_defaults_standards()
+ self._add_defaults_optional()
+ self._add_defaults_python()
+ self._add_defaults_data_files()
+ self._add_defaults_ext()
+ self._add_defaults_c_libs()
+ self._add_defaults_scripts()
+
+ @staticmethod
+ def _cs_path_exists(fspath):
+ """
+ Case-sensitive path existence check
+
+ >>> sdist_add_defaults._cs_path_exists(__file__)
+ True
+ >>> sdist_add_defaults._cs_path_exists(__file__.upper())
+ False
+ """
+ if not os.path.exists(fspath):
+ return False
+ # make absolute so we always have a directory
+ abspath = os.path.abspath(fspath)
+ directory, filename = os.path.split(abspath)
+ return filename in os.listdir(directory)
+
+ def _add_defaults_standards(self):
+ standards = [self.READMES, self.distribution.script_name]
+ for fn in standards:
+ if isinstance(fn, tuple):
+ alts = fn
+ got_it = False
+ for fn in alts:
+ if self._cs_path_exists(fn):
+ got_it = True
+ self.filelist.append(fn)
+ break
+
+ if not got_it:
+ self.warn("standard file not found: should have one of " +
+ ', '.join(alts))
+ else:
+ if self._cs_path_exists(fn):
+ self.filelist.append(fn)
+ else:
+ self.warn("standard file '%s' not found" % fn)
+
+ def _add_defaults_optional(self):
+ optional = ['test/test*.py', 'setup.cfg']
+ for pattern in optional:
+ files = filter(os.path.isfile, glob(pattern))
+ self.filelist.extend(files)
+
+ def _add_defaults_python(self):
+ # build_py is used to get:
+ # - python modules
+ # - files defined in package_data
+ build_py = self.get_finalized_command('build_py')
+
+ # getting python files
+ if self.distribution.has_pure_modules():
+ self.filelist.extend(build_py.get_source_files())
+
+ # getting package_data files
+ # (computed in build_py.data_files by build_py.finalize_options)
+ for pkg, src_dir, build_dir, filenames in build_py.data_files:
+ for filename in filenames:
+ self.filelist.append(os.path.join(src_dir, filename))
+
+ def _add_defaults_data_files(self):
+ # getting distribution.data_files
+ if self.distribution.has_data_files():
+ for item in self.distribution.data_files:
+ if isinstance(item, str):
+ # plain file
+ item = convert_path(item)
+ if os.path.isfile(item):
+ self.filelist.append(item)
+ else:
+ # a (dirname, filenames) tuple
+ dirname, filenames = item
+ for f in filenames:
+ f = convert_path(f)
+ if os.path.isfile(f):
+ self.filelist.append(f)
+
+ def _add_defaults_ext(self):
+ if self.distribution.has_ext_modules():
+ build_ext = self.get_finalized_command('build_ext')
+ self.filelist.extend(build_ext.get_source_files())
+
+ def _add_defaults_c_libs(self):
+ if self.distribution.has_c_libraries():
+ build_clib = self.get_finalized_command('build_clib')
+ self.filelist.extend(build_clib.get_source_files())
+
+ def _add_defaults_scripts(self):
+ if self.distribution.has_scripts():
+ build_scripts = self.get_finalized_command('build_scripts')
+ self.filelist.extend(build_scripts.get_source_files())
+
+
+if hasattr(sdist.sdist, '_add_defaults_standards'):
+ # disable the functionality already available upstream
+ class sdist_add_defaults:
+ pass
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/register.py b/monEnvTP/lib/python3.8/site-packages/setuptools/command/register.py
new file mode 100644
index 0000000000000000000000000000000000000000..b8266b9a60f8c363ba35f7b73befd7c9c7cb4abc
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/command/register.py
@@ -0,0 +1,18 @@
+from distutils import log
+import distutils.command.register as orig
+
+from setuptools.errors import RemovedCommandError
+
+
+class register(orig.register):
+ """Formerly used to register packages on PyPI."""
+
+ def run(self):
+ msg = (
+ "The register command has been removed, use twine to upload "
+ + "instead (https://pypi.org/p/twine)"
+ )
+
+ self.announce("ERROR: " + msg, log.ERROR)
+
+ raise RemovedCommandError(msg)
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/rotate.py b/monEnvTP/lib/python3.8/site-packages/setuptools/command/rotate.py
new file mode 100644
index 0000000000000000000000000000000000000000..b89353f529b3d08e768dea69a9dc8b5e7403003d
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/command/rotate.py
@@ -0,0 +1,66 @@
+from distutils.util import convert_path
+from distutils import log
+from distutils.errors import DistutilsOptionError
+import os
+import shutil
+
+from setuptools.extern import six
+
+from setuptools import Command
+
+
+class rotate(Command):
+ """Delete older distributions"""
+
+ description = "delete older distributions, keeping N newest files"
+ user_options = [
+ ('match=', 'm', "patterns to match (required)"),
+ ('dist-dir=', 'd', "directory where the distributions are"),
+ ('keep=', 'k', "number of matching distributions to keep"),
+ ]
+
+ boolean_options = []
+
+ def initialize_options(self):
+ self.match = None
+ self.dist_dir = None
+ self.keep = None
+
+ def finalize_options(self):
+ if self.match is None:
+ raise DistutilsOptionError(
+ "Must specify one or more (comma-separated) match patterns "
+ "(e.g. '.zip' or '.egg')"
+ )
+ if self.keep is None:
+ raise DistutilsOptionError("Must specify number of files to keep")
+ try:
+ self.keep = int(self.keep)
+ except ValueError:
+ raise DistutilsOptionError("--keep must be an integer")
+ if isinstance(self.match, six.string_types):
+ self.match = [
+ convert_path(p.strip()) for p in self.match.split(',')
+ ]
+ self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
+
+ def run(self):
+ self.run_command("egg_info")
+ from glob import glob
+
+ for pattern in self.match:
+ pattern = self.distribution.get_name() + '*' + pattern
+ files = glob(os.path.join(self.dist_dir, pattern))
+ files = [(os.path.getmtime(f), f) for f in files]
+ files.sort()
+ files.reverse()
+
+ log.info("%d file(s) matching %s", len(files), pattern)
+ files = files[self.keep:]
+ for (t, f) in files:
+ log.info("Deleting %s", f)
+ if not self.dry_run:
+ if os.path.isdir(f):
+ shutil.rmtree(f)
+ else:
+ os.unlink(f)
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/saveopts.py b/monEnvTP/lib/python3.8/site-packages/setuptools/command/saveopts.py
new file mode 100644
index 0000000000000000000000000000000000000000..611cec552867a6d50b7edd700c86c7396d906ea2
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/command/saveopts.py
@@ -0,0 +1,22 @@
+from setuptools.command.setopt import edit_config, option_base
+
+
+class saveopts(option_base):
+ """Save command-line options to a file"""
+
+ description = "save supplied options to setup.cfg or other config file"
+
+ def run(self):
+ dist = self.distribution
+ settings = {}
+
+ for cmd in dist.command_options:
+
+ if cmd == 'saveopts':
+ continue # don't save our own options!
+
+ for opt, (src, val) in dist.get_option_dict(cmd).items():
+ if src == "command line":
+ settings.setdefault(cmd, {})[opt] = val
+
+ edit_config(self.filename, settings, self.dry_run)
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/sdist.py b/monEnvTP/lib/python3.8/site-packages/setuptools/command/sdist.py
new file mode 100644
index 0000000000000000000000000000000000000000..a851453f9aa9506d307e1aa7e802fdee9e943eae
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/command/sdist.py
@@ -0,0 +1,252 @@
+from distutils import log
+import distutils.command.sdist as orig
+import os
+import sys
+import io
+import contextlib
+
+from setuptools.extern import six, ordered_set
+
+from .py36compat import sdist_add_defaults
+
+import pkg_resources
+
+_default_revctrl = list
+
+
+def walk_revctrl(dirname=''):
+ """Find all files under revision control"""
+ for ep in pkg_resources.iter_entry_points('setuptools.file_finders'):
+ for item in ep.load()(dirname):
+ yield item
+
+
+class sdist(sdist_add_defaults, orig.sdist):
+ """Smart sdist that finds anything supported by revision control"""
+
+ user_options = [
+ ('formats=', None,
+ "formats for source distribution (comma-separated list)"),
+ ('keep-temp', 'k',
+ "keep the distribution tree around after creating " +
+ "archive file(s)"),
+ ('dist-dir=', 'd',
+ "directory to put the source distribution archive(s) in "
+ "[default: dist]"),
+ ]
+
+ negative_opt = {}
+
+ README_EXTENSIONS = ['', '.rst', '.txt', '.md']
+ READMES = tuple('README{0}'.format(ext) for ext in README_EXTENSIONS)
+
+ def run(self):
+ self.run_command('egg_info')
+ ei_cmd = self.get_finalized_command('egg_info')
+ self.filelist = ei_cmd.filelist
+ self.filelist.append(os.path.join(ei_cmd.egg_info, 'SOURCES.txt'))
+ self.check_readme()
+
+ # Run sub commands
+ for cmd_name in self.get_sub_commands():
+ self.run_command(cmd_name)
+
+ self.make_distribution()
+
+ dist_files = getattr(self.distribution, 'dist_files', [])
+ for file in self.archive_files:
+ data = ('sdist', '', file)
+ if data not in dist_files:
+ dist_files.append(data)
+
+ def initialize_options(self):
+ orig.sdist.initialize_options(self)
+
+ self._default_to_gztar()
+
+ def _default_to_gztar(self):
+ # only needed on Python prior to 3.6.
+ if sys.version_info >= (3, 6, 0, 'beta', 1):
+ return
+ self.formats = ['gztar']
+
+ def make_distribution(self):
+ """
+ Workaround for #516
+ """
+ with self._remove_os_link():
+ orig.sdist.make_distribution(self)
+
+ @staticmethod
+ @contextlib.contextmanager
+ def _remove_os_link():
+ """
+ In a context, remove and restore os.link if it exists
+ """
+
+ class NoValue:
+ pass
+
+ orig_val = getattr(os, 'link', NoValue)
+ try:
+ del os.link
+ except Exception:
+ pass
+ try:
+ yield
+ finally:
+ if orig_val is not NoValue:
+ setattr(os, 'link', orig_val)
+
+ def __read_template_hack(self):
+ # This grody hack closes the template file (MANIFEST.in) if an
+ # exception occurs during read_template.
+ # Doing so prevents an error when easy_install attempts to delete the
+ # file.
+ try:
+ orig.sdist.read_template(self)
+ except Exception:
+ _, _, tb = sys.exc_info()
+ tb.tb_next.tb_frame.f_locals['template'].close()
+ raise
+
+ # Beginning with Python 2.7.2, 3.1.4, and 3.2.1, this leaky file handle
+ # has been fixed, so only override the method if we're using an earlier
+ # Python.
+ has_leaky_handle = (
+ sys.version_info < (2, 7, 2)
+ or (3, 0) <= sys.version_info < (3, 1, 4)
+ or (3, 2) <= sys.version_info < (3, 2, 1)
+ )
+ if has_leaky_handle:
+ read_template = __read_template_hack
+
+ def _add_defaults_optional(self):
+ if six.PY2:
+ sdist_add_defaults._add_defaults_optional(self)
+ else:
+ super()._add_defaults_optional()
+ if os.path.isfile('pyproject.toml'):
+ self.filelist.append('pyproject.toml')
+
+ def _add_defaults_python(self):
+ """getting python files"""
+ if self.distribution.has_pure_modules():
+ build_py = self.get_finalized_command('build_py')
+ self.filelist.extend(build_py.get_source_files())
+ self._add_data_files(self._safe_data_files(build_py))
+
+ def _safe_data_files(self, build_py):
+ """
+ Extracting data_files from build_py is known to cause
+ infinite recursion errors when `include_package_data`
+ is enabled, so suppress it in that case.
+ """
+ if self.distribution.include_package_data:
+ return ()
+ return build_py.data_files
+
+ def _add_data_files(self, data_files):
+ """
+ Add data files as found in build_py.data_files.
+ """
+ self.filelist.extend(
+ os.path.join(src_dir, name)
+ for _, src_dir, _, filenames in data_files
+ for name in filenames
+ )
+
+ def _add_defaults_data_files(self):
+ try:
+ if six.PY2:
+ sdist_add_defaults._add_defaults_data_files(self)
+ else:
+ super()._add_defaults_data_files()
+ except TypeError:
+ log.warn("data_files contains unexpected objects")
+
+ def check_readme(self):
+ for f in self.READMES:
+ if os.path.exists(f):
+ return
+ else:
+ self.warn(
+ "standard file not found: should have one of " +
+ ', '.join(self.READMES)
+ )
+
+ def make_release_tree(self, base_dir, files):
+ orig.sdist.make_release_tree(self, base_dir, files)
+
+ # Save any egg_info command line options used to create this sdist
+ dest = os.path.join(base_dir, 'setup.cfg')
+ if hasattr(os, 'link') and os.path.exists(dest):
+ # unlink and re-copy, since it might be hard-linked, and
+ # we don't want to change the source version
+ os.unlink(dest)
+ self.copy_file('setup.cfg', dest)
+
+ self.get_finalized_command('egg_info').save_version_info(dest)
+
+ def _manifest_is_not_generated(self):
+ # check for special comment used in 2.7.1 and higher
+ if not os.path.isfile(self.manifest):
+ return False
+
+ with io.open(self.manifest, 'rb') as fp:
+ first_line = fp.readline()
+ return (first_line !=
+ '# file GENERATED by distutils, do NOT edit\n'.encode())
+
+ def read_manifest(self):
+ """Read the manifest file (named by 'self.manifest') and use it to
+ fill in 'self.filelist', the list of files to include in the source
+ distribution.
+ """
+ log.info("reading manifest file '%s'", self.manifest)
+ manifest = open(self.manifest, 'rb')
+ for line in manifest:
+ # The manifest must contain UTF-8. See #303.
+ if six.PY3:
+ try:
+ line = line.decode('UTF-8')
+ except UnicodeDecodeError:
+ log.warn("%r not UTF-8 decodable -- skipping" % line)
+ continue
+ # ignore comments and blank lines
+ line = line.strip()
+ if line.startswith('#') or not line:
+ continue
+ self.filelist.append(line)
+ manifest.close()
+
+ def check_license(self):
+ """Checks if license_file' or 'license_files' is configured and adds any
+ valid paths to 'self.filelist'.
+ """
+
+ files = ordered_set.OrderedSet()
+
+ opts = self.distribution.get_option_dict('metadata')
+
+ # ignore the source of the value
+ _, license_file = opts.get('license_file', (None, None))
+
+ if license_file is None:
+ log.debug("'license_file' option was not specified")
+ else:
+ files.add(license_file)
+
+ try:
+ files.update(self.distribution.metadata.license_files)
+ except TypeError:
+ log.warn("warning: 'license_files' option is malformed")
+
+ for f in files:
+ if not os.path.exists(f):
+ log.warn(
+ "warning: Failed to find the configured license file '%s'",
+ f)
+ files.remove(f)
+
+ self.filelist.extend(files)
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/setopt.py b/monEnvTP/lib/python3.8/site-packages/setuptools/command/setopt.py
new file mode 100644
index 0000000000000000000000000000000000000000..7e57cc02627fc3c3bb49613731a51c72452f96ba
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/command/setopt.py
@@ -0,0 +1,149 @@
+from distutils.util import convert_path
+from distutils import log
+from distutils.errors import DistutilsOptionError
+import distutils
+import os
+
+from setuptools.extern.six.moves import configparser
+
+from setuptools import Command
+
+__all__ = ['config_file', 'edit_config', 'option_base', 'setopt']
+
+
+def config_file(kind="local"):
+ """Get the filename of the distutils, local, global, or per-user config
+
+ `kind` must be one of "local", "global", or "user"
+ """
+ if kind == 'local':
+ return 'setup.cfg'
+ if kind == 'global':
+ return os.path.join(
+ os.path.dirname(distutils.__file__), 'distutils.cfg'
+ )
+ if kind == 'user':
+ dot = os.name == 'posix' and '.' or ''
+ return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot))
+ raise ValueError(
+ "config_file() type must be 'local', 'global', or 'user'", kind
+ )
+
+
+def edit_config(filename, settings, dry_run=False):
+ """Edit a configuration file to include `settings`
+
+ `settings` is a dictionary of dictionaries or ``None`` values, keyed by
+ command/section name. A ``None`` value means to delete the entire section,
+ while a dictionary lists settings to be changed or deleted in that section.
+ A setting of ``None`` means to delete that setting.
+ """
+ log.debug("Reading configuration from %s", filename)
+ opts = configparser.RawConfigParser()
+ opts.read([filename])
+ for section, options in settings.items():
+ if options is None:
+ log.info("Deleting section [%s] from %s", section, filename)
+ opts.remove_section(section)
+ else:
+ if not opts.has_section(section):
+ log.debug("Adding new section [%s] to %s", section, filename)
+ opts.add_section(section)
+ for option, value in options.items():
+ if value is None:
+ log.debug(
+ "Deleting %s.%s from %s",
+ section, option, filename
+ )
+ opts.remove_option(section, option)
+ if not opts.options(section):
+ log.info("Deleting empty [%s] section from %s",
+ section, filename)
+ opts.remove_section(section)
+ else:
+ log.debug(
+ "Setting %s.%s to %r in %s",
+ section, option, value, filename
+ )
+ opts.set(section, option, value)
+
+ log.info("Writing %s", filename)
+ if not dry_run:
+ with open(filename, 'w') as f:
+ opts.write(f)
+
+
+class option_base(Command):
+ """Abstract base class for commands that mess with config files"""
+
+ user_options = [
+ ('global-config', 'g',
+ "save options to the site-wide distutils.cfg file"),
+ ('user-config', 'u',
+ "save options to the current user's pydistutils.cfg file"),
+ ('filename=', 'f',
+ "configuration file to use (default=setup.cfg)"),
+ ]
+
+ boolean_options = [
+ 'global-config', 'user-config',
+ ]
+
+ def initialize_options(self):
+ self.global_config = None
+ self.user_config = None
+ self.filename = None
+
+ def finalize_options(self):
+ filenames = []
+ if self.global_config:
+ filenames.append(config_file('global'))
+ if self.user_config:
+ filenames.append(config_file('user'))
+ if self.filename is not None:
+ filenames.append(self.filename)
+ if not filenames:
+ filenames.append(config_file('local'))
+ if len(filenames) > 1:
+ raise DistutilsOptionError(
+ "Must specify only one configuration file option",
+ filenames
+ )
+ self.filename, = filenames
+
+
+class setopt(option_base):
+ """Save command-line options to a file"""
+
+ description = "set an option in setup.cfg or another config file"
+
+ user_options = [
+ ('command=', 'c', 'command to set an option for'),
+ ('option=', 'o', 'option to set'),
+ ('set-value=', 's', 'value of the option'),
+ ('remove', 'r', 'remove (unset) the value'),
+ ] + option_base.user_options
+
+ boolean_options = option_base.boolean_options + ['remove']
+
+ def initialize_options(self):
+ option_base.initialize_options(self)
+ self.command = None
+ self.option = None
+ self.set_value = None
+ self.remove = None
+
+ def finalize_options(self):
+ option_base.finalize_options(self)
+ if self.command is None or self.option is None:
+ raise DistutilsOptionError("Must specify --command *and* --option")
+ if self.set_value is None and not self.remove:
+ raise DistutilsOptionError("Must specify --set-value or --remove")
+
+ def run(self):
+ edit_config(
+ self.filename, {
+ self.command: {self.option.replace('-', '_'): self.set_value}
+ },
+ self.dry_run
+ )
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/test.py b/monEnvTP/lib/python3.8/site-packages/setuptools/command/test.py
new file mode 100644
index 0000000000000000000000000000000000000000..c148b38d10c7691c2045520e5aedb60293dd714d
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/command/test.py
@@ -0,0 +1,279 @@
+import os
+import operator
+import sys
+import contextlib
+import itertools
+import unittest
+from distutils.errors import DistutilsError, DistutilsOptionError
+from distutils import log
+from unittest import TestLoader
+
+from setuptools.extern import six
+from setuptools.extern.six.moves import map, filter
+
+from pkg_resources import (resource_listdir, resource_exists, normalize_path,
+ working_set, _namespace_packages, evaluate_marker,
+ add_activation_listener, require, EntryPoint)
+from setuptools import Command
+from .build_py import _unique_everseen
+
+__metaclass__ = type
+
+
+class ScanningLoader(TestLoader):
+
+ def __init__(self):
+ TestLoader.__init__(self)
+ self._visited = set()
+
+ def loadTestsFromModule(self, module, pattern=None):
+ """Return a suite of all tests cases contained in the given module
+
+ If the module is a package, load tests from all the modules in it.
+ If the module has an ``additional_tests`` function, call it and add
+ the return value to the tests.
+ """
+ if module in self._visited:
+ return None
+ self._visited.add(module)
+
+ tests = []
+ tests.append(TestLoader.loadTestsFromModule(self, module))
+
+ if hasattr(module, "additional_tests"):
+ tests.append(module.additional_tests())
+
+ if hasattr(module, '__path__'):
+ for file in resource_listdir(module.__name__, ''):
+ if file.endswith('.py') and file != '__init__.py':
+ submodule = module.__name__ + '.' + file[:-3]
+ else:
+ if resource_exists(module.__name__, file + '/__init__.py'):
+ submodule = module.__name__ + '.' + file
+ else:
+ continue
+ tests.append(self.loadTestsFromName(submodule))
+
+ if len(tests) != 1:
+ return self.suiteClass(tests)
+ else:
+ return tests[0] # don't create a nested suite for only one return
+
+
+# adapted from jaraco.classes.properties:NonDataProperty
+class NonDataProperty:
+ def __init__(self, fget):
+ self.fget = fget
+
+ def __get__(self, obj, objtype=None):
+ if obj is None:
+ return self
+ return self.fget(obj)
+
+
+class test(Command):
+ """Command to run unit tests after in-place build"""
+
+ description = "run unit tests after in-place build (deprecated)"
+
+ user_options = [
+ ('test-module=', 'm', "Run 'test_suite' in specified module"),
+ ('test-suite=', 's',
+ "Run single test, case or suite (e.g. 'module.test_suite')"),
+ ('test-runner=', 'r', "Test runner to use"),
+ ]
+
+ def initialize_options(self):
+ self.test_suite = None
+ self.test_module = None
+ self.test_loader = None
+ self.test_runner = None
+
+ def finalize_options(self):
+
+ if self.test_suite and self.test_module:
+ msg = "You may specify a module or a suite, but not both"
+ raise DistutilsOptionError(msg)
+
+ if self.test_suite is None:
+ if self.test_module is None:
+ self.test_suite = self.distribution.test_suite
+ else:
+ self.test_suite = self.test_module + ".test_suite"
+
+ if self.test_loader is None:
+ self.test_loader = getattr(self.distribution, 'test_loader', None)
+ if self.test_loader is None:
+ self.test_loader = "setuptools.command.test:ScanningLoader"
+ if self.test_runner is None:
+ self.test_runner = getattr(self.distribution, 'test_runner', None)
+
+ @NonDataProperty
+ def test_args(self):
+ return list(self._test_args())
+
+ def _test_args(self):
+ if not self.test_suite and sys.version_info >= (2, 7):
+ yield 'discover'
+ if self.verbose:
+ yield '--verbose'
+ if self.test_suite:
+ yield self.test_suite
+
+ def with_project_on_sys_path(self, func):
+ """
+ Backward compatibility for project_on_sys_path context.
+ """
+ with self.project_on_sys_path():
+ func()
+
+ @contextlib.contextmanager
+ def project_on_sys_path(self, include_dists=[]):
+ with_2to3 = six.PY3 and getattr(self.distribution, 'use_2to3', False)
+
+ if with_2to3:
+ # If we run 2to3 we can not do this inplace:
+
+ # Ensure metadata is up-to-date
+ self.reinitialize_command('build_py', inplace=0)
+ self.run_command('build_py')
+ bpy_cmd = self.get_finalized_command("build_py")
+ build_path = normalize_path(bpy_cmd.build_lib)
+
+ # Build extensions
+ self.reinitialize_command('egg_info', egg_base=build_path)
+ self.run_command('egg_info')
+
+ self.reinitialize_command('build_ext', inplace=0)
+ self.run_command('build_ext')
+ else:
+ # Without 2to3 inplace works fine:
+ self.run_command('egg_info')
+
+ # Build extensions in-place
+ self.reinitialize_command('build_ext', inplace=1)
+ self.run_command('build_ext')
+
+ ei_cmd = self.get_finalized_command("egg_info")
+
+ old_path = sys.path[:]
+ old_modules = sys.modules.copy()
+
+ try:
+ project_path = normalize_path(ei_cmd.egg_base)
+ sys.path.insert(0, project_path)
+ working_set.__init__()
+ add_activation_listener(lambda dist: dist.activate())
+ require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version))
+ with self.paths_on_pythonpath([project_path]):
+ yield
+ finally:
+ sys.path[:] = old_path
+ sys.modules.clear()
+ sys.modules.update(old_modules)
+ working_set.__init__()
+
+ @staticmethod
+ @contextlib.contextmanager
+ def paths_on_pythonpath(paths):
+ """
+ Add the indicated paths to the head of the PYTHONPATH environment
+ variable so that subprocesses will also see the packages at
+ these paths.
+
+ Do this in a context that restores the value on exit.
+ """
+ nothing = object()
+ orig_pythonpath = os.environ.get('PYTHONPATH', nothing)
+ current_pythonpath = os.environ.get('PYTHONPATH', '')
+ try:
+ prefix = os.pathsep.join(_unique_everseen(paths))
+ to_join = filter(None, [prefix, current_pythonpath])
+ new_path = os.pathsep.join(to_join)
+ if new_path:
+ os.environ['PYTHONPATH'] = new_path
+ yield
+ finally:
+ if orig_pythonpath is nothing:
+ os.environ.pop('PYTHONPATH', None)
+ else:
+ os.environ['PYTHONPATH'] = orig_pythonpath
+
+ @staticmethod
+ def install_dists(dist):
+ """
+ Install the requirements indicated by self.distribution and
+ return an iterable of the dists that were built.
+ """
+ ir_d = dist.fetch_build_eggs(dist.install_requires)
+ tr_d = dist.fetch_build_eggs(dist.tests_require or [])
+ er_d = dist.fetch_build_eggs(
+ v for k, v in dist.extras_require.items()
+ if k.startswith(':') and evaluate_marker(k[1:])
+ )
+ return itertools.chain(ir_d, tr_d, er_d)
+
+ def run(self):
+ self.announce(
+ "WARNING: Testing via this command is deprecated and will be "
+ "removed in a future version. Users looking for a generic test "
+ "entry point independent of test runner are encouraged to use "
+ "tox.",
+ log.WARN,
+ )
+
+ installed_dists = self.install_dists(self.distribution)
+
+ cmd = ' '.join(self._argv)
+ if self.dry_run:
+ self.announce('skipping "%s" (dry run)' % cmd)
+ return
+
+ self.announce('running "%s"' % cmd)
+
+ paths = map(operator.attrgetter('location'), installed_dists)
+ with self.paths_on_pythonpath(paths):
+ with self.project_on_sys_path():
+ self.run_tests()
+
+ def run_tests(self):
+ # Purge modules under test from sys.modules. The test loader will
+ # re-import them from the build location. Required when 2to3 is used
+ # with namespace packages.
+ if six.PY3 and getattr(self.distribution, 'use_2to3', False):
+ module = self.test_suite.split('.')[0]
+ if module in _namespace_packages:
+ del_modules = []
+ if module in sys.modules:
+ del_modules.append(module)
+ module += '.'
+ for name in sys.modules:
+ if name.startswith(module):
+ del_modules.append(name)
+ list(map(sys.modules.__delitem__, del_modules))
+
+ test = unittest.main(
+ None, None, self._argv,
+ testLoader=self._resolve_as_ep(self.test_loader),
+ testRunner=self._resolve_as_ep(self.test_runner),
+ exit=False,
+ )
+ if not test.result.wasSuccessful():
+ msg = 'Test failed: %s' % test.result
+ self.announce(msg, log.ERROR)
+ raise DistutilsError(msg)
+
+ @property
+ def _argv(self):
+ return ['unittest'] + self.test_args
+
+ @staticmethod
+ def _resolve_as_ep(val):
+ """
+ Load the indicated attribute value, called, as a as if it were
+ specified as an entry point.
+ """
+ if val is None:
+ return
+ parsed = EntryPoint.parse("x=" + val)
+ return parsed.resolve()()
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/upload.py b/monEnvTP/lib/python3.8/site-packages/setuptools/command/upload.py
new file mode 100644
index 0000000000000000000000000000000000000000..ec7f81e22772511d668e5ab92f625db33259e803
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/command/upload.py
@@ -0,0 +1,17 @@
+from distutils import log
+from distutils.command import upload as orig
+
+from setuptools.errors import RemovedCommandError
+
+
+class upload(orig.upload):
+ """Formerly used to upload packages to PyPI."""
+
+ def run(self):
+ msg = (
+ "The upload command has been removed, use twine to upload "
+ + "instead (https://pypi.org/p/twine)"
+ )
+
+ self.announce("ERROR: " + msg, log.ERROR)
+ raise RemovedCommandError(msg)
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/command/upload_docs.py b/monEnvTP/lib/python3.8/site-packages/setuptools/command/upload_docs.py
new file mode 100644
index 0000000000000000000000000000000000000000..07aa564af451ce41d818d72f8ee93cb46887cecf
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/command/upload_docs.py
@@ -0,0 +1,206 @@
+# -*- coding: utf-8 -*-
+"""upload_docs
+
+Implements a Distutils 'upload_docs' subcommand (upload documentation to
+PyPI's pythonhosted.org).
+"""
+
+from base64 import standard_b64encode
+from distutils import log
+from distutils.errors import DistutilsOptionError
+import os
+import socket
+import zipfile
+import tempfile
+import shutil
+import itertools
+import functools
+
+from setuptools.extern import six
+from setuptools.extern.six.moves import http_client, urllib
+
+from pkg_resources import iter_entry_points
+from .upload import upload
+
+
+def _encode(s):
+ errors = 'surrogateescape' if six.PY3 else 'strict'
+ return s.encode('utf-8', errors)
+
+
+class upload_docs(upload):
+ # override the default repository as upload_docs isn't
+ # supported by Warehouse (and won't be).
+ DEFAULT_REPOSITORY = 'https://pypi.python.org/pypi/'
+
+ description = 'Upload documentation to PyPI'
+
+ user_options = [
+ ('repository=', 'r',
+ "url of repository [default: %s]" % upload.DEFAULT_REPOSITORY),
+ ('show-response', None,
+ 'display full response text from server'),
+ ('upload-dir=', None, 'directory to upload'),
+ ]
+ boolean_options = upload.boolean_options
+
+ def has_sphinx(self):
+ if self.upload_dir is None:
+ for ep in iter_entry_points('distutils.commands', 'build_sphinx'):
+ return True
+
+ sub_commands = [('build_sphinx', has_sphinx)]
+
+ def initialize_options(self):
+ upload.initialize_options(self)
+ self.upload_dir = None
+ self.target_dir = None
+
+ def finalize_options(self):
+ upload.finalize_options(self)
+ if self.upload_dir is None:
+ if self.has_sphinx():
+ build_sphinx = self.get_finalized_command('build_sphinx')
+ self.target_dir = build_sphinx.builder_target_dir
+ else:
+ build = self.get_finalized_command('build')
+ self.target_dir = os.path.join(build.build_base, 'docs')
+ else:
+ self.ensure_dirname('upload_dir')
+ self.target_dir = self.upload_dir
+ if 'pypi.python.org' in self.repository:
+ log.warn("Upload_docs command is deprecated. Use RTD instead.")
+ self.announce('Using upload directory %s' % self.target_dir)
+
+ def create_zipfile(self, filename):
+ zip_file = zipfile.ZipFile(filename, "w")
+ try:
+ self.mkpath(self.target_dir) # just in case
+ for root, dirs, files in os.walk(self.target_dir):
+ if root == self.target_dir and not files:
+ tmpl = "no files found in upload directory '%s'"
+ raise DistutilsOptionError(tmpl % self.target_dir)
+ for name in files:
+ full = os.path.join(root, name)
+ relative = root[len(self.target_dir):].lstrip(os.path.sep)
+ dest = os.path.join(relative, name)
+ zip_file.write(full, dest)
+ finally:
+ zip_file.close()
+
+ def run(self):
+ # Run sub commands
+ for cmd_name in self.get_sub_commands():
+ self.run_command(cmd_name)
+
+ tmp_dir = tempfile.mkdtemp()
+ name = self.distribution.metadata.get_name()
+ zip_file = os.path.join(tmp_dir, "%s.zip" % name)
+ try:
+ self.create_zipfile(zip_file)
+ self.upload_file(zip_file)
+ finally:
+ shutil.rmtree(tmp_dir)
+
+ @staticmethod
+ def _build_part(item, sep_boundary):
+ key, values = item
+ title = '\nContent-Disposition: form-data; name="%s"' % key
+ # handle multiple entries for the same name
+ if not isinstance(values, list):
+ values = [values]
+ for value in values:
+ if isinstance(value, tuple):
+ title += '; filename="%s"' % value[0]
+ value = value[1]
+ else:
+ value = _encode(value)
+ yield sep_boundary
+ yield _encode(title)
+ yield b"\n\n"
+ yield value
+ if value and value[-1:] == b'\r':
+ yield b'\n' # write an extra newline (lurve Macs)
+
+ @classmethod
+ def _build_multipart(cls, data):
+ """
+ Build up the MIME payload for the POST data
+ """
+ boundary = b'--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
+ sep_boundary = b'\n--' + boundary
+ end_boundary = sep_boundary + b'--'
+ end_items = end_boundary, b"\n",
+ builder = functools.partial(
+ cls._build_part,
+ sep_boundary=sep_boundary,
+ )
+ part_groups = map(builder, data.items())
+ parts = itertools.chain.from_iterable(part_groups)
+ body_items = itertools.chain(parts, end_items)
+ content_type = 'multipart/form-data; boundary=%s' % boundary.decode('ascii')
+ return b''.join(body_items), content_type
+
+ def upload_file(self, filename):
+ with open(filename, 'rb') as f:
+ content = f.read()
+ meta = self.distribution.metadata
+ data = {
+ ':action': 'doc_upload',
+ 'name': meta.get_name(),
+ 'content': (os.path.basename(filename), content),
+ }
+ # set up the authentication
+ credentials = _encode(self.username + ':' + self.password)
+ credentials = standard_b64encode(credentials)
+ if six.PY3:
+ credentials = credentials.decode('ascii')
+ auth = "Basic " + credentials
+
+ body, ct = self._build_multipart(data)
+
+ msg = "Submitting documentation to %s" % (self.repository)
+ self.announce(msg, log.INFO)
+
+ # build the Request
+ # We can't use urllib2 since we need to send the Basic
+ # auth right with the first request
+ schema, netloc, url, params, query, fragments = \
+ urllib.parse.urlparse(self.repository)
+ assert not params and not query and not fragments
+ if schema == 'http':
+ conn = http_client.HTTPConnection(netloc)
+ elif schema == 'https':
+ conn = http_client.HTTPSConnection(netloc)
+ else:
+ raise AssertionError("unsupported schema " + schema)
+
+ data = ''
+ try:
+ conn.connect()
+ conn.putrequest("POST", url)
+ content_type = ct
+ conn.putheader('Content-type', content_type)
+ conn.putheader('Content-length', str(len(body)))
+ conn.putheader('Authorization', auth)
+ conn.endheaders()
+ conn.send(body)
+ except socket.error as e:
+ self.announce(str(e), log.ERROR)
+ return
+
+ r = conn.getresponse()
+ if r.status == 200:
+ msg = 'Server response (%s): %s' % (r.status, r.reason)
+ self.announce(msg, log.INFO)
+ elif r.status == 301:
+ location = r.getheader('Location')
+ if location is None:
+ location = 'https://pythonhosted.org/%s/' % meta.get_name()
+ msg = 'Upload successful. Visit %s' % location
+ self.announce(msg, log.INFO)
+ else:
+ msg = 'Upload failed (%s): %s' % (r.status, r.reason)
+ self.announce(msg, log.ERROR)
+ if self.show_response:
+ print('-' * 75, r.read(), '-' * 75)
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/dep_util.py b/monEnvTP/lib/python3.8/site-packages/setuptools/dep_util.py
new file mode 100644
index 0000000000000000000000000000000000000000..2931c13ec35aa60b742ac4c46ceabd4ed32a5511
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/dep_util.py
@@ -0,0 +1,23 @@
+from distutils.dep_util import newer_group
+
+# yes, this is was almost entirely copy-pasted from
+# 'newer_pairwise()', this is just another convenience
+# function.
+def newer_pairwise_group(sources_groups, targets):
+ """Walk both arguments in parallel, testing if each source group is newer
+ than its corresponding target. Returns a pair of lists (sources_groups,
+ targets) where sources is newer than target, according to the semantics
+ of 'newer_group()'.
+ """
+ if len(sources_groups) != len(targets):
+ raise ValueError("'sources_group' and 'targets' must be the same length")
+
+ # build a pair of lists (sources_groups, targets) where source is newer
+ n_sources = []
+ n_targets = []
+ for i in range(len(sources_groups)):
+ if newer_group(sources_groups[i], targets[i]):
+ n_sources.append(sources_groups[i])
+ n_targets.append(targets[i])
+
+ return n_sources, n_targets
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/depends.py b/monEnvTP/lib/python3.8/site-packages/setuptools/depends.py
new file mode 100644
index 0000000000000000000000000000000000000000..a37675cbd9bc9583fd01cc158198e2f4deda321b
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/depends.py
@@ -0,0 +1,176 @@
+import sys
+import marshal
+import contextlib
+from distutils.version import StrictVersion
+
+from .py33compat import Bytecode
+
+from .py27compat import find_module, PY_COMPILED, PY_FROZEN, PY_SOURCE
+from . import py27compat
+
+
+__all__ = [
+ 'Require', 'find_module', 'get_module_constant', 'extract_constant'
+]
+
+
+class Require:
+ """A prerequisite to building or installing a distribution"""
+
+ def __init__(
+ self, name, requested_version, module, homepage='',
+ attribute=None, format=None):
+
+ if format is None and requested_version is not None:
+ format = StrictVersion
+
+ if format is not None:
+ requested_version = format(requested_version)
+ if attribute is None:
+ attribute = '__version__'
+
+ self.__dict__.update(locals())
+ del self.self
+
+ def full_name(self):
+ """Return full package/distribution name, w/version"""
+ if self.requested_version is not None:
+ return '%s-%s' % (self.name, self.requested_version)
+ return self.name
+
+ def version_ok(self, version):
+ """Is 'version' sufficiently up-to-date?"""
+ return self.attribute is None or self.format is None or \
+ str(version) != "unknown" and version >= self.requested_version
+
+ def get_version(self, paths=None, default="unknown"):
+ """Get version number of installed module, 'None', or 'default'
+
+ Search 'paths' for module. If not found, return 'None'. If found,
+ return the extracted version attribute, or 'default' if no version
+ attribute was specified, or the value cannot be determined without
+ importing the module. The version is formatted according to the
+ requirement's version format (if any), unless it is 'None' or the
+ supplied 'default'.
+ """
+
+ if self.attribute is None:
+ try:
+ f, p, i = find_module(self.module, paths)
+ if f:
+ f.close()
+ return default
+ except ImportError:
+ return None
+
+ v = get_module_constant(self.module, self.attribute, default, paths)
+
+ if v is not None and v is not default and self.format is not None:
+ return self.format(v)
+
+ return v
+
+ def is_present(self, paths=None):
+ """Return true if dependency is present on 'paths'"""
+ return self.get_version(paths) is not None
+
+ def is_current(self, paths=None):
+ """Return true if dependency is present and up-to-date on 'paths'"""
+ version = self.get_version(paths)
+ if version is None:
+ return False
+ return self.version_ok(version)
+
+
+def maybe_close(f):
+ @contextlib.contextmanager
+ def empty():
+ yield
+ return
+ if not f:
+ return empty()
+
+ return contextlib.closing(f)
+
+
+def get_module_constant(module, symbol, default=-1, paths=None):
+ """Find 'module' by searching 'paths', and extract 'symbol'
+
+ Return 'None' if 'module' does not exist on 'paths', or it does not define
+ 'symbol'. If the module defines 'symbol' as a constant, return the
+ constant. Otherwise, return 'default'."""
+
+ try:
+ f, path, (suffix, mode, kind) = info = find_module(module, paths)
+ except ImportError:
+ # Module doesn't exist
+ return None
+
+ with maybe_close(f):
+ if kind == PY_COMPILED:
+ f.read(8) # skip magic & date
+ code = marshal.load(f)
+ elif kind == PY_FROZEN:
+ code = py27compat.get_frozen_object(module, paths)
+ elif kind == PY_SOURCE:
+ code = compile(f.read(), path, 'exec')
+ else:
+ # Not something we can parse; we'll have to import it. :(
+ imported = py27compat.get_module(module, paths, info)
+ return getattr(imported, symbol, None)
+
+ return extract_constant(code, symbol, default)
+
+
+def extract_constant(code, symbol, default=-1):
+ """Extract the constant value of 'symbol' from 'code'
+
+ If the name 'symbol' is bound to a constant value by the Python code
+ object 'code', return that value. If 'symbol' is bound to an expression,
+ return 'default'. Otherwise, return 'None'.
+
+ Return value is based on the first assignment to 'symbol'. 'symbol' must
+ be a global, or at least a non-"fast" local in the code block. That is,
+ only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol'
+ must be present in 'code.co_names'.
+ """
+ if symbol not in code.co_names:
+ # name's not there, can't possibly be an assignment
+ return None
+
+ name_idx = list(code.co_names).index(symbol)
+
+ STORE_NAME = 90
+ STORE_GLOBAL = 97
+ LOAD_CONST = 100
+
+ const = default
+
+ for byte_code in Bytecode(code):
+ op = byte_code.opcode
+ arg = byte_code.arg
+
+ if op == LOAD_CONST:
+ const = code.co_consts[arg]
+ elif arg == name_idx and (op == STORE_NAME or op == STORE_GLOBAL):
+ return const
+ else:
+ const = default
+
+
+def _update_globals():
+ """
+ Patch the globals to remove the objects not available on some platforms.
+
+ XXX it'd be better to test assertions about bytecode instead.
+ """
+
+ if not sys.platform.startswith('java') and sys.platform != 'cli':
+ return
+ incompatible = 'extract_constant', 'get_module_constant'
+ for name in incompatible:
+ del globals()[name]
+ __all__.remove(name)
+
+
+_update_globals()
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/dist.py b/monEnvTP/lib/python3.8/site-packages/setuptools/dist.py
new file mode 100644
index 0000000000000000000000000000000000000000..f22429e8e191683da2cc83c7cc5eba205a541988
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/dist.py
@@ -0,0 +1,1274 @@
+# -*- coding: utf-8 -*-
+__all__ = ['Distribution']
+
+import io
+import sys
+import re
+import os
+import warnings
+import numbers
+import distutils.log
+import distutils.core
+import distutils.cmd
+import distutils.dist
+from distutils.util import strtobool
+from distutils.debug import DEBUG
+from distutils.fancy_getopt import translate_longopt
+import itertools
+
+from collections import defaultdict
+from email import message_from_file
+
+from distutils.errors import (
+ DistutilsOptionError, DistutilsPlatformError, DistutilsSetupError,
+)
+from distutils.util import rfc822_escape
+from distutils.version import StrictVersion
+
+from setuptools.extern import six
+from setuptools.extern import packaging
+from setuptools.extern import ordered_set
+from setuptools.extern.six.moves import map, filter, filterfalse
+
+from . import SetuptoolsDeprecationWarning
+
+from setuptools.depends import Require
+from setuptools import windows_support
+from setuptools.monkey import get_unpatched
+from setuptools.config import parse_configuration
+import pkg_resources
+
+__import__('setuptools.extern.packaging.specifiers')
+__import__('setuptools.extern.packaging.version')
+
+
+def _get_unpatched(cls):
+ warnings.warn("Do not call this function", DistDeprecationWarning)
+ return get_unpatched(cls)
+
+
+def get_metadata_version(self):
+ mv = getattr(self, 'metadata_version', None)
+
+ if mv is None:
+ if self.long_description_content_type or self.provides_extras:
+ mv = StrictVersion('2.1')
+ elif (self.maintainer is not None or
+ self.maintainer_email is not None or
+ getattr(self, 'python_requires', None) is not None or
+ self.project_urls):
+ mv = StrictVersion('1.2')
+ elif (self.provides or self.requires or self.obsoletes or
+ self.classifiers or self.download_url):
+ mv = StrictVersion('1.1')
+ else:
+ mv = StrictVersion('1.0')
+
+ self.metadata_version = mv
+
+ return mv
+
+
+def read_pkg_file(self, file):
+ """Reads the metadata values from a file object."""
+ msg = message_from_file(file)
+
+ def _read_field(name):
+ value = msg[name]
+ if value == 'UNKNOWN':
+ return None
+ return value
+
+ def _read_list(name):
+ values = msg.get_all(name, None)
+ if values == []:
+ return None
+ return values
+
+ self.metadata_version = StrictVersion(msg['metadata-version'])
+ self.name = _read_field('name')
+ self.version = _read_field('version')
+ self.description = _read_field('summary')
+ # we are filling author only.
+ self.author = _read_field('author')
+ self.maintainer = None
+ self.author_email = _read_field('author-email')
+ self.maintainer_email = None
+ self.url = _read_field('home-page')
+ self.license = _read_field('license')
+
+ if 'download-url' in msg:
+ self.download_url = _read_field('download-url')
+ else:
+ self.download_url = None
+
+ self.long_description = _read_field('description')
+ self.description = _read_field('summary')
+
+ if 'keywords' in msg:
+ self.keywords = _read_field('keywords').split(',')
+
+ self.platforms = _read_list('platform')
+ self.classifiers = _read_list('classifier')
+
+ # PEP 314 - these fields only exist in 1.1
+ if self.metadata_version == StrictVersion('1.1'):
+ self.requires = _read_list('requires')
+ self.provides = _read_list('provides')
+ self.obsoletes = _read_list('obsoletes')
+ else:
+ self.requires = None
+ self.provides = None
+ self.obsoletes = None
+
+
+# Based on Python 3.5 version
+def write_pkg_file(self, file):
+ """Write the PKG-INFO format data to a file object.
+ """
+ version = self.get_metadata_version()
+
+ if six.PY2:
+ def write_field(key, value):
+ file.write("%s: %s\n" % (key, self._encode_field(value)))
+ else:
+ def write_field(key, value):
+ file.write("%s: %s\n" % (key, value))
+
+ write_field('Metadata-Version', str(version))
+ write_field('Name', self.get_name())
+ write_field('Version', self.get_version())
+ write_field('Summary', self.get_description())
+ write_field('Home-page', self.get_url())
+
+ if version < StrictVersion('1.2'):
+ write_field('Author', self.get_contact())
+ write_field('Author-email', self.get_contact_email())
+ else:
+ optional_fields = (
+ ('Author', 'author'),
+ ('Author-email', 'author_email'),
+ ('Maintainer', 'maintainer'),
+ ('Maintainer-email', 'maintainer_email'),
+ )
+
+ for field, attr in optional_fields:
+ attr_val = getattr(self, attr)
+
+ if attr_val is not None:
+ write_field(field, attr_val)
+
+ write_field('License', self.get_license())
+ if self.download_url:
+ write_field('Download-URL', self.download_url)
+ for project_url in self.project_urls.items():
+ write_field('Project-URL', '%s, %s' % project_url)
+
+ long_desc = rfc822_escape(self.get_long_description())
+ write_field('Description', long_desc)
+
+ keywords = ','.join(self.get_keywords())
+ if keywords:
+ write_field('Keywords', keywords)
+
+ if version >= StrictVersion('1.2'):
+ for platform in self.get_platforms():
+ write_field('Platform', platform)
+ else:
+ self._write_list(file, 'Platform', self.get_platforms())
+
+ self._write_list(file, 'Classifier', self.get_classifiers())
+
+ # PEP 314
+ self._write_list(file, 'Requires', self.get_requires())
+ self._write_list(file, 'Provides', self.get_provides())
+ self._write_list(file, 'Obsoletes', self.get_obsoletes())
+
+ # Setuptools specific for PEP 345
+ if hasattr(self, 'python_requires'):
+ write_field('Requires-Python', self.python_requires)
+
+ # PEP 566
+ if self.long_description_content_type:
+ write_field(
+ 'Description-Content-Type',
+ self.long_description_content_type
+ )
+ if self.provides_extras:
+ for extra in sorted(self.provides_extras):
+ write_field('Provides-Extra', extra)
+
+
+sequence = tuple, list
+
+
+def check_importable(dist, attr, value):
+ try:
+ ep = pkg_resources.EntryPoint.parse('x=' + value)
+ assert not ep.extras
+ except (TypeError, ValueError, AttributeError, AssertionError):
+ raise DistutilsSetupError(
+ "%r must be importable 'module:attrs' string (got %r)"
+ % (attr, value)
+ )
+
+
+def assert_string_list(dist, attr, value):
+ """Verify that value is a string list"""
+ try:
+ # verify that value is a list or tuple to exclude unordered
+ # or single-use iterables
+ assert isinstance(value, (list, tuple))
+ # verify that elements of value are strings
+ assert ''.join(value) != value
+ except (TypeError, ValueError, AttributeError, AssertionError):
+ raise DistutilsSetupError(
+ "%r must be a list of strings (got %r)" % (attr, value)
+ )
+
+
+def check_nsp(dist, attr, value):
+ """Verify that namespace packages are valid"""
+ ns_packages = value
+ assert_string_list(dist, attr, ns_packages)
+ for nsp in ns_packages:
+ if not dist.has_contents_for(nsp):
+ raise DistutilsSetupError(
+ "Distribution contains no modules or packages for " +
+ "namespace package %r" % nsp
+ )
+ parent, sep, child = nsp.rpartition('.')
+ if parent and parent not in ns_packages:
+ distutils.log.warn(
+ "WARNING: %r is declared as a package namespace, but %r"
+ " is not: please correct this in setup.py", nsp, parent
+ )
+
+
+def check_extras(dist, attr, value):
+ """Verify that extras_require mapping is valid"""
+ try:
+ list(itertools.starmap(_check_extra, value.items()))
+ except (TypeError, ValueError, AttributeError):
+ raise DistutilsSetupError(
+ "'extras_require' must be a dictionary whose values are "
+ "strings or lists of strings containing valid project/version "
+ "requirement specifiers."
+ )
+
+
+def _check_extra(extra, reqs):
+ name, sep, marker = extra.partition(':')
+ if marker and pkg_resources.invalid_marker(marker):
+ raise DistutilsSetupError("Invalid environment marker: " + marker)
+ list(pkg_resources.parse_requirements(reqs))
+
+
+def assert_bool(dist, attr, value):
+ """Verify that value is True, False, 0, or 1"""
+ if bool(value) != value:
+ tmpl = "{attr!r} must be a boolean value (got {value!r})"
+ raise DistutilsSetupError(tmpl.format(attr=attr, value=value))
+
+
+def check_requirements(dist, attr, value):
+ """Verify that install_requires is a valid requirements list"""
+ try:
+ list(pkg_resources.parse_requirements(value))
+ if isinstance(value, (dict, set)):
+ raise TypeError("Unordered types are not allowed")
+ except (TypeError, ValueError) as error:
+ tmpl = (
+ "{attr!r} must be a string or list of strings "
+ "containing valid project/version requirement specifiers; {error}"
+ )
+ raise DistutilsSetupError(tmpl.format(attr=attr, error=error))
+
+
+def check_specifier(dist, attr, value):
+ """Verify that value is a valid version specifier"""
+ try:
+ packaging.specifiers.SpecifierSet(value)
+ except packaging.specifiers.InvalidSpecifier as error:
+ tmpl = (
+ "{attr!r} must be a string "
+ "containing valid version specifiers; {error}"
+ )
+ raise DistutilsSetupError(tmpl.format(attr=attr, error=error))
+
+
+def check_entry_points(dist, attr, value):
+ """Verify that entry_points map is parseable"""
+ try:
+ pkg_resources.EntryPoint.parse_map(value)
+ except ValueError as e:
+ raise DistutilsSetupError(e)
+
+
+def check_test_suite(dist, attr, value):
+ if not isinstance(value, six.string_types):
+ raise DistutilsSetupError("test_suite must be a string")
+
+
+def check_package_data(dist, attr, value):
+ """Verify that value is a dictionary of package names to glob lists"""
+ if not isinstance(value, dict):
+ raise DistutilsSetupError(
+ "{!r} must be a dictionary mapping package names to lists of "
+ "string wildcard patterns".format(attr))
+ for k, v in value.items():
+ if not isinstance(k, six.string_types):
+ raise DistutilsSetupError(
+ "keys of {!r} dict must be strings (got {!r})"
+ .format(attr, k)
+ )
+ assert_string_list(dist, 'values of {!r} dict'.format(attr), v)
+
+
+def check_packages(dist, attr, value):
+ for pkgname in value:
+ if not re.match(r'\w+(\.\w+)*', pkgname):
+ distutils.log.warn(
+ "WARNING: %r not a valid package name; please use only "
+ ".-separated package names in setup.py", pkgname
+ )
+
+
+_Distribution = get_unpatched(distutils.core.Distribution)
+
+
+class Distribution(_Distribution):
+ """Distribution with support for features, tests, and package data
+
+ This is an enhanced version of 'distutils.dist.Distribution' that
+ effectively adds the following new optional keyword arguments to 'setup()':
+
+ 'install_requires' -- a string or sequence of strings specifying project
+ versions that the distribution requires when installed, in the format
+ used by 'pkg_resources.require()'. They will be installed
+ automatically when the package is installed. If you wish to use
+ packages that are not available in PyPI, or want to give your users an
+ alternate download location, you can add a 'find_links' option to the
+ '[easy_install]' section of your project's 'setup.cfg' file, and then
+ setuptools will scan the listed web pages for links that satisfy the
+ requirements.
+
+ 'extras_require' -- a dictionary mapping names of optional "extras" to the
+ additional requirement(s) that using those extras incurs. For example,
+ this::
+
+ extras_require = dict(reST = ["docutils>=0.3", "reSTedit"])
+
+ indicates that the distribution can optionally provide an extra
+ capability called "reST", but it can only be used if docutils and
+ reSTedit are installed. If the user installs your package using
+ EasyInstall and requests one of your extras, the corresponding
+ additional requirements will be installed if needed.
+
+ 'features' **deprecated** -- a dictionary mapping option names to
+ 'setuptools.Feature'
+ objects. Features are a portion of the distribution that can be
+ included or excluded based on user options, inter-feature dependencies,
+ and availability on the current system. Excluded features are omitted
+ from all setup commands, including source and binary distributions, so
+ you can create multiple distributions from the same source tree.
+ Feature names should be valid Python identifiers, except that they may
+ contain the '-' (minus) sign. Features can be included or excluded
+ via the command line options '--with-X' and '--without-X', where 'X' is
+ the name of the feature. Whether a feature is included by default, and
+ whether you are allowed to control this from the command line, is
+ determined by the Feature object. See the 'Feature' class for more
+ information.
+
+ 'test_suite' -- the name of a test suite to run for the 'test' command.
+ If the user runs 'python setup.py test', the package will be installed,
+ and the named test suite will be run. The format is the same as
+ would be used on a 'unittest.py' command line. That is, it is the
+ dotted name of an object to import and call to generate a test suite.
+
+ 'package_data' -- a dictionary mapping package names to lists of filenames
+ or globs to use to find data files contained in the named packages.
+ If the dictionary has filenames or globs listed under '""' (the empty
+ string), those names will be searched for in every package, in addition
+ to any names for the specific package. Data files found using these
+ names/globs will be installed along with the package, in the same
+ location as the package. Note that globs are allowed to reference
+ the contents of non-package subdirectories, as long as you use '/' as
+ a path separator. (Globs are automatically converted to
+ platform-specific paths at runtime.)
+
+ In addition to these new keywords, this class also has several new methods
+ for manipulating the distribution's contents. For example, the 'include()'
+ and 'exclude()' methods can be thought of as in-place add and subtract
+ commands that add or remove packages, modules, extensions, and so on from
+ the distribution. They are used by the feature subsystem to configure the
+ distribution for the included and excluded features.
+ """
+
+ _DISTUTILS_UNSUPPORTED_METADATA = {
+ 'long_description_content_type': None,
+ 'project_urls': dict,
+ 'provides_extras': ordered_set.OrderedSet,
+ 'license_files': ordered_set.OrderedSet,
+ }
+
+ _patched_dist = None
+
+ def patch_missing_pkg_info(self, attrs):
+ # Fake up a replacement for the data that would normally come from
+ # PKG-INFO, but which might not yet be built if this is a fresh
+ # checkout.
+ #
+ if not attrs or 'name' not in attrs or 'version' not in attrs:
+ return
+ key = pkg_resources.safe_name(str(attrs['name'])).lower()
+ dist = pkg_resources.working_set.by_key.get(key)
+ if dist is not None and not dist.has_metadata('PKG-INFO'):
+ dist._version = pkg_resources.safe_version(str(attrs['version']))
+ self._patched_dist = dist
+
+ def __init__(self, attrs=None):
+ have_package_data = hasattr(self, "package_data")
+ if not have_package_data:
+ self.package_data = {}
+ attrs = attrs or {}
+ if 'features' in attrs or 'require_features' in attrs:
+ Feature.warn_deprecated()
+ self.require_features = []
+ self.features = {}
+ self.dist_files = []
+ # Filter-out setuptools' specific options.
+ self.src_root = attrs.pop("src_root", None)
+ self.patch_missing_pkg_info(attrs)
+ self.dependency_links = attrs.pop('dependency_links', [])
+ self.setup_requires = attrs.pop('setup_requires', [])
+ for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
+ vars(self).setdefault(ep.name, None)
+ _Distribution.__init__(self, {
+ k: v for k, v in attrs.items()
+ if k not in self._DISTUTILS_UNSUPPORTED_METADATA
+ })
+
+ # Fill-in missing metadata fields not supported by distutils.
+ # Note some fields may have been set by other tools (e.g. pbr)
+ # above; they are taken preferrentially to setup() arguments
+ for option, default in self._DISTUTILS_UNSUPPORTED_METADATA.items():
+ for source in self.metadata.__dict__, attrs:
+ if option in source:
+ value = source[option]
+ break
+ else:
+ value = default() if default else None
+ setattr(self.metadata, option, value)
+
+ if isinstance(self.metadata.version, numbers.Number):
+ # Some people apparently take "version number" too literally :)
+ self.metadata.version = str(self.metadata.version)
+
+ if self.metadata.version is not None:
+ try:
+ ver = packaging.version.Version(self.metadata.version)
+ normalized_version = str(ver)
+ if self.metadata.version != normalized_version:
+ warnings.warn(
+ "Normalizing '%s' to '%s'" % (
+ self.metadata.version,
+ normalized_version,
+ )
+ )
+ self.metadata.version = normalized_version
+ except (packaging.version.InvalidVersion, TypeError):
+ warnings.warn(
+ "The version specified (%r) is an invalid version, this "
+ "may not work as expected with newer versions of "
+ "setuptools, pip, and PyPI. Please see PEP 440 for more "
+ "details." % self.metadata.version
+ )
+ self._finalize_requires()
+
+ def _finalize_requires(self):
+ """
+ Set `metadata.python_requires` and fix environment markers
+ in `install_requires` and `extras_require`.
+ """
+ if getattr(self, 'python_requires', None):
+ self.metadata.python_requires = self.python_requires
+
+ if getattr(self, 'extras_require', None):
+ for extra in self.extras_require.keys():
+ # Since this gets called multiple times at points where the
+ # keys have become 'converted' extras, ensure that we are only
+ # truly adding extras we haven't seen before here.
+ extra = extra.split(':')[0]
+ if extra:
+ self.metadata.provides_extras.add(extra)
+
+ self._convert_extras_requirements()
+ self._move_install_requirements_markers()
+
+ def _convert_extras_requirements(self):
+ """
+ Convert requirements in `extras_require` of the form
+ `"extra": ["barbazquux; {marker}"]` to
+ `"extra:{marker}": ["barbazquux"]`.
+ """
+ spec_ext_reqs = getattr(self, 'extras_require', None) or {}
+ self._tmp_extras_require = defaultdict(list)
+ for section, v in spec_ext_reqs.items():
+ # Do not strip empty sections.
+ self._tmp_extras_require[section]
+ for r in pkg_resources.parse_requirements(v):
+ suffix = self._suffix_for(r)
+ self._tmp_extras_require[section + suffix].append(r)
+
+ @staticmethod
+ def _suffix_for(req):
+ """
+ For a requirement, return the 'extras_require' suffix for
+ that requirement.
+ """
+ return ':' + str(req.marker) if req.marker else ''
+
+ def _move_install_requirements_markers(self):
+ """
+ Move requirements in `install_requires` that are using environment
+ markers `extras_require`.
+ """
+
+ # divide the install_requires into two sets, simple ones still
+ # handled by install_requires and more complex ones handled
+ # by extras_require.
+
+ def is_simple_req(req):
+ return not req.marker
+
+ spec_inst_reqs = getattr(self, 'install_requires', None) or ()
+ inst_reqs = list(pkg_resources.parse_requirements(spec_inst_reqs))
+ simple_reqs = filter(is_simple_req, inst_reqs)
+ complex_reqs = filterfalse(is_simple_req, inst_reqs)
+ self.install_requires = list(map(str, simple_reqs))
+
+ for r in complex_reqs:
+ self._tmp_extras_require[':' + str(r.marker)].append(r)
+ self.extras_require = dict(
+ (k, [str(r) for r in map(self._clean_req, v)])
+ for k, v in self._tmp_extras_require.items()
+ )
+
+ def _clean_req(self, req):
+ """
+ Given a Requirement, remove environment markers and return it.
+ """
+ req.marker = None
+ return req
+
+ def _parse_config_files(self, filenames=None):
+ """
+ Adapted from distutils.dist.Distribution.parse_config_files,
+ this method provides the same functionality in subtly-improved
+ ways.
+ """
+ from setuptools.extern.six.moves.configparser import ConfigParser
+
+ # Ignore install directory options if we have a venv
+ if six.PY3 and sys.prefix != sys.base_prefix:
+ ignore_options = [
+ 'install-base', 'install-platbase', 'install-lib',
+ 'install-platlib', 'install-purelib', 'install-headers',
+ 'install-scripts', 'install-data', 'prefix', 'exec-prefix',
+ 'home', 'user', 'root']
+ else:
+ ignore_options = []
+
+ ignore_options = frozenset(ignore_options)
+
+ if filenames is None:
+ filenames = self.find_config_files()
+
+ if DEBUG:
+ self.announce("Distribution.parse_config_files():")
+
+ parser = ConfigParser()
+ for filename in filenames:
+ with io.open(filename, encoding='utf-8') as reader:
+ if DEBUG:
+ self.announce(" reading {filename}".format(**locals()))
+ (parser.read_file if six.PY3 else parser.readfp)(reader)
+ for section in parser.sections():
+ options = parser.options(section)
+ opt_dict = self.get_option_dict(section)
+
+ for opt in options:
+ if opt != '__name__' and opt not in ignore_options:
+ val = self._try_str(parser.get(section, opt))
+ opt = opt.replace('-', '_')
+ opt_dict[opt] = (filename, val)
+
+ # Make the ConfigParser forget everything (so we retain
+ # the original filenames that options come from)
+ parser.__init__()
+
+ # If there was a "global" section in the config file, use it
+ # to set Distribution options.
+
+ if 'global' in self.command_options:
+ for (opt, (src, val)) in self.command_options['global'].items():
+ alias = self.negative_opt.get(opt)
+ try:
+ if alias:
+ setattr(self, alias, not strtobool(val))
+ elif opt in ('verbose', 'dry_run'): # ugh!
+ setattr(self, opt, strtobool(val))
+ else:
+ setattr(self, opt, val)
+ except ValueError as msg:
+ raise DistutilsOptionError(msg)
+
+ @staticmethod
+ def _try_str(val):
+ """
+ On Python 2, much of distutils relies on string values being of
+ type 'str' (bytes) and not unicode text. If the value can be safely
+ encoded to bytes using the default encoding, prefer that.
+
+ Why the default encoding? Because that value can be implicitly
+ decoded back to text if needed.
+
+ Ref #1653
+ """
+ if six.PY3:
+ return val
+ try:
+ return val.encode()
+ except UnicodeEncodeError:
+ pass
+ return val
+
+ def _set_command_options(self, command_obj, option_dict=None):
+ """
+ Set the options for 'command_obj' from 'option_dict'. Basically
+ this means copying elements of a dictionary ('option_dict') to
+ attributes of an instance ('command').
+
+ 'command_obj' must be a Command instance. If 'option_dict' is not
+ supplied, uses the standard option dictionary for this command
+ (from 'self.command_options').
+
+ (Adopted from distutils.dist.Distribution._set_command_options)
+ """
+ command_name = command_obj.get_command_name()
+ if option_dict is None:
+ option_dict = self.get_option_dict(command_name)
+
+ if DEBUG:
+ self.announce(" setting options for '%s' command:" % command_name)
+ for (option, (source, value)) in option_dict.items():
+ if DEBUG:
+ self.announce(" %s = %s (from %s)" % (option, value,
+ source))
+ try:
+ bool_opts = [translate_longopt(o)
+ for o in command_obj.boolean_options]
+ except AttributeError:
+ bool_opts = []
+ try:
+ neg_opt = command_obj.negative_opt
+ except AttributeError:
+ neg_opt = {}
+
+ try:
+ is_string = isinstance(value, six.string_types)
+ if option in neg_opt and is_string:
+ setattr(command_obj, neg_opt[option], not strtobool(value))
+ elif option in bool_opts and is_string:
+ setattr(command_obj, option, strtobool(value))
+ elif hasattr(command_obj, option):
+ setattr(command_obj, option, value)
+ else:
+ raise DistutilsOptionError(
+ "error in %s: command '%s' has no such option '%s'"
+ % (source, command_name, option))
+ except ValueError as msg:
+ raise DistutilsOptionError(msg)
+
+ def parse_config_files(self, filenames=None, ignore_option_errors=False):
+ """Parses configuration files from various levels
+ and loads configuration.
+
+ """
+ self._parse_config_files(filenames=filenames)
+
+ parse_configuration(self, self.command_options,
+ ignore_option_errors=ignore_option_errors)
+ self._finalize_requires()
+
+ def parse_command_line(self):
+ """Process features after parsing command line options"""
+ result = _Distribution.parse_command_line(self)
+ if self.features:
+ self._finalize_features()
+ return result
+
+ def _feature_attrname(self, name):
+ """Convert feature name to corresponding option attribute name"""
+ return 'with_' + name.replace('-', '_')
+
+ def fetch_build_eggs(self, requires):
+ """Resolve pre-setup requirements"""
+ resolved_dists = pkg_resources.working_set.resolve(
+ pkg_resources.parse_requirements(requires),
+ installer=self.fetch_build_egg,
+ replace_conflicting=True,
+ )
+ for dist in resolved_dists:
+ pkg_resources.working_set.add(dist, replace=True)
+ return resolved_dists
+
+ def finalize_options(self):
+ """
+ Allow plugins to apply arbitrary operations to the
+ distribution. Each hook may optionally define a 'order'
+ to influence the order of execution. Smaller numbers
+ go first and the default is 0.
+ """
+ hook_key = 'setuptools.finalize_distribution_options'
+
+ def by_order(hook):
+ return getattr(hook, 'order', 0)
+ eps = pkg_resources.iter_entry_points(hook_key)
+ for ep in sorted(eps, key=by_order):
+ ep.load()(self)
+
+ def _finalize_setup_keywords(self):
+ for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
+ value = getattr(self, ep.name, None)
+ if value is not None:
+ ep.require(installer=self.fetch_build_egg)
+ ep.load()(self, ep.name, value)
+
+ def _finalize_2to3_doctests(self):
+ if getattr(self, 'convert_2to3_doctests', None):
+ # XXX may convert to set here when we can rely on set being builtin
+ self.convert_2to3_doctests = [
+ os.path.abspath(p)
+ for p in self.convert_2to3_doctests
+ ]
+ else:
+ self.convert_2to3_doctests = []
+
+ def get_egg_cache_dir(self):
+ egg_cache_dir = os.path.join(os.curdir, '.eggs')
+ if not os.path.exists(egg_cache_dir):
+ os.mkdir(egg_cache_dir)
+ windows_support.hide_file(egg_cache_dir)
+ readme_txt_filename = os.path.join(egg_cache_dir, 'README.txt')
+ with open(readme_txt_filename, 'w') as f:
+ f.write('This directory contains eggs that were downloaded '
+ 'by setuptools to build, test, and run plug-ins.\n\n')
+ f.write('This directory caches those eggs to prevent '
+ 'repeated downloads.\n\n')
+ f.write('However, it is safe to delete this directory.\n\n')
+
+ return egg_cache_dir
+
+ def fetch_build_egg(self, req):
+ """Fetch an egg needed for building"""
+ from setuptools.installer import fetch_build_egg
+ return fetch_build_egg(self, req)
+
+ def _finalize_feature_opts(self):
+ """Add --with-X/--without-X options based on optional features"""
+
+ if not self.features:
+ return
+
+ go = []
+ no = self.negative_opt.copy()
+
+ for name, feature in self.features.items():
+ self._set_feature(name, None)
+ feature.validate(self)
+
+ if feature.optional:
+ descr = feature.description
+ incdef = ' (default)'
+ excdef = ''
+ if not feature.include_by_default():
+ excdef, incdef = incdef, excdef
+
+ new = (
+ ('with-' + name, None, 'include ' + descr + incdef),
+ ('without-' + name, None, 'exclude ' + descr + excdef),
+ )
+ go.extend(new)
+ no['without-' + name] = 'with-' + name
+
+ self.global_options = self.feature_options = go + self.global_options
+ self.negative_opt = self.feature_negopt = no
+
+ def _finalize_features(self):
+ """Add/remove features and resolve dependencies between them"""
+
+ # First, flag all the enabled items (and thus their dependencies)
+ for name, feature in self.features.items():
+ enabled = self.feature_is_included(name)
+ if enabled or (enabled is None and feature.include_by_default()):
+ feature.include_in(self)
+ self._set_feature(name, 1)
+
+ # Then disable the rest, so that off-by-default features don't
+ # get flagged as errors when they're required by an enabled feature
+ for name, feature in self.features.items():
+ if not self.feature_is_included(name):
+ feature.exclude_from(self)
+ self._set_feature(name, 0)
+
+ def get_command_class(self, command):
+ """Pluggable version of get_command_class()"""
+ if command in self.cmdclass:
+ return self.cmdclass[command]
+
+ eps = pkg_resources.iter_entry_points('distutils.commands', command)
+ for ep in eps:
+ ep.require(installer=self.fetch_build_egg)
+ self.cmdclass[command] = cmdclass = ep.load()
+ return cmdclass
+ else:
+ return _Distribution.get_command_class(self, command)
+
+ def print_commands(self):
+ for ep in pkg_resources.iter_entry_points('distutils.commands'):
+ if ep.name not in self.cmdclass:
+ # don't require extras as the commands won't be invoked
+ cmdclass = ep.resolve()
+ self.cmdclass[ep.name] = cmdclass
+ return _Distribution.print_commands(self)
+
+ def get_command_list(self):
+ for ep in pkg_resources.iter_entry_points('distutils.commands'):
+ if ep.name not in self.cmdclass:
+ # don't require extras as the commands won't be invoked
+ cmdclass = ep.resolve()
+ self.cmdclass[ep.name] = cmdclass
+ return _Distribution.get_command_list(self)
+
+ def _set_feature(self, name, status):
+ """Set feature's inclusion status"""
+ setattr(self, self._feature_attrname(name), status)
+
+ def feature_is_included(self, name):
+ """Return 1 if feature is included, 0 if excluded, 'None' if unknown"""
+ return getattr(self, self._feature_attrname(name))
+
+ def include_feature(self, name):
+ """Request inclusion of feature named 'name'"""
+
+ if self.feature_is_included(name) == 0:
+ descr = self.features[name].description
+ raise DistutilsOptionError(
+ descr + " is required, but was excluded or is not available"
+ )
+ self.features[name].include_in(self)
+ self._set_feature(name, 1)
+
+ def include(self, **attrs):
+ """Add items to distribution that are named in keyword arguments
+
+ For example, 'dist.include(py_modules=["x"])' would add 'x' to
+ the distribution's 'py_modules' attribute, if it was not already
+ there.
+
+ Currently, this method only supports inclusion for attributes that are
+ lists or tuples. If you need to add support for adding to other
+ attributes in this or a subclass, you can add an '_include_X' method,
+ where 'X' is the name of the attribute. The method will be called with
+ the value passed to 'include()'. So, 'dist.include(foo={"bar":"baz"})'
+ will try to call 'dist._include_foo({"bar":"baz"})', which can then
+ handle whatever special inclusion logic is needed.
+ """
+ for k, v in attrs.items():
+ include = getattr(self, '_include_' + k, None)
+ if include:
+ include(v)
+ else:
+ self._include_misc(k, v)
+
+ def exclude_package(self, package):
+ """Remove packages, modules, and extensions in named package"""
+
+ pfx = package + '.'
+ if self.packages:
+ self.packages = [
+ p for p in self.packages
+ if p != package and not p.startswith(pfx)
+ ]
+
+ if self.py_modules:
+ self.py_modules = [
+ p for p in self.py_modules
+ if p != package and not p.startswith(pfx)
+ ]
+
+ if self.ext_modules:
+ self.ext_modules = [
+ p for p in self.ext_modules
+ if p.name != package and not p.name.startswith(pfx)
+ ]
+
+ def has_contents_for(self, package):
+ """Return true if 'exclude_package(package)' would do something"""
+
+ pfx = package + '.'
+
+ for p in self.iter_distribution_names():
+ if p == package or p.startswith(pfx):
+ return True
+
+ def _exclude_misc(self, name, value):
+ """Handle 'exclude()' for list/tuple attrs without a special handler"""
+ if not isinstance(value, sequence):
+ raise DistutilsSetupError(
+ "%s: setting must be a list or tuple (%r)" % (name, value)
+ )
+ try:
+ old = getattr(self, name)
+ except AttributeError:
+ raise DistutilsSetupError(
+ "%s: No such distribution setting" % name
+ )
+ if old is not None and not isinstance(old, sequence):
+ raise DistutilsSetupError(
+ name + ": this setting cannot be changed via include/exclude"
+ )
+ elif old:
+ setattr(self, name, [item for item in old if item not in value])
+
+ def _include_misc(self, name, value):
+ """Handle 'include()' for list/tuple attrs without a special handler"""
+
+ if not isinstance(value, sequence):
+ raise DistutilsSetupError(
+ "%s: setting must be a list (%r)" % (name, value)
+ )
+ try:
+ old = getattr(self, name)
+ except AttributeError:
+ raise DistutilsSetupError(
+ "%s: No such distribution setting" % name
+ )
+ if old is None:
+ setattr(self, name, value)
+ elif not isinstance(old, sequence):
+ raise DistutilsSetupError(
+ name + ": this setting cannot be changed via include/exclude"
+ )
+ else:
+ new = [item for item in value if item not in old]
+ setattr(self, name, old + new)
+
+ def exclude(self, **attrs):
+ """Remove items from distribution that are named in keyword arguments
+
+ For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from
+ the distribution's 'py_modules' attribute. Excluding packages uses
+ the 'exclude_package()' method, so all of the package's contained
+ packages, modules, and extensions are also excluded.
+
+ Currently, this method only supports exclusion from attributes that are
+ lists or tuples. If you need to add support for excluding from other
+ attributes in this or a subclass, you can add an '_exclude_X' method,
+ where 'X' is the name of the attribute. The method will be called with
+ the value passed to 'exclude()'. So, 'dist.exclude(foo={"bar":"baz"})'
+ will try to call 'dist._exclude_foo({"bar":"baz"})', which can then
+ handle whatever special exclusion logic is needed.
+ """
+ for k, v in attrs.items():
+ exclude = getattr(self, '_exclude_' + k, None)
+ if exclude:
+ exclude(v)
+ else:
+ self._exclude_misc(k, v)
+
+ def _exclude_packages(self, packages):
+ if not isinstance(packages, sequence):
+ raise DistutilsSetupError(
+ "packages: setting must be a list or tuple (%r)" % (packages,)
+ )
+ list(map(self.exclude_package, packages))
+
+ def _parse_command_opts(self, parser, args):
+ # Remove --with-X/--without-X options when processing command args
+ self.global_options = self.__class__.global_options
+ self.negative_opt = self.__class__.negative_opt
+
+ # First, expand any aliases
+ command = args[0]
+ aliases = self.get_option_dict('aliases')
+ while command in aliases:
+ src, alias = aliases[command]
+ del aliases[command] # ensure each alias can expand only once!
+ import shlex
+ args[:1] = shlex.split(alias, True)
+ command = args[0]
+
+ nargs = _Distribution._parse_command_opts(self, parser, args)
+
+ # Handle commands that want to consume all remaining arguments
+ cmd_class = self.get_command_class(command)
+ if getattr(cmd_class, 'command_consumes_arguments', None):
+ self.get_option_dict(command)['args'] = ("command line", nargs)
+ if nargs is not None:
+ return []
+
+ return nargs
+
+ def get_cmdline_options(self):
+ """Return a '{cmd: {opt:val}}' map of all command-line options
+
+ Option names are all long, but do not include the leading '--', and
+ contain dashes rather than underscores. If the option doesn't take
+ an argument (e.g. '--quiet'), the 'val' is 'None'.
+
+ Note that options provided by config files are intentionally excluded.
+ """
+
+ d = {}
+
+ for cmd, opts in self.command_options.items():
+
+ for opt, (src, val) in opts.items():
+
+ if src != "command line":
+ continue
+
+ opt = opt.replace('_', '-')
+
+ if val == 0:
+ cmdobj = self.get_command_obj(cmd)
+ neg_opt = self.negative_opt.copy()
+ neg_opt.update(getattr(cmdobj, 'negative_opt', {}))
+ for neg, pos in neg_opt.items():
+ if pos == opt:
+ opt = neg
+ val = None
+ break
+ else:
+ raise AssertionError("Shouldn't be able to get here")
+
+ elif val == 1:
+ val = None
+
+ d.setdefault(cmd, {})[opt] = val
+
+ return d
+
+ def iter_distribution_names(self):
+ """Yield all packages, modules, and extension names in distribution"""
+
+ for pkg in self.packages or ():
+ yield pkg
+
+ for module in self.py_modules or ():
+ yield module
+
+ for ext in self.ext_modules or ():
+ if isinstance(ext, tuple):
+ name, buildinfo = ext
+ else:
+ name = ext.name
+ if name.endswith('module'):
+ name = name[:-6]
+ yield name
+
+ def handle_display_options(self, option_order):
+ """If there were any non-global "display-only" options
+ (--help-commands or the metadata display options) on the command
+ line, display the requested info and return true; else return
+ false.
+ """
+ import sys
+
+ if six.PY2 or self.help_commands:
+ return _Distribution.handle_display_options(self, option_order)
+
+ # Stdout may be StringIO (e.g. in tests)
+ if not isinstance(sys.stdout, io.TextIOWrapper):
+ return _Distribution.handle_display_options(self, option_order)
+
+ # Don't wrap stdout if utf-8 is already the encoding. Provides
+ # workaround for #334.
+ if sys.stdout.encoding.lower() in ('utf-8', 'utf8'):
+ return _Distribution.handle_display_options(self, option_order)
+
+ # Print metadata in UTF-8 no matter the platform
+ encoding = sys.stdout.encoding
+ errors = sys.stdout.errors
+ newline = sys.platform != 'win32' and '\n' or None
+ line_buffering = sys.stdout.line_buffering
+
+ sys.stdout = io.TextIOWrapper(
+ sys.stdout.detach(), 'utf-8', errors, newline, line_buffering)
+ try:
+ return _Distribution.handle_display_options(self, option_order)
+ finally:
+ sys.stdout = io.TextIOWrapper(
+ sys.stdout.detach(), encoding, errors, newline, line_buffering)
+
+
+class Feature:
+ """
+ **deprecated** -- The `Feature` facility was never completely implemented
+ or supported, `has reported issues
+ <https://github.com/pypa/setuptools/issues/58>`_ and will be removed in
+ a future version.
+
+ A subset of the distribution that can be excluded if unneeded/wanted
+
+ Features are created using these keyword arguments:
+
+ 'description' -- a short, human readable description of the feature, to
+ be used in error messages, and option help messages.
+
+ 'standard' -- if true, the feature is included by default if it is
+ available on the current system. Otherwise, the feature is only
+ included if requested via a command line '--with-X' option, or if
+ another included feature requires it. The default setting is 'False'.
+
+ 'available' -- if true, the feature is available for installation on the
+ current system. The default setting is 'True'.
+
+ 'optional' -- if true, the feature's inclusion can be controlled from the
+ command line, using the '--with-X' or '--without-X' options. If
+ false, the feature's inclusion status is determined automatically,
+ based on 'available', 'standard', and whether any other feature
+ requires it. The default setting is 'True'.
+
+ 'require_features' -- a string or sequence of strings naming features
+ that should also be included if this feature is included. Defaults to
+ empty list. May also contain 'Require' objects that should be
+ added/removed from the distribution.
+
+ 'remove' -- a string or list of strings naming packages to be removed
+ from the distribution if this feature is *not* included. If the
+ feature *is* included, this argument is ignored. This argument exists
+ to support removing features that "crosscut" a distribution, such as
+ defining a 'tests' feature that removes all the 'tests' subpackages
+ provided by other features. The default for this argument is an empty
+ list. (Note: the named package(s) or modules must exist in the base
+ distribution when the 'setup()' function is initially called.)
+
+ other keywords -- any other keyword arguments are saved, and passed to
+ the distribution's 'include()' and 'exclude()' methods when the
+ feature is included or excluded, respectively. So, for example, you
+ could pass 'packages=["a","b"]' to cause packages 'a' and 'b' to be
+ added or removed from the distribution as appropriate.
+
+ A feature must include at least one 'requires', 'remove', or other
+ keyword argument. Otherwise, it can't affect the distribution in any way.
+ Note also that you can subclass 'Feature' to create your own specialized
+ feature types that modify the distribution in other ways when included or
+ excluded. See the docstrings for the various methods here for more detail.
+ Aside from the methods, the only feature attributes that distributions look
+ at are 'description' and 'optional'.
+ """
+
+ @staticmethod
+ def warn_deprecated():
+ msg = (
+ "Features are deprecated and will be removed in a future "
+ "version. See https://github.com/pypa/setuptools/issues/65."
+ )
+ warnings.warn(msg, DistDeprecationWarning, stacklevel=3)
+
+ def __init__(
+ self, description, standard=False, available=True,
+ optional=True, require_features=(), remove=(), **extras):
+ self.warn_deprecated()
+
+ self.description = description
+ self.standard = standard
+ self.available = available
+ self.optional = optional
+ if isinstance(require_features, (str, Require)):
+ require_features = require_features,
+
+ self.require_features = [
+ r for r in require_features if isinstance(r, str)
+ ]
+ er = [r for r in require_features if not isinstance(r, str)]
+ if er:
+ extras['require_features'] = er
+
+ if isinstance(remove, str):
+ remove = remove,
+ self.remove = remove
+ self.extras = extras
+
+ if not remove and not require_features and not extras:
+ raise DistutilsSetupError(
+ "Feature %s: must define 'require_features', 'remove', or "
+ "at least one of 'packages', 'py_modules', etc."
+ )
+
+ def include_by_default(self):
+ """Should this feature be included by default?"""
+ return self.available and self.standard
+
+ def include_in(self, dist):
+ """Ensure feature and its requirements are included in distribution
+
+ You may override this in a subclass to perform additional operations on
+ the distribution. Note that this method may be called more than once
+ per feature, and so should be idempotent.
+
+ """
+
+ if not self.available:
+ raise DistutilsPlatformError(
+ self.description + " is required, "
+ "but is not available on this platform"
+ )
+
+ dist.include(**self.extras)
+
+ for f in self.require_features:
+ dist.include_feature(f)
+
+ def exclude_from(self, dist):
+ """Ensure feature is excluded from distribution
+
+ You may override this in a subclass to perform additional operations on
+ the distribution. This method will be called at most once per
+ feature, and only after all included features have been asked to
+ include themselves.
+ """
+
+ dist.exclude(**self.extras)
+
+ if self.remove:
+ for item in self.remove:
+ dist.exclude_package(item)
+
+ def validate(self, dist):
+ """Verify that feature makes sense in context of distribution
+
+ This method is called by the distribution just before it parses its
+ command line. It checks to ensure that the 'remove' attribute, if any,
+ contains only valid package/module names that are present in the base
+ distribution when 'setup()' is called. You may override it in a
+ subclass to perform any other required validation of the feature
+ against a target distribution.
+ """
+
+ for item in self.remove:
+ if not dist.has_contents_for(item):
+ raise DistutilsSetupError(
+ "%s wants to be able to remove %s, but the distribution"
+ " doesn't contain any packages or modules under %s"
+ % (self.description, item, item)
+ )
+
+
+class DistDeprecationWarning(SetuptoolsDeprecationWarning):
+ """Class for warning about deprecations in dist in
+ setuptools. Not ignored by default, unlike DeprecationWarning."""
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/errors.py b/monEnvTP/lib/python3.8/site-packages/setuptools/errors.py
new file mode 100644
index 0000000000000000000000000000000000000000..2701747f56cc77845159f2c5fee2d0ce114259af
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/errors.py
@@ -0,0 +1,16 @@
+"""setuptools.errors
+
+Provides exceptions used by setuptools modules.
+"""
+
+from distutils.errors import DistutilsError
+
+
+class RemovedCommandError(DistutilsError, RuntimeError):
+ """Error used for commands that have been removed in setuptools.
+
+ Since ``setuptools`` is built on ``distutils``, simply removing a command
+ from ``setuptools`` will make the behavior fall back to ``distutils``; this
+ error is raised if a command exists in ``distutils`` but has been actively
+ removed in ``setuptools``.
+ """
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/extension.py b/monEnvTP/lib/python3.8/site-packages/setuptools/extension.py
new file mode 100644
index 0000000000000000000000000000000000000000..29468894f828128f4c36660167dd1f9e68e584be
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/extension.py
@@ -0,0 +1,57 @@
+import re
+import functools
+import distutils.core
+import distutils.errors
+import distutils.extension
+
+from setuptools.extern.six.moves import map
+
+from .monkey import get_unpatched
+
+
+def _have_cython():
+ """
+ Return True if Cython can be imported.
+ """
+ cython_impl = 'Cython.Distutils.build_ext'
+ try:
+ # from (cython_impl) import build_ext
+ __import__(cython_impl, fromlist=['build_ext']).build_ext
+ return True
+ except Exception:
+ pass
+ return False
+
+
+# for compatibility
+have_pyrex = _have_cython
+
+_Extension = get_unpatched(distutils.core.Extension)
+
+
+class Extension(_Extension):
+ """Extension that uses '.c' files in place of '.pyx' files"""
+
+ def __init__(self, name, sources, *args, **kw):
+ # The *args is needed for compatibility as calls may use positional
+ # arguments. py_limited_api may be set only via keyword.
+ self.py_limited_api = kw.pop("py_limited_api", False)
+ _Extension.__init__(self, name, sources, *args, **kw)
+
+ def _convert_pyx_sources_to_lang(self):
+ """
+ Replace sources with .pyx extensions to sources with the target
+ language extension. This mechanism allows language authors to supply
+ pre-converted sources but to prefer the .pyx sources.
+ """
+ if _have_cython():
+ # the build has Cython, so allow it to compile the .pyx files
+ return
+ lang = self.language or ''
+ target_ext = '.cpp' if lang.lower() == 'c++' else '.c'
+ sub = functools.partial(re.sub, '.pyx$', target_ext)
+ self.sources = list(map(sub, self.sources))
+
+
+class Library(Extension):
+ """Just like a regular Extension, but built as a library instead"""
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/extern/__init__.py b/monEnvTP/lib/python3.8/site-packages/setuptools/extern/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e8c616f910bb9bb874c3d44f1efe5239ecb8f621
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/extern/__init__.py
@@ -0,0 +1,73 @@
+import sys
+
+
+class VendorImporter:
+ """
+ A PEP 302 meta path importer for finding optionally-vendored
+ or otherwise naturally-installed packages from root_name.
+ """
+
+ def __init__(self, root_name, vendored_names=(), vendor_pkg=None):
+ self.root_name = root_name
+ self.vendored_names = set(vendored_names)
+ self.vendor_pkg = vendor_pkg or root_name.replace('extern', '_vendor')
+
+ @property
+ def search_path(self):
+ """
+ Search first the vendor package then as a natural package.
+ """
+ yield self.vendor_pkg + '.'
+ yield ''
+
+ def find_module(self, fullname, path=None):
+ """
+ Return self when fullname starts with root_name and the
+ target module is one vendored through this importer.
+ """
+ root, base, target = fullname.partition(self.root_name + '.')
+ if root:
+ return
+ if not any(map(target.startswith, self.vendored_names)):
+ return
+ return self
+
+ def load_module(self, fullname):
+ """
+ Iterate over the search path to locate and load fullname.
+ """
+ root, base, target = fullname.partition(self.root_name + '.')
+ for prefix in self.search_path:
+ try:
+ extant = prefix + target
+ __import__(extant)
+ mod = sys.modules[extant]
+ sys.modules[fullname] = mod
+ # mysterious hack:
+ # Remove the reference to the extant package/module
+ # on later Python versions to cause relative imports
+ # in the vendor package to resolve the same modules
+ # as those going through this importer.
+ if sys.version_info >= (3, ):
+ del sys.modules[extant]
+ return mod
+ except ImportError:
+ pass
+ else:
+ raise ImportError(
+ "The '{target}' package is required; "
+ "normally this is bundled with this package so if you get "
+ "this warning, consult the packager of your "
+ "distribution.".format(**locals())
+ )
+
+ def install(self):
+ """
+ Install this importer into sys.meta_path if not already present.
+ """
+ if self not in sys.meta_path:
+ sys.meta_path.append(self)
+
+
+names = 'six', 'packaging', 'pyparsing', 'ordered_set',
+VendorImporter(__name__, names, 'setuptools._vendor').install()
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/extern/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/setuptools/extern/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2bb22c974c93c8ec46b596eb1e30c5ad7ecf9220
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/extern/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/glob.py b/monEnvTP/lib/python3.8/site-packages/setuptools/glob.py
new file mode 100644
index 0000000000000000000000000000000000000000..9d7cbc5da68da8605d271b9314befb206b87bca6
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/glob.py
@@ -0,0 +1,174 @@
+"""
+Filename globbing utility. Mostly a copy of `glob` from Python 3.5.
+
+Changes include:
+ * `yield from` and PEP3102 `*` removed.
+ * Hidden files are not ignored.
+"""
+
+import os
+import re
+import fnmatch
+
+__all__ = ["glob", "iglob", "escape"]
+
+
+def glob(pathname, recursive=False):
+ """Return a list of paths matching a pathname pattern.
+
+ The pattern may contain simple shell-style wildcards a la
+ fnmatch. However, unlike fnmatch, filenames starting with a
+ dot are special cases that are not matched by '*' and '?'
+ patterns.
+
+ If recursive is true, the pattern '**' will match any files and
+ zero or more directories and subdirectories.
+ """
+ return list(iglob(pathname, recursive=recursive))
+
+
+def iglob(pathname, recursive=False):
+ """Return an iterator which yields the paths matching a pathname pattern.
+
+ The pattern may contain simple shell-style wildcards a la
+ fnmatch. However, unlike fnmatch, filenames starting with a
+ dot are special cases that are not matched by '*' and '?'
+ patterns.
+
+ If recursive is true, the pattern '**' will match any files and
+ zero or more directories and subdirectories.
+ """
+ it = _iglob(pathname, recursive)
+ if recursive and _isrecursive(pathname):
+ s = next(it) # skip empty string
+ assert not s
+ return it
+
+
+def _iglob(pathname, recursive):
+ dirname, basename = os.path.split(pathname)
+ if not has_magic(pathname):
+ if basename:
+ if os.path.lexists(pathname):
+ yield pathname
+ else:
+ # Patterns ending with a slash should match only directories
+ if os.path.isdir(dirname):
+ yield pathname
+ return
+ if not dirname:
+ if recursive and _isrecursive(basename):
+ for x in glob2(dirname, basename):
+ yield x
+ else:
+ for x in glob1(dirname, basename):
+ yield x
+ return
+ # `os.path.split()` returns the argument itself as a dirname if it is a
+ # drive or UNC path. Prevent an infinite recursion if a drive or UNC path
+ # contains magic characters (i.e. r'\\?\C:').
+ if dirname != pathname and has_magic(dirname):
+ dirs = _iglob(dirname, recursive)
+ else:
+ dirs = [dirname]
+ if has_magic(basename):
+ if recursive and _isrecursive(basename):
+ glob_in_dir = glob2
+ else:
+ glob_in_dir = glob1
+ else:
+ glob_in_dir = glob0
+ for dirname in dirs:
+ for name in glob_in_dir(dirname, basename):
+ yield os.path.join(dirname, name)
+
+
+# These 2 helper functions non-recursively glob inside a literal directory.
+# They return a list of basenames. `glob1` accepts a pattern while `glob0`
+# takes a literal basename (so it only has to check for its existence).
+
+
+def glob1(dirname, pattern):
+ if not dirname:
+ if isinstance(pattern, bytes):
+ dirname = os.curdir.encode('ASCII')
+ else:
+ dirname = os.curdir
+ try:
+ names = os.listdir(dirname)
+ except OSError:
+ return []
+ return fnmatch.filter(names, pattern)
+
+
+def glob0(dirname, basename):
+ if not basename:
+ # `os.path.split()` returns an empty basename for paths ending with a
+ # directory separator. 'q*x/' should match only directories.
+ if os.path.isdir(dirname):
+ return [basename]
+ else:
+ if os.path.lexists(os.path.join(dirname, basename)):
+ return [basename]
+ return []
+
+
+# This helper function recursively yields relative pathnames inside a literal
+# directory.
+
+
+def glob2(dirname, pattern):
+ assert _isrecursive(pattern)
+ yield pattern[:0]
+ for x in _rlistdir(dirname):
+ yield x
+
+
+# Recursively yields relative pathnames inside a literal directory.
+def _rlistdir(dirname):
+ if not dirname:
+ if isinstance(dirname, bytes):
+ dirname = os.curdir.encode('ASCII')
+ else:
+ dirname = os.curdir
+ try:
+ names = os.listdir(dirname)
+ except os.error:
+ return
+ for x in names:
+ yield x
+ path = os.path.join(dirname, x) if dirname else x
+ for y in _rlistdir(path):
+ yield os.path.join(x, y)
+
+
+magic_check = re.compile('([*?[])')
+magic_check_bytes = re.compile(b'([*?[])')
+
+
+def has_magic(s):
+ if isinstance(s, bytes):
+ match = magic_check_bytes.search(s)
+ else:
+ match = magic_check.search(s)
+ return match is not None
+
+
+def _isrecursive(pattern):
+ if isinstance(pattern, bytes):
+ return pattern == b'**'
+ else:
+ return pattern == '**'
+
+
+def escape(pathname):
+ """Escape all special characters.
+ """
+ # Escaping is done by wrapping any of "*?[" between square brackets.
+ # Metacharacters do not work in the drive part and shouldn't be escaped.
+ drive, pathname = os.path.splitdrive(pathname)
+ if isinstance(pathname, bytes):
+ pathname = magic_check_bytes.sub(br'[\1]', pathname)
+ else:
+ pathname = magic_check.sub(r'[\1]', pathname)
+ return drive + pathname
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/gui-32.exe b/monEnvTP/lib/python3.8/site-packages/setuptools/gui-32.exe
new file mode 100644
index 0000000000000000000000000000000000000000..f8d3509653ba8f80ca7f3aa7f95616142ba83a94
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/gui-32.exe differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/gui-64.exe b/monEnvTP/lib/python3.8/site-packages/setuptools/gui-64.exe
new file mode 100644
index 0000000000000000000000000000000000000000..330c51a5dde15a0bb610a48cd0ca11770c914dae
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/gui-64.exe differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/gui.exe b/monEnvTP/lib/python3.8/site-packages/setuptools/gui.exe
new file mode 100644
index 0000000000000000000000000000000000000000..f8d3509653ba8f80ca7f3aa7f95616142ba83a94
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/setuptools/gui.exe differ
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/installer.py b/monEnvTP/lib/python3.8/site-packages/setuptools/installer.py
new file mode 100644
index 0000000000000000000000000000000000000000..9f8be2ef8427651e3b0fbef497535e152dde66b1
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/installer.py
@@ -0,0 +1,150 @@
+import glob
+import os
+import subprocess
+import sys
+from distutils import log
+from distutils.errors import DistutilsError
+
+import pkg_resources
+from setuptools.command.easy_install import easy_install
+from setuptools.extern import six
+from setuptools.wheel import Wheel
+
+from .py31compat import TemporaryDirectory
+
+
+def _fixup_find_links(find_links):
+ """Ensure find-links option end-up being a list of strings."""
+ if isinstance(find_links, six.string_types):
+ return find_links.split()
+ assert isinstance(find_links, (tuple, list))
+ return find_links
+
+
+def _legacy_fetch_build_egg(dist, req):
+ """Fetch an egg needed for building.
+
+ Legacy path using EasyInstall.
+ """
+ tmp_dist = dist.__class__({'script_args': ['easy_install']})
+ opts = tmp_dist.get_option_dict('easy_install')
+ opts.clear()
+ opts.update(
+ (k, v)
+ for k, v in dist.get_option_dict('easy_install').items()
+ if k in (
+ # don't use any other settings
+ 'find_links', 'site_dirs', 'index_url',
+ 'optimize', 'site_dirs', 'allow_hosts',
+ ))
+ if dist.dependency_links:
+ links = dist.dependency_links[:]
+ if 'find_links' in opts:
+ links = _fixup_find_links(opts['find_links'][1]) + links
+ opts['find_links'] = ('setup', links)
+ install_dir = dist.get_egg_cache_dir()
+ cmd = easy_install(
+ tmp_dist, args=["x"], install_dir=install_dir,
+ exclude_scripts=True,
+ always_copy=False, build_directory=None, editable=False,
+ upgrade=False, multi_version=True, no_report=True, user=False
+ )
+ cmd.ensure_finalized()
+ return cmd.easy_install(req)
+
+
+def fetch_build_egg(dist, req):
+ """Fetch an egg needed for building.
+
+ Use pip/wheel to fetch/build a wheel."""
+ # Check pip is available.
+ try:
+ pkg_resources.get_distribution('pip')
+ except pkg_resources.DistributionNotFound:
+ dist.announce(
+ 'WARNING: The pip package is not available, falling back '
+ 'to EasyInstall for handling setup_requires/test_requires; '
+ 'this is deprecated and will be removed in a future version.'
+ , log.WARN
+ )
+ return _legacy_fetch_build_egg(dist, req)
+ # Warn if wheel is not.
+ try:
+ pkg_resources.get_distribution('wheel')
+ except pkg_resources.DistributionNotFound:
+ dist.announce('WARNING: The wheel package is not available.', log.WARN)
+ # Ignore environment markers; if supplied, it is required.
+ req = strip_marker(req)
+ # Take easy_install options into account, but do not override relevant
+ # pip environment variables (like PIP_INDEX_URL or PIP_QUIET); they'll
+ # take precedence.
+ opts = dist.get_option_dict('easy_install')
+ if 'allow_hosts' in opts:
+ raise DistutilsError('the `allow-hosts` option is not supported '
+ 'when using pip to install requirements.')
+ if 'PIP_QUIET' in os.environ or 'PIP_VERBOSE' in os.environ:
+ quiet = False
+ else:
+ quiet = True
+ if 'PIP_INDEX_URL' in os.environ:
+ index_url = None
+ elif 'index_url' in opts:
+ index_url = opts['index_url'][1]
+ else:
+ index_url = None
+ if 'find_links' in opts:
+ find_links = _fixup_find_links(opts['find_links'][1])[:]
+ else:
+ find_links = []
+ if dist.dependency_links:
+ find_links.extend(dist.dependency_links)
+ eggs_dir = os.path.realpath(dist.get_egg_cache_dir())
+ environment = pkg_resources.Environment()
+ for egg_dist in pkg_resources.find_distributions(eggs_dir):
+ if egg_dist in req and environment.can_add(egg_dist):
+ return egg_dist
+ with TemporaryDirectory() as tmpdir:
+ cmd = [
+ sys.executable, '-m', 'pip',
+ '--disable-pip-version-check',
+ 'wheel', '--no-deps',
+ '-w', tmpdir,
+ ]
+ if quiet:
+ cmd.append('--quiet')
+ if index_url is not None:
+ cmd.extend(('--index-url', index_url))
+ if find_links is not None:
+ for link in find_links:
+ cmd.extend(('--find-links', link))
+ # If requirement is a PEP 508 direct URL, directly pass
+ # the URL to pip, as `req @ url` does not work on the
+ # command line.
+ if req.url:
+ cmd.append(req.url)
+ else:
+ cmd.append(str(req))
+ try:
+ subprocess.check_call(cmd)
+ except subprocess.CalledProcessError as e:
+ raise DistutilsError(str(e))
+ wheel = Wheel(glob.glob(os.path.join(tmpdir, '*.whl'))[0])
+ dist_location = os.path.join(eggs_dir, wheel.egg_name())
+ wheel.install_as_egg(dist_location)
+ dist_metadata = pkg_resources.PathMetadata(
+ dist_location, os.path.join(dist_location, 'EGG-INFO'))
+ dist = pkg_resources.Distribution.from_filename(
+ dist_location, metadata=dist_metadata)
+ return dist
+
+
+def strip_marker(req):
+ """
+ Return a new requirement without the environment marker to avoid
+ calling pip with something like `babel; extra == "i18n"`, which
+ would always be ignored.
+ """
+ # create a copy to avoid mutating the input
+ req = pkg_resources.Requirement.parse(str(req))
+ req.marker = None
+ return req
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/launch.py b/monEnvTP/lib/python3.8/site-packages/setuptools/launch.py
new file mode 100644
index 0000000000000000000000000000000000000000..308283ea939ed9bced7b099eb8a1879aa9c203d4
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/launch.py
@@ -0,0 +1,35 @@
+"""
+Launch the Python script on the command line after
+setuptools is bootstrapped via import.
+"""
+
+# Note that setuptools gets imported implicitly by the
+# invocation of this script using python -m setuptools.launch
+
+import tokenize
+import sys
+
+
+def run():
+ """
+ Run the script in sys.argv[1] as if it had
+ been invoked naturally.
+ """
+ __builtins__
+ script_name = sys.argv[1]
+ namespace = dict(
+ __file__=script_name,
+ __name__='__main__',
+ __doc__=None,
+ )
+ sys.argv[:] = sys.argv[1:]
+
+ open_ = getattr(tokenize, 'open', open)
+ script = open_(script_name).read()
+ norm_script = script.replace('\\r\\n', '\\n')
+ code = compile(norm_script, script_name, 'exec')
+ exec(code, namespace)
+
+
+if __name__ == '__main__':
+ run()
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/lib2to3_ex.py b/monEnvTP/lib/python3.8/site-packages/setuptools/lib2to3_ex.py
new file mode 100644
index 0000000000000000000000000000000000000000..4b1a73feb26fdad65bafdeb21f5ce6abfb905fc0
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/lib2to3_ex.py
@@ -0,0 +1,62 @@
+"""
+Customized Mixin2to3 support:
+
+ - adds support for converting doctests
+
+
+This module raises an ImportError on Python 2.
+"""
+
+from distutils.util import Mixin2to3 as _Mixin2to3
+from distutils import log
+from lib2to3.refactor import RefactoringTool, get_fixers_from_package
+
+import setuptools
+
+
+class DistutilsRefactoringTool(RefactoringTool):
+ def log_error(self, msg, *args, **kw):
+ log.error(msg, *args)
+
+ def log_message(self, msg, *args):
+ log.info(msg, *args)
+
+ def log_debug(self, msg, *args):
+ log.debug(msg, *args)
+
+
+class Mixin2to3(_Mixin2to3):
+ def run_2to3(self, files, doctests=False):
+ # See of the distribution option has been set, otherwise check the
+ # setuptools default.
+ if self.distribution.use_2to3 is not True:
+ return
+ if not files:
+ return
+ log.info("Fixing " + " ".join(files))
+ self.__build_fixer_names()
+ self.__exclude_fixers()
+ if doctests:
+ if setuptools.run_2to3_on_doctests:
+ r = DistutilsRefactoringTool(self.fixer_names)
+ r.refactor(files, write=True, doctests_only=True)
+ else:
+ _Mixin2to3.run_2to3(self, files)
+
+ def __build_fixer_names(self):
+ if self.fixer_names:
+ return
+ self.fixer_names = []
+ for p in setuptools.lib2to3_fixer_packages:
+ self.fixer_names.extend(get_fixers_from_package(p))
+ if self.distribution.use_2to3_fixers is not None:
+ for p in self.distribution.use_2to3_fixers:
+ self.fixer_names.extend(get_fixers_from_package(p))
+
+ def __exclude_fixers(self):
+ excluded_fixers = getattr(self, 'exclude_fixers', [])
+ if self.distribution.use_2to3_exclude_fixers is not None:
+ excluded_fixers.extend(self.distribution.use_2to3_exclude_fixers)
+ for fixer_name in excluded_fixers:
+ if fixer_name in self.fixer_names:
+ self.fixer_names.remove(fixer_name)
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/monkey.py b/monEnvTP/lib/python3.8/site-packages/setuptools/monkey.py
new file mode 100644
index 0000000000000000000000000000000000000000..3c77f8cf27f0ab1e71d64cfc114ef9d1bf72295c
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/monkey.py
@@ -0,0 +1,179 @@
+"""
+Monkey patching of distutils.
+"""
+
+import sys
+import distutils.filelist
+import platform
+import types
+import functools
+from importlib import import_module
+import inspect
+
+from setuptools.extern import six
+
+import setuptools
+
+__all__ = []
+"""
+Everything is private. Contact the project team
+if you think you need this functionality.
+"""
+
+
+def _get_mro(cls):
+ """
+ Returns the bases classes for cls sorted by the MRO.
+
+ Works around an issue on Jython where inspect.getmro will not return all
+ base classes if multiple classes share the same name. Instead, this
+ function will return a tuple containing the class itself, and the contents
+ of cls.__bases__. See https://github.com/pypa/setuptools/issues/1024.
+ """
+ if platform.python_implementation() == "Jython":
+ return (cls,) + cls.__bases__
+ return inspect.getmro(cls)
+
+
+def get_unpatched(item):
+ lookup = (
+ get_unpatched_class if isinstance(item, six.class_types) else
+ get_unpatched_function if isinstance(item, types.FunctionType) else
+ lambda item: None
+ )
+ return lookup(item)
+
+
+def get_unpatched_class(cls):
+ """Protect against re-patching the distutils if reloaded
+
+ Also ensures that no other distutils extension monkeypatched the distutils
+ first.
+ """
+ external_bases = (
+ cls
+ for cls in _get_mro(cls)
+ if not cls.__module__.startswith('setuptools')
+ )
+ base = next(external_bases)
+ if not base.__module__.startswith('distutils'):
+ msg = "distutils has already been patched by %r" % cls
+ raise AssertionError(msg)
+ return base
+
+
+def patch_all():
+ # we can't patch distutils.cmd, alas
+ distutils.core.Command = setuptools.Command
+
+ has_issue_12885 = sys.version_info <= (3, 5, 3)
+
+ if has_issue_12885:
+ # fix findall bug in distutils (http://bugs.python.org/issue12885)
+ distutils.filelist.findall = setuptools.findall
+
+ needs_warehouse = (
+ sys.version_info < (2, 7, 13)
+ or
+ (3, 4) < sys.version_info < (3, 4, 6)
+ or
+ (3, 5) < sys.version_info <= (3, 5, 3)
+ )
+
+ if needs_warehouse:
+ warehouse = 'https://upload.pypi.org/legacy/'
+ distutils.config.PyPIRCCommand.DEFAULT_REPOSITORY = warehouse
+
+ _patch_distribution_metadata()
+
+ # Install Distribution throughout the distutils
+ for module in distutils.dist, distutils.core, distutils.cmd:
+ module.Distribution = setuptools.dist.Distribution
+
+ # Install the patched Extension
+ distutils.core.Extension = setuptools.extension.Extension
+ distutils.extension.Extension = setuptools.extension.Extension
+ if 'distutils.command.build_ext' in sys.modules:
+ sys.modules['distutils.command.build_ext'].Extension = (
+ setuptools.extension.Extension
+ )
+
+ patch_for_msvc_specialized_compiler()
+
+
+def _patch_distribution_metadata():
+ """Patch write_pkg_file and read_pkg_file for higher metadata standards"""
+ for attr in ('write_pkg_file', 'read_pkg_file', 'get_metadata_version'):
+ new_val = getattr(setuptools.dist, attr)
+ setattr(distutils.dist.DistributionMetadata, attr, new_val)
+
+
+def patch_func(replacement, target_mod, func_name):
+ """
+ Patch func_name in target_mod with replacement
+
+ Important - original must be resolved by name to avoid
+ patching an already patched function.
+ """
+ original = getattr(target_mod, func_name)
+
+ # set the 'unpatched' attribute on the replacement to
+ # point to the original.
+ vars(replacement).setdefault('unpatched', original)
+
+ # replace the function in the original module
+ setattr(target_mod, func_name, replacement)
+
+
+def get_unpatched_function(candidate):
+ return getattr(candidate, 'unpatched')
+
+
+def patch_for_msvc_specialized_compiler():
+ """
+ Patch functions in distutils to use standalone Microsoft Visual C++
+ compilers.
+ """
+ # import late to avoid circular imports on Python < 3.5
+ msvc = import_module('setuptools.msvc')
+
+ if platform.system() != 'Windows':
+ # Compilers only availables on Microsoft Windows
+ return
+
+ def patch_params(mod_name, func_name):
+ """
+ Prepare the parameters for patch_func to patch indicated function.
+ """
+ repl_prefix = 'msvc9_' if 'msvc9' in mod_name else 'msvc14_'
+ repl_name = repl_prefix + func_name.lstrip('_')
+ repl = getattr(msvc, repl_name)
+ mod = import_module(mod_name)
+ if not hasattr(mod, func_name):
+ raise ImportError(func_name)
+ return repl, mod, func_name
+
+ # Python 2.7 to 3.4
+ msvc9 = functools.partial(patch_params, 'distutils.msvc9compiler')
+
+ # Python 3.5+
+ msvc14 = functools.partial(patch_params, 'distutils._msvccompiler')
+
+ try:
+ # Patch distutils.msvc9compiler
+ patch_func(*msvc9('find_vcvarsall'))
+ patch_func(*msvc9('query_vcvarsall'))
+ except ImportError:
+ pass
+
+ try:
+ # Patch distutils._msvccompiler._get_vc_env
+ patch_func(*msvc14('_get_vc_env'))
+ except ImportError:
+ pass
+
+ try:
+ # Patch distutils._msvccompiler.gen_lib_options for Numpy
+ patch_func(*msvc14('gen_lib_options'))
+ except ImportError:
+ pass
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/msvc.py b/monEnvTP/lib/python3.8/site-packages/setuptools/msvc.py
new file mode 100644
index 0000000000000000000000000000000000000000..2ffe1c81ee629c98246e9e72bf630431fa7905b6
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/msvc.py
@@ -0,0 +1,1679 @@
+"""
+Improved support for Microsoft Visual C++ compilers.
+
+Known supported compilers:
+--------------------------
+Microsoft Visual C++ 9.0:
+ Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64)
+ Microsoft Windows SDK 6.1 (x86, x64, ia64)
+ Microsoft Windows SDK 7.0 (x86, x64, ia64)
+
+Microsoft Visual C++ 10.0:
+ Microsoft Windows SDK 7.1 (x86, x64, ia64)
+
+Microsoft Visual C++ 14.X:
+ Microsoft Visual C++ Build Tools 2015 (x86, x64, arm)
+ Microsoft Visual Studio Build Tools 2017 (x86, x64, arm, arm64)
+ Microsoft Visual Studio Build Tools 2019 (x86, x64, arm, arm64)
+
+This may also support compilers shipped with compatible Visual Studio versions.
+"""
+
+import json
+from io import open
+from os import listdir, pathsep
+from os.path import join, isfile, isdir, dirname
+import sys
+import platform
+import itertools
+import distutils.errors
+from setuptools.extern.packaging.version import LegacyVersion
+
+from setuptools.extern.six.moves import filterfalse
+
+from .monkey import get_unpatched
+
+if platform.system() == 'Windows':
+ from setuptools.extern.six.moves import winreg
+ from os import environ
+else:
+ # Mock winreg and environ so the module can be imported on this platform.
+
+ class winreg:
+ HKEY_USERS = None
+ HKEY_CURRENT_USER = None
+ HKEY_LOCAL_MACHINE = None
+ HKEY_CLASSES_ROOT = None
+
+ environ = dict()
+
+_msvc9_suppress_errors = (
+ # msvc9compiler isn't available on some platforms
+ ImportError,
+
+ # msvc9compiler raises DistutilsPlatformError in some
+ # environments. See #1118.
+ distutils.errors.DistutilsPlatformError,
+)
+
+try:
+ from distutils.msvc9compiler import Reg
+except _msvc9_suppress_errors:
+ pass
+
+
+def msvc9_find_vcvarsall(version):
+ """
+ Patched "distutils.msvc9compiler.find_vcvarsall" to use the standalone
+ compiler build for Python
+ (VCForPython / Microsoft Visual C++ Compiler for Python 2.7).
+
+ Fall back to original behavior when the standalone compiler is not
+ available.
+
+ Redirect the path of "vcvarsall.bat".
+
+ Parameters
+ ----------
+ version: float
+ Required Microsoft Visual C++ version.
+
+ Return
+ ------
+ str
+ vcvarsall.bat path
+ """
+ vc_base = r'Software\%sMicrosoft\DevDiv\VCForPython\%0.1f'
+ key = vc_base % ('', version)
+ try:
+ # Per-user installs register the compiler path here
+ productdir = Reg.get_value(key, "installdir")
+ except KeyError:
+ try:
+ # All-user installs on a 64-bit system register here
+ key = vc_base % ('Wow6432Node\\', version)
+ productdir = Reg.get_value(key, "installdir")
+ except KeyError:
+ productdir = None
+
+ if productdir:
+ vcvarsall = join(productdir, "vcvarsall.bat")
+ if isfile(vcvarsall):
+ return vcvarsall
+
+ return get_unpatched(msvc9_find_vcvarsall)(version)
+
+
+def msvc9_query_vcvarsall(ver, arch='x86', *args, **kwargs):
+ """
+ Patched "distutils.msvc9compiler.query_vcvarsall" for support extra
+ Microsoft Visual C++ 9.0 and 10.0 compilers.
+
+ Set environment without use of "vcvarsall.bat".
+
+ Parameters
+ ----------
+ ver: float
+ Required Microsoft Visual C++ version.
+ arch: str
+ Target architecture.
+
+ Return
+ ------
+ dict
+ environment
+ """
+ # Try to get environment from vcvarsall.bat (Classical way)
+ try:
+ orig = get_unpatched(msvc9_query_vcvarsall)
+ return orig(ver, arch, *args, **kwargs)
+ except distutils.errors.DistutilsPlatformError:
+ # Pass error if Vcvarsall.bat is missing
+ pass
+ except ValueError:
+ # Pass error if environment not set after executing vcvarsall.bat
+ pass
+
+ # If error, try to set environment directly
+ try:
+ return EnvironmentInfo(arch, ver).return_env()
+ except distutils.errors.DistutilsPlatformError as exc:
+ _augment_exception(exc, ver, arch)
+ raise
+
+
+def msvc14_get_vc_env(plat_spec):
+ """
+ Patched "distutils._msvccompiler._get_vc_env" for support extra
+ Microsoft Visual C++ 14.X compilers.
+
+ Set environment without use of "vcvarsall.bat".
+
+ Parameters
+ ----------
+ plat_spec: str
+ Target architecture.
+
+ Return
+ ------
+ dict
+ environment
+ """
+ # Try to get environment from vcvarsall.bat (Classical way)
+ try:
+ return get_unpatched(msvc14_get_vc_env)(plat_spec)
+ except distutils.errors.DistutilsPlatformError:
+ # Pass error Vcvarsall.bat is missing
+ pass
+
+ # If error, try to set environment directly
+ try:
+ return EnvironmentInfo(plat_spec, vc_min_ver=14.0).return_env()
+ except distutils.errors.DistutilsPlatformError as exc:
+ _augment_exception(exc, 14.0)
+ raise
+
+
+def msvc14_gen_lib_options(*args, **kwargs):
+ """
+ Patched "distutils._msvccompiler.gen_lib_options" for fix
+ compatibility between "numpy.distutils" and "distutils._msvccompiler"
+ (for Numpy < 1.11.2)
+ """
+ if "numpy.distutils" in sys.modules:
+ import numpy as np
+ if LegacyVersion(np.__version__) < LegacyVersion('1.11.2'):
+ return np.distutils.ccompiler.gen_lib_options(*args, **kwargs)
+ return get_unpatched(msvc14_gen_lib_options)(*args, **kwargs)
+
+
+def _augment_exception(exc, version, arch=''):
+ """
+ Add details to the exception message to help guide the user
+ as to what action will resolve it.
+ """
+ # Error if MSVC++ directory not found or environment not set
+ message = exc.args[0]
+
+ if "vcvarsall" in message.lower() or "visual c" in message.lower():
+ # Special error message if MSVC++ not installed
+ tmpl = 'Microsoft Visual C++ {version:0.1f} is required.'
+ message = tmpl.format(**locals())
+ msdownload = 'www.microsoft.com/download/details.aspx?id=%d'
+ if version == 9.0:
+ if arch.lower().find('ia64') > -1:
+ # For VC++ 9.0, if IA64 support is needed, redirect user
+ # to Windows SDK 7.0.
+ # Note: No download link available from Microsoft.
+ message += ' Get it with "Microsoft Windows SDK 7.0"'
+ else:
+ # For VC++ 9.0 redirect user to Vc++ for Python 2.7 :
+ # This redirection link is maintained by Microsoft.
+ # Contact vspython@microsoft.com if it needs updating.
+ message += ' Get it from http://aka.ms/vcpython27'
+ elif version == 10.0:
+ # For VC++ 10.0 Redirect user to Windows SDK 7.1
+ message += ' Get it with "Microsoft Windows SDK 7.1": '
+ message += msdownload % 8279
+ elif version >= 14.0:
+ # For VC++ 14.X Redirect user to latest Visual C++ Build Tools
+ message += (' Get it with "Build Tools for Visual Studio": '
+ r'https://visualstudio.microsoft.com/downloads/')
+
+ exc.args = (message, )
+
+
+class PlatformInfo:
+ """
+ Current and Target Architectures information.
+
+ Parameters
+ ----------
+ arch: str
+ Target architecture.
+ """
+ current_cpu = environ.get('processor_architecture', '').lower()
+
+ def __init__(self, arch):
+ self.arch = arch.lower().replace('x64', 'amd64')
+
+ @property
+ def target_cpu(self):
+ """
+ Return Target CPU architecture.
+
+ Return
+ ------
+ str
+ Target CPU
+ """
+ return self.arch[self.arch.find('_') + 1:]
+
+ def target_is_x86(self):
+ """
+ Return True if target CPU is x86 32 bits..
+
+ Return
+ ------
+ bool
+ CPU is x86 32 bits
+ """
+ return self.target_cpu == 'x86'
+
+ def current_is_x86(self):
+ """
+ Return True if current CPU is x86 32 bits..
+
+ Return
+ ------
+ bool
+ CPU is x86 32 bits
+ """
+ return self.current_cpu == 'x86'
+
+ def current_dir(self, hidex86=False, x64=False):
+ """
+ Current platform specific subfolder.
+
+ Parameters
+ ----------
+ hidex86: bool
+ return '' and not '\x86' if architecture is x86.
+ x64: bool
+ return '\x64' and not '\amd64' if architecture is amd64.
+
+ Return
+ ------
+ str
+ subfolder: '\target', or '' (see hidex86 parameter)
+ """
+ return (
+ '' if (self.current_cpu == 'x86' and hidex86) else
+ r'\x64' if (self.current_cpu == 'amd64' and x64) else
+ r'\%s' % self.current_cpu
+ )
+
+ def target_dir(self, hidex86=False, x64=False):
+ r"""
+ Target platform specific subfolder.
+
+ Parameters
+ ----------
+ hidex86: bool
+ return '' and not '\x86' if architecture is x86.
+ x64: bool
+ return '\x64' and not '\amd64' if architecture is amd64.
+
+ Return
+ ------
+ str
+ subfolder: '\current', or '' (see hidex86 parameter)
+ """
+ return (
+ '' if (self.target_cpu == 'x86' and hidex86) else
+ r'\x64' if (self.target_cpu == 'amd64' and x64) else
+ r'\%s' % self.target_cpu
+ )
+
+ def cross_dir(self, forcex86=False):
+ r"""
+ Cross platform specific subfolder.
+
+ Parameters
+ ----------
+ forcex86: bool
+ Use 'x86' as current architecture even if current architecture is
+ not x86.
+
+ Return
+ ------
+ str
+ subfolder: '' if target architecture is current architecture,
+ '\current_target' if not.
+ """
+ current = 'x86' if forcex86 else self.current_cpu
+ return (
+ '' if self.target_cpu == current else
+ self.target_dir().replace('\\', '\\%s_' % current)
+ )
+
+
+class RegistryInfo:
+ """
+ Microsoft Visual Studio related registry information.
+
+ Parameters
+ ----------
+ platform_info: PlatformInfo
+ "PlatformInfo" instance.
+ """
+ HKEYS = (winreg.HKEY_USERS,
+ winreg.HKEY_CURRENT_USER,
+ winreg.HKEY_LOCAL_MACHINE,
+ winreg.HKEY_CLASSES_ROOT)
+
+ def __init__(self, platform_info):
+ self.pi = platform_info
+
+ @property
+ def visualstudio(self):
+ """
+ Microsoft Visual Studio root registry key.
+
+ Return
+ ------
+ str
+ Registry key
+ """
+ return 'VisualStudio'
+
+ @property
+ def sxs(self):
+ """
+ Microsoft Visual Studio SxS registry key.
+
+ Return
+ ------
+ str
+ Registry key
+ """
+ return join(self.visualstudio, 'SxS')
+
+ @property
+ def vc(self):
+ """
+ Microsoft Visual C++ VC7 registry key.
+
+ Return
+ ------
+ str
+ Registry key
+ """
+ return join(self.sxs, 'VC7')
+
+ @property
+ def vs(self):
+ """
+ Microsoft Visual Studio VS7 registry key.
+
+ Return
+ ------
+ str
+ Registry key
+ """
+ return join(self.sxs, 'VS7')
+
+ @property
+ def vc_for_python(self):
+ """
+ Microsoft Visual C++ for Python registry key.
+
+ Return
+ ------
+ str
+ Registry key
+ """
+ return r'DevDiv\VCForPython'
+
+ @property
+ def microsoft_sdk(self):
+ """
+ Microsoft SDK registry key.
+
+ Return
+ ------
+ str
+ Registry key
+ """
+ return 'Microsoft SDKs'
+
+ @property
+ def windows_sdk(self):
+ """
+ Microsoft Windows/Platform SDK registry key.
+
+ Return
+ ------
+ str
+ Registry key
+ """
+ return join(self.microsoft_sdk, 'Windows')
+
+ @property
+ def netfx_sdk(self):
+ """
+ Microsoft .NET Framework SDK registry key.
+
+ Return
+ ------
+ str
+ Registry key
+ """
+ return join(self.microsoft_sdk, 'NETFXSDK')
+
+ @property
+ def windows_kits_roots(self):
+ """
+ Microsoft Windows Kits Roots registry key.
+
+ Return
+ ------
+ str
+ Registry key
+ """
+ return r'Windows Kits\Installed Roots'
+
+ def microsoft(self, key, x86=False):
+ """
+ Return key in Microsoft software registry.
+
+ Parameters
+ ----------
+ key: str
+ Registry key path where look.
+ x86: str
+ Force x86 software registry.
+
+ Return
+ ------
+ str
+ Registry key
+ """
+ node64 = '' if self.pi.current_is_x86() or x86 else 'Wow6432Node'
+ return join('Software', node64, 'Microsoft', key)
+
+ def lookup(self, key, name):
+ """
+ Look for values in registry in Microsoft software registry.
+
+ Parameters
+ ----------
+ key: str
+ Registry key path where look.
+ name: str
+ Value name to find.
+
+ Return
+ ------
+ str
+ value
+ """
+ key_read = winreg.KEY_READ
+ openkey = winreg.OpenKey
+ ms = self.microsoft
+ for hkey in self.HKEYS:
+ try:
+ bkey = openkey(hkey, ms(key), 0, key_read)
+ except (OSError, IOError):
+ if not self.pi.current_is_x86():
+ try:
+ bkey = openkey(hkey, ms(key, True), 0, key_read)
+ except (OSError, IOError):
+ continue
+ else:
+ continue
+ try:
+ return winreg.QueryValueEx(bkey, name)[0]
+ except (OSError, IOError):
+ pass
+
+
+class SystemInfo:
+ """
+ Microsoft Windows and Visual Studio related system information.
+
+ Parameters
+ ----------
+ registry_info: RegistryInfo
+ "RegistryInfo" instance.
+ vc_ver: float
+ Required Microsoft Visual C++ version.
+ """
+
+ # Variables and properties in this class use originals CamelCase variables
+ # names from Microsoft source files for more easy comparison.
+ WinDir = environ.get('WinDir', '')
+ ProgramFiles = environ.get('ProgramFiles', '')
+ ProgramFilesx86 = environ.get('ProgramFiles(x86)', ProgramFiles)
+
+ def __init__(self, registry_info, vc_ver=None):
+ self.ri = registry_info
+ self.pi = self.ri.pi
+
+ self.known_vs_paths = self.find_programdata_vs_vers()
+
+ # Except for VS15+, VC version is aligned with VS version
+ self.vs_ver = self.vc_ver = (
+ vc_ver or self._find_latest_available_vs_ver())
+
+ def _find_latest_available_vs_ver(self):
+ """
+ Find the latest VC version
+
+ Return
+ ------
+ float
+ version
+ """
+ reg_vc_vers = self.find_reg_vs_vers()
+
+ if not (reg_vc_vers or self.known_vs_paths):
+ raise distutils.errors.DistutilsPlatformError(
+ 'No Microsoft Visual C++ version found')
+
+ vc_vers = set(reg_vc_vers)
+ vc_vers.update(self.known_vs_paths)
+ return sorted(vc_vers)[-1]
+
+ def find_reg_vs_vers(self):
+ """
+ Find Microsoft Visual Studio versions available in registry.
+
+ Return
+ ------
+ list of float
+ Versions
+ """
+ ms = self.ri.microsoft
+ vckeys = (self.ri.vc, self.ri.vc_for_python, self.ri.vs)
+ vs_vers = []
+ for hkey in self.ri.HKEYS:
+ for key in vckeys:
+ try:
+ bkey = winreg.OpenKey(hkey, ms(key), 0, winreg.KEY_READ)
+ except (OSError, IOError):
+ continue
+ subkeys, values, _ = winreg.QueryInfoKey(bkey)
+ for i in range(values):
+ try:
+ ver = float(winreg.EnumValue(bkey, i)[0])
+ if ver not in vs_vers:
+ vs_vers.append(ver)
+ except ValueError:
+ pass
+ for i in range(subkeys):
+ try:
+ ver = float(winreg.EnumKey(bkey, i))
+ if ver not in vs_vers:
+ vs_vers.append(ver)
+ except ValueError:
+ pass
+ return sorted(vs_vers)
+
+ def find_programdata_vs_vers(self):
+ r"""
+ Find Visual studio 2017+ versions from information in
+ "C:\ProgramData\Microsoft\VisualStudio\Packages\_Instances".
+
+ Return
+ ------
+ dict
+ float version as key, path as value.
+ """
+ vs_versions = {}
+ instances_dir = \
+ r'C:\ProgramData\Microsoft\VisualStudio\Packages\_Instances'
+
+ try:
+ hashed_names = listdir(instances_dir)
+
+ except (OSError, IOError):
+ # Directory not exists with all Visual Studio versions
+ return vs_versions
+
+ for name in hashed_names:
+ try:
+ # Get VS installation path from "state.json" file
+ state_path = join(instances_dir, name, 'state.json')
+ with open(state_path, 'rt', encoding='utf-8') as state_file:
+ state = json.load(state_file)
+ vs_path = state['installationPath']
+
+ # Raises OSError if this VS installation does not contain VC
+ listdir(join(vs_path, r'VC\Tools\MSVC'))
+
+ # Store version and path
+ vs_versions[self._as_float_version(
+ state['installationVersion'])] = vs_path
+
+ except (OSError, IOError, KeyError):
+ # Skip if "state.json" file is missing or bad format
+ continue
+
+ return vs_versions
+
+ @staticmethod
+ def _as_float_version(version):
+ """
+ Return a string version as a simplified float version (major.minor)
+
+ Parameters
+ ----------
+ version: str
+ Version.
+
+ Return
+ ------
+ float
+ version
+ """
+ return float('.'.join(version.split('.')[:2]))
+
+ @property
+ def VSInstallDir(self):
+ """
+ Microsoft Visual Studio directory.
+
+ Return
+ ------
+ str
+ path
+ """
+ # Default path
+ default = join(self.ProgramFilesx86,
+ 'Microsoft Visual Studio %0.1f' % self.vs_ver)
+
+ # Try to get path from registry, if fail use default path
+ return self.ri.lookup(self.ri.vs, '%0.1f' % self.vs_ver) or default
+
+ @property
+ def VCInstallDir(self):
+ """
+ Microsoft Visual C++ directory.
+
+ Return
+ ------
+ str
+ path
+ """
+ path = self._guess_vc() or self._guess_vc_legacy()
+
+ if not isdir(path):
+ msg = 'Microsoft Visual C++ directory not found'
+ raise distutils.errors.DistutilsPlatformError(msg)
+
+ return path
+
+ def _guess_vc(self):
+ """
+ Locate Visual C++ for VS2017+.
+
+ Return
+ ------
+ str
+ path
+ """
+ if self.vs_ver <= 14.0:
+ return ''
+
+ try:
+ # First search in known VS paths
+ vs_dir = self.known_vs_paths[self.vs_ver]
+ except KeyError:
+ # Else, search with path from registry
+ vs_dir = self.VSInstallDir
+
+ guess_vc = join(vs_dir, r'VC\Tools\MSVC')
+
+ # Subdir with VC exact version as name
+ try:
+ # Update the VC version with real one instead of VS version
+ vc_ver = listdir(guess_vc)[-1]
+ self.vc_ver = self._as_float_version(vc_ver)
+ return join(guess_vc, vc_ver)
+ except (OSError, IOError, IndexError):
+ return ''
+
+ def _guess_vc_legacy(self):
+ """
+ Locate Visual C++ for versions prior to 2017.
+
+ Return
+ ------
+ str
+ path
+ """
+ default = join(self.ProgramFilesx86,
+ r'Microsoft Visual Studio %0.1f\VC' % self.vs_ver)
+
+ # Try to get "VC++ for Python" path from registry as default path
+ reg_path = join(self.ri.vc_for_python, '%0.1f' % self.vs_ver)
+ python_vc = self.ri.lookup(reg_path, 'installdir')
+ default_vc = join(python_vc, 'VC') if python_vc else default
+
+ # Try to get path from registry, if fail use default path
+ return self.ri.lookup(self.ri.vc, '%0.1f' % self.vs_ver) or default_vc
+
+ @property
+ def WindowsSdkVersion(self):
+ """
+ Microsoft Windows SDK versions for specified MSVC++ version.
+
+ Return
+ ------
+ tuple of str
+ versions
+ """
+ if self.vs_ver <= 9.0:
+ return '7.0', '6.1', '6.0a'
+ elif self.vs_ver == 10.0:
+ return '7.1', '7.0a'
+ elif self.vs_ver == 11.0:
+ return '8.0', '8.0a'
+ elif self.vs_ver == 12.0:
+ return '8.1', '8.1a'
+ elif self.vs_ver >= 14.0:
+ return '10.0', '8.1'
+
    @property
    def WindowsSdkLastVersion(self):
        """
        Microsoft Windows SDK last version.

        Return
        ------
        str
            version ('' when no versioned subdirectory exists)
        """
        # Latest = name of the last subdirectory under <WindowsSdkDir>\lib.
        return self._use_last_dir_name(join(self.WindowsSdkDir, 'lib'))

    @property
    def WindowsSdkDir(self):
        """
        Microsoft Windows SDK directory.

        Return
        ------
        str
            path
        """
        sdkdir = ''
        # 1) Registry lookup, most recent known SDK version first.
        for ver in self.WindowsSdkVersion:
            # Try to get it from registry
            loc = join(self.ri.windows_sdk, 'v%s' % ver)
            sdkdir = self.ri.lookup(loc, 'installationfolder')
            if sdkdir:
                break
        if not sdkdir or not isdir(sdkdir):
            # 2) "VC++ for Python" bundles its own WinSDK copy; note this
            # is keyed by vc_ver (the compiler), not vs_ver.
            path = join(self.ri.vc_for_python, '%0.1f' % self.vc_ver)
            install_base = self.ri.lookup(path, 'installdir')
            if install_base:
                sdkdir = join(install_base, 'WinSDK')
        if not sdkdir or not isdir(sdkdir):
            # 3) Default new-style path: "Windows Kits\<major>".
            for ver in self.WindowsSdkVersion:
                intver = ver[:ver.rfind('.')]
                path = r'Microsoft SDKs\Windows Kits\%s' % intver
                d = join(self.ProgramFiles, path)
                if isdir(d):
                    sdkdir = d
        if not sdkdir or not isdir(sdkdir):
            # 4) Default old-style path: "Microsoft SDKs\Windows\vX.Y".
            for ver in self.WindowsSdkVersion:
                path = r'Microsoft SDKs\Windows\v%s' % ver
                d = join(self.ProgramFiles, path)
                if isdir(d):
                    sdkdir = d
        if not sdkdir:
            # 5) Last resort: Platform SDK bundled with Visual C++.
            sdkdir = join(self.VCInstallDir, 'PlatformSDK')
        return sdkdir

    @property
    def WindowsSDKExecutablePath(self):
        """
        Microsoft Windows SDK executable directory.

        Return
        ------
        str
            path (None when nothing is registered)
        """
        # Find WinSDK NetFx Tools registry dir name: NetFx 3.5 for old
        # VS versions, NetFx 4.0 (with arch suffix) from VS 2012 on.
        if self.vs_ver <= 11.0:
            netfxver = 35
            arch = ''
        else:
            netfxver = 40
            hidex86 = True if self.vs_ver <= 12.0 else False
            arch = self.pi.current_dir(x64=True, hidex86=hidex86)
        fx = 'WinSDK-NetFx%dTools%s' % (netfxver, arch.replace('\\', '-'))

        # list all possibles registry paths
        regpaths = []
        if self.vs_ver >= 14.0:
            for ver in self.NetFxSdkVersion:
                regpaths += [join(self.ri.netfx_sdk, ver, fx)]

        for ver in self.WindowsSdkVersion:
            regpaths += [join(self.ri.windows_sdk, 'v%sA' % ver, fx)]

        # Return installation folder from the more recent path
        for path in regpaths:
            execpath = self.ri.lookup(path, 'installationfolder')
            if execpath:
                return execpath
        # Implicit None when no registry entry matched.

    @property
    def FSharpInstallDir(self):
        """
        Microsoft Visual F# directory.

        Return
        ------
        str
            path ('' when F# is not installed)
        """
        # Product dir is registered under "<visualstudio>\<ver>\Setup\F#".
        path = join(self.ri.visualstudio, r'%0.1f\Setup\F#' % self.vs_ver)
        return self.ri.lookup(path, 'productdir') or ''
+
+ @property
+ def UniversalCRTSdkDir(self):
+ """
+ Microsoft Universal CRT SDK directory.
+
+ Return
+ ------
+ str
+ path
+ """
+ # Set Kit Roots versions for specified MSVC++ version
+ vers = ('10', '81') if self.vs_ver >= 14.0 else ()
+
+ # Find path of the more recent Kit
+ for ver in vers:
+ sdkdir = self.ri.lookup(self.ri.windows_kits_roots,
+ 'kitsroot%s' % ver)
+ if sdkdir:
+ return sdkdir or ''
+
    @property
    def UniversalCRTSdkLastVersion(self):
        """
        Microsoft Universal C Runtime SDK last version.

        Return
        ------
        str
            version ('' when no versioned subdirectory exists)
        """
        # Latest = name of the last subdirectory under <UCRT sdk>\lib.
        return self._use_last_dir_name(join(self.UniversalCRTSdkDir, 'lib'))

    @property
    def NetFxSdkVersion(self):
        """
        Microsoft .NET Framework SDK versions.

        Return
        ------
        tuple of str
            versions (empty before VS 2015: no separate .NET FX SDK)
        """
        # Set FxSdk versions for specified VS version
        return (('4.7.2', '4.7.1', '4.7',
                 '4.6.2', '4.6.1', '4.6',
                 '4.5.2', '4.5.1', '4.5')
                if self.vs_ver >= 14.0 else ())

    @property
    def NetFxSdkDir(self):
        """
        Microsoft .NET Framework SDK directory.

        Return
        ------
        str
            path ('' when not registered)
        """
        sdkdir = ''
        # First registry hit wins; versions are ordered newest-first.
        for ver in self.NetFxSdkVersion:
            loc = join(self.ri.netfx_sdk, ver)
            sdkdir = self.ri.lookup(loc, 'kitsinstallationfolder')
            if sdkdir:
                break
        return sdkdir

    @property
    def FrameworkDir32(self):
        """
        Microsoft .NET Framework 32bit directory.

        Return
        ------
        str
            path
        """
        # Default path
        guess_fw = join(self.WinDir, r'Microsoft.NET\Framework')

        # Try to get path from registry, if fail use default path
        return self.ri.lookup(self.ri.vc, 'frameworkdir32') or guess_fw

    @property
    def FrameworkDir64(self):
        """
        Microsoft .NET Framework 64bit directory.

        Return
        ------
        str
            path
        """
        # Default path
        guess_fw = join(self.WinDir, r'Microsoft.NET\Framework64')

        # Try to get path from registry, if fail use default path
        return self.ri.lookup(self.ri.vc, 'frameworkdir64') or guess_fw

    @property
    def FrameworkVersion32(self):
        """
        Microsoft .NET Framework 32bit versions.

        Return
        ------
        tuple of str
            versions
        """
        return self._find_dot_net_versions(32)

    @property
    def FrameworkVersion64(self):
        """
        Microsoft .NET Framework 64bit versions.

        Return
        ------
        tuple of str
            versions
        """
        return self._find_dot_net_versions(64)

    def _find_dot_net_versions(self, bits):
        """
        Find Microsoft .NET Framework versions.

        Parameters
        ----------
        bits: int
            Platform number of bits: 32 or 64.

        Return
        ------
        tuple of str
            versions
        """
        # Find actual .NET version: registry first, then the last "v*"
        # directory found on disk, then ''.
        reg_ver = self.ri.lookup(self.ri.vc, 'frameworkver%d' % bits)
        dot_net_dir = getattr(self, 'FrameworkDir%d' % bits)
        ver = reg_ver or self._use_last_dir_name(dot_net_dir, 'v') or ''

        # Set .NET versions for specified MSVC++ version.  NOTE: in the
        # vs_ver >= 10.0 branch the ternary binds only to the FIRST tuple
        # element; 'v3.5' is always the second element.
        if self.vs_ver >= 12.0:
            return ver, 'v4.0'
        elif self.vs_ver >= 10.0:
            return 'v4.0.30319' if ver.lower()[:2] != 'v4' else ver, 'v3.5'
        elif self.vs_ver == 9.0:
            return 'v3.5', 'v2.0.50727'
        elif self.vs_ver == 8.0:
            return 'v3.0', 'v2.0.50727'
+
+ @staticmethod
+ def _use_last_dir_name(path, prefix=''):
+ """
+ Return name of the last dir in path or '' if no dir found.
+
+ Parameters
+ ----------
+ path: str
+ Use dirs in this path
+ prefix: str
+ Use only dirs starting by this prefix
+
+ Return
+ ------
+ str
+ name
+ """
+ matching_dirs = (
+ dir_name
+ for dir_name in reversed(listdir(path))
+ if isdir(join(path, dir_name)) and
+ dir_name.startswith(prefix)
+ )
+ return next(matching_dirs, None) or ''
+
+
class EnvironmentInfo:
    """
    Return environment variables for specified Microsoft Visual C++ version
    and platform : Lib, Include, Path and libpath.

    This function is compatible with Microsoft Visual C++ 9.0 to 14.X.

    Script created by analysing Microsoft environment configuration files like
    "vcvars[...].bat", "SetEnv.Cmd", "vcbuildtools.bat", ...

    Parameters
    ----------
    arch: str
        Target architecture.
    vc_ver: float
        Required Microsoft Visual C++ version. If not set, autodetect the last
        version.
    vc_min_ver: float
        Minimum Microsoft Visual C++ version.
    """

    # Variables and properties in this class use originals CamelCase variables
    # names from Microsoft source files for more easy comparison.

    def __init__(self, arch, vc_ver=None, vc_min_ver=0):
        self.pi = PlatformInfo(arch)
        self.ri = RegistryInfo(self.pi)
        self.si = SystemInfo(self.ri, vc_ver)

        # Refuse to build an environment for a compiler older than the
        # caller's stated minimum.
        if self.vc_ver < vc_min_ver:
            err = 'No suitable Microsoft Visual C++ version found'
            raise distutils.errors.DistutilsPlatformError(err)

    @property
    def vs_ver(self):
        """
        Microsoft Visual Studio.

        Return
        ------
        float
            version
        """
        return self.si.vs_ver

    @property
    def vc_ver(self):
        """
        Microsoft Visual C++ version.

        Return
        ------
        float
            version
        """
        return self.si.vc_ver

    @property
    def VSTools(self):
        """
        Microsoft Visual Studio Tools.

        Return
        ------
        list of str
            paths
        """
        paths = [r'Common7\IDE', r'Common7\Tools']

        # VS 2015+ also ships test and performance tooling.
        if self.vs_ver >= 14.0:
            arch_subdir = self.pi.current_dir(hidex86=True, x64=True)
            paths += [r'Common7\IDE\CommonExtensions\Microsoft\TestWindow']
            paths += [r'Team Tools\Performance Tools']
            paths += [r'Team Tools\Performance Tools%s' % arch_subdir]

        return [join(self.si.VSInstallDir, path) for path in paths]
+
    @property
    def VCIncludes(self):
        """
        Microsoft Visual C++ & Microsoft Foundation Class Includes.

        Return
        ------
        list of str
            paths
        """
        return [join(self.si.VCInstallDir, 'Include'),
                join(self.si.VCInstallDir, r'ATLMFC\Include')]

    @property
    def VCLibraries(self):
        """
        Microsoft Visual C++ & Microsoft Foundation Class Libraries.

        Return
        ------
        list of str
            paths
        """
        # VS 2017+ always uses the explicit x64 layout; older versions
        # hide the x86 subdir.
        if self.vs_ver >= 15.0:
            arch_subdir = self.pi.target_dir(x64=True)
        else:
            arch_subdir = self.pi.target_dir(hidex86=True)
        paths = ['Lib%s' % arch_subdir, r'ATLMFC\Lib%s' % arch_subdir]

        if self.vs_ver >= 14.0:
            paths += [r'Lib\store%s' % arch_subdir]

        return [join(self.si.VCInstallDir, path) for path in paths]

    @property
    def VCStoreRefs(self):
        """
        Microsoft Visual C++ store references Libraries.

        Return
        ------
        list of str
            paths (empty before VS 2015: no store/UWP references)
        """
        if self.vs_ver < 14.0:
            return []
        return [join(self.si.VCInstallDir, r'Lib\store\references')]

    @property
    def VCTools(self):
        """
        Microsoft Visual C++ Tools.

        Return
        ------
        list of str
            paths
        """
        si = self.si
        tools = [join(si.VCInstallDir, 'VCPackages')]

        # Cross-compilation bin dir (e.g. Bin\x86_amd64); forced to the
        # x86-hosted layout on VS <= 2010.
        forcex86 = True if self.vs_ver <= 10.0 else False
        arch_subdir = self.pi.cross_dir(forcex86)
        if arch_subdir:
            tools += [join(si.VCInstallDir, 'Bin%s' % arch_subdir)]

        if self.vs_ver == 14.0:
            path = 'Bin%s' % self.pi.current_dir(hidex86=True)
            tools += [join(si.VCInstallDir, path)]

        elif self.vs_ver >= 15.0:
            # VS 2017+ layout: bin\HostX{86,64}\<target arch>; add the
            # host-native dir too when cross-compiling.
            host_dir = (r'bin\HostX86%s' if self.pi.current_is_x86() else
                        r'bin\HostX64%s')
            tools += [join(
                si.VCInstallDir, host_dir % self.pi.target_dir(x64=True))]

            if self.pi.current_cpu != self.pi.target_cpu:
                tools += [join(
                    si.VCInstallDir, host_dir % self.pi.current_dir(x64=True))]

        else:
            tools += [join(si.VCInstallDir, 'Bin')]

        return tools
+
    @property
    def OSLibraries(self):
        """
        Microsoft Windows SDK Libraries.

        Return
        ------
        list of str
            paths
        """
        if self.vs_ver <= 10.0:
            arch_subdir = self.pi.target_dir(hidex86=True, x64=True)
            return [join(self.si.WindowsSdkDir, 'Lib%s' % arch_subdir)]

        else:
            # Modern SDK layout: lib\<sdk version>\um\<arch>.
            arch_subdir = self.pi.target_dir(x64=True)
            lib = join(self.si.WindowsSdkDir, 'lib')
            libver = self._sdk_subdir
            return [join(lib, '%sum%s' % (libver , arch_subdir))]

    @property
    def OSIncludes(self):
        """
        Microsoft Windows SDK Include.

        Return
        ------
        list of str
            paths
        """
        include = join(self.si.WindowsSdkDir, 'include')

        if self.vs_ver <= 10.0:
            return [include, join(include, 'gl')]

        else:
            # VS 2015+ versions the include dirs by SDK release.
            if self.vs_ver >= 14.0:
                sdkver = self._sdk_subdir
            else:
                sdkver = ''
            return [join(include, '%sshared' % sdkver),
                    join(include, '%sum' % sdkver),
                    join(include, '%swinrt' % sdkver)]

    @property
    def OSLibpath(self):
        """
        Microsoft Windows SDK Libraries Paths.

        Return
        ------
        list of str
            paths
        """
        ref = join(self.si.WindowsSdkDir, 'References')
        libpath = []

        if self.vs_ver <= 9.0:
            libpath += self.OSLibraries

        if self.vs_ver >= 11.0:
            libpath += [join(ref, r'CommonConfiguration\Neutral')]

        if self.vs_ver >= 14.0:
            # UWP/WinRT metadata and API-contract references.
            libpath += [
                ref,
                join(self.si.WindowsSdkDir, 'UnionMetadata'),
                join(ref, 'Windows.Foundation.UniversalApiContract', '1.0.0.0'),
                join(ref, 'Windows.Foundation.FoundationContract', '1.0.0.0'),
                join(ref, 'Windows.Networking.Connectivity.WwanContract',
                     '1.0.0.0'),
                join(self.si.WindowsSdkDir, 'ExtensionSDKs', 'Microsoft.VCLibs',
                     '%0.1f' % self.vs_ver, 'References', 'CommonConfiguration',
                     'neutral'),
            ]
        return libpath

    @property
    def SdkTools(self):
        """
        Microsoft Windows SDK Tools.

        Return
        ------
        list of str
            paths
        """
        return list(self._sdk_tools())

    def _sdk_tools(self):
        """
        Microsoft Windows SDK Tools paths generator.

        Return
        ------
        generator of str
            paths
        """
        if self.vs_ver < 15.0:
            bin_dir = 'Bin' if self.vs_ver <= 11.0 else r'Bin\x86'
            yield join(self.si.WindowsSdkDir, bin_dir)

        if not self.pi.current_is_x86():
            arch_subdir = self.pi.current_dir(x64=True)
            path = 'Bin%s' % arch_subdir
            yield join(self.si.WindowsSdkDir, path)

        if self.vs_ver in (10.0, 11.0):
            # NETFX tools for the SDKs paired with VS 2010/2012.
            if self.pi.target_is_x86():
                arch_subdir = ''
            else:
                arch_subdir = self.pi.current_dir(hidex86=True, x64=True)
            path = r'Bin\NETFX 4.0 Tools%s' % arch_subdir
            yield join(self.si.WindowsSdkDir, path)

        elif self.vs_ver >= 15.0:
            # Win10 kits version their bin dir by SDK release.
            path = join(self.si.WindowsSdkDir, 'Bin')
            arch_subdir = self.pi.current_dir(x64=True)
            sdkver = self.si.WindowsSdkLastVersion
            yield join(path, '%s%s' % (sdkver, arch_subdir))

        if self.si.WindowsSDKExecutablePath:
            yield self.si.WindowsSDKExecutablePath
+
    @property
    def _sdk_subdir(self):
        """
        Microsoft Windows SDK version subdir.

        Return
        ------
        str
            subdir ('<version>\\' or '' when no SDK version found)
        """
        ucrtver = self.si.WindowsSdkLastVersion
        return ('%s\\' % ucrtver) if ucrtver else ''

    @property
    def SdkSetup(self):
        """
        Microsoft Windows SDK Setup.

        Return
        ------
        list of str
            paths (empty after VS 2008: Setup dir no longer shipped)
        """
        if self.vs_ver > 9.0:
            return []

        return [join(self.si.WindowsSdkDir, 'Setup')]

    @property
    def FxTools(self):
        """
        Microsoft .NET Framework Tools.

        Return
        ------
        list of str
            paths
        """
        pi = self.pi
        si = self.si

        # Which bitness dirs to expose depends on both the host and the
        # target CPU.
        if self.vs_ver <= 10.0:
            include32 = True
            include64 = not pi.target_is_x86() and not pi.current_is_x86()
        else:
            include32 = pi.target_is_x86() or pi.current_is_x86()
            include64 = pi.current_cpu == 'amd64' or pi.target_cpu == 'amd64'

        tools = []
        if include32:
            tools += [join(si.FrameworkDir32, ver)
                      for ver in si.FrameworkVersion32]
        if include64:
            tools += [join(si.FrameworkDir64, ver)
                      for ver in si.FrameworkVersion64]
        return tools

    @property
    def NetFxSDKLibraries(self):
        """
        Microsoft .Net Framework SDK Libraries.

        Return
        ------
        list of str
            paths
        """
        if self.vs_ver < 14.0 or not self.si.NetFxSdkDir:
            return []

        arch_subdir = self.pi.target_dir(x64=True)
        return [join(self.si.NetFxSdkDir, r'lib\um%s' % arch_subdir)]

    @property
    def NetFxSDKIncludes(self):
        """
        Microsoft .Net Framework SDK Includes.

        Return
        ------
        list of str
            paths
        """
        if self.vs_ver < 14.0 or not self.si.NetFxSdkDir:
            return []

        return [join(self.si.NetFxSdkDir, r'include\um')]

    @property
    def VsTDb(self):
        """
        Microsoft Visual Studio Team System Database.

        Return
        ------
        list of str
            paths
        """
        return [join(self.si.VSInstallDir, r'VSTSDB\Deploy')]

    @property
    def MSBuild(self):
        """
        Microsoft Build Engine.

        Return
        ------
        list of str
            paths
        """
        # MSBuild is unused before VS 2013; it moved from Program Files
        # into the VS install tree with VS 2017.
        if self.vs_ver < 12.0:
            return []
        elif self.vs_ver < 15.0:
            base_path = self.si.ProgramFilesx86
            arch_subdir = self.pi.current_dir(hidex86=True)
        else:
            base_path = self.si.VSInstallDir
            arch_subdir = ''

        path = r'MSBuild\%0.1f\bin%s' % (self.vs_ver, arch_subdir)
        build = [join(base_path, path)]

        if self.vs_ver >= 15.0:
            # Add Roslyn C# & Visual Basic Compiler
            build += [join(base_path, path, 'Roslyn')]

        return build

    @property
    def HTMLHelpWorkshop(self):
        """
        Microsoft HTML Help Workshop.

        Return
        ------
        list of str
            paths
        """
        if self.vs_ver < 11.0:
            return []

        return [join(self.si.ProgramFilesx86, 'HTML Help Workshop')]

    @property
    def UCRTLibraries(self):
        """
        Microsoft Universal C Runtime SDK Libraries.

        Return
        ------
        list of str
            paths (empty before VS 2015: no UCRT)
        """
        if self.vs_ver < 14.0:
            return []

        arch_subdir = self.pi.target_dir(x64=True)
        lib = join(self.si.UniversalCRTSdkDir, 'lib')
        ucrtver = self._ucrt_subdir
        return [join(lib, '%sucrt%s' % (ucrtver, arch_subdir))]

    @property
    def UCRTIncludes(self):
        """
        Microsoft Universal C Runtime SDK Include.

        Return
        ------
        list of str
            paths (empty before VS 2015: no UCRT)
        """
        if self.vs_ver < 14.0:
            return []

        include = join(self.si.UniversalCRTSdkDir, 'include')
        return [join(include, '%sucrt' % self._ucrt_subdir)]

    @property
    def _ucrt_subdir(self):
        """
        Microsoft Universal C Runtime SDK version subdir.

        Return
        ------
        str
            subdir ('<version>\\' or '' when no UCRT version found)
        """
        ucrtver = self.si.UniversalCRTSdkLastVersion
        return ('%s\\' % ucrtver) if ucrtver else ''
+
+ @property
+ def FSharp(self):
+ """
+ Microsoft Visual F#.
+
+ Return
+ ------
+ list of str
+ paths
+ """
+ if 11.0 > self.vs_ver > 12.0:
+ return []
+
+ return [self.si.FSharpInstallDir]
+
    @property
    def VCRuntimeRedist(self):
        """
        Microsoft Visual C++ runtime redistributable dll.

        Return
        ------
        str
            path, or None when the dll cannot be located
        """
        vcruntime = 'vcruntime%d0.dll' % self.vc_ver
        arch_subdir = self.pi.target_dir(x64=True).strip('\\')

        # Installation prefixes candidates
        prefixes = []
        tools_path = self.si.VCInstallDir
        redist_path = dirname(tools_path.replace(r'\Tools', r'\Redist'))
        if isdir(redist_path):
            # Redist version may not be exactly the same as tools
            redist_path = join(redist_path, listdir(redist_path)[-1])
            prefixes += [redist_path, join(redist_path, 'onecore')]

        prefixes += [join(tools_path, 'redist')]  # VS14 legacy path

        # CRT directory
        crt_dirs = ('Microsoft.VC%d.CRT' % (self.vc_ver * 10),
                    # Sometime store in directory with VS version instead of VC
                    'Microsoft.VC%d.CRT' % (int(self.vs_ver) * 10))

        # Probe every prefix/CRT-dir combination for the dll.
        for prefix, crt_dir in itertools.product(prefixes, crt_dirs):
            path = join(prefix, arch_subdir, crt_dir, vcruntime)
            if isfile(path):
                return path
        # Implicit None when no candidate file exists; callers must
        # guard against None before passing this to isfile().
+
+ def return_env(self, exists=True):
+ """
+ Return environment dict.
+
+ Parameters
+ ----------
+ exists: bool
+ It True, only return existing paths.
+
+ Return
+ ------
+ dict
+ environment
+ """
+ env = dict(
+ include=self._build_paths('include',
+ [self.VCIncludes,
+ self.OSIncludes,
+ self.UCRTIncludes,
+ self.NetFxSDKIncludes],
+ exists),
+ lib=self._build_paths('lib',
+ [self.VCLibraries,
+ self.OSLibraries,
+ self.FxTools,
+ self.UCRTLibraries,
+ self.NetFxSDKLibraries],
+ exists),
+ libpath=self._build_paths('libpath',
+ [self.VCLibraries,
+ self.FxTools,
+ self.VCStoreRefs,
+ self.OSLibpath],
+ exists),
+ path=self._build_paths('path',
+ [self.VCTools,
+ self.VSTools,
+ self.VsTDb,
+ self.SdkTools,
+ self.SdkSetup,
+ self.FxTools,
+ self.MSBuild,
+ self.HTMLHelpWorkshop,
+ self.FSharp],
+ exists),
+ )
+ if self.vs_ver >= 14 and isfile(self.VCRuntimeRedist):
+ env['py_vcruntime_redist'] = self.VCRuntimeRedist
+ return env
+
    def _build_paths(self, name, spec_path_lists, exists):
        """
        Given an environment variable name and specified paths,
        return a pathsep-separated string of paths containing
        unique, extant, directories from those paths and from
        the environment variable. Raise an error if no paths
        are resolved.

        Parameters
        ----------
        name: str
            Environment variable name
        spec_path_lists: list of str
            Paths
        exists: bool
            If True, only return existing paths.

        Return
        ------
        str
            Pathsep-separated paths
        """
        # flatten spec_path_lists
        spec_paths = itertools.chain.from_iterable(spec_path_lists)
        env_paths = environ.get(name, '').split(pathsep)
        paths = itertools.chain(spec_paths, env_paths)
        # NOTE: when exists=False, extant_paths stays a chain iterator,
        # which is always truthy — the emptiness check below can only
        # ever fire for exists=True.
        extant_paths = list(filter(isdir, paths)) if exists else paths
        if not extant_paths:
            msg = "%s environment variable is empty" % name.upper()
            raise distutils.errors.DistutilsPlatformError(msg)
        unique_paths = self._unique_everseen(extant_paths)
        return pathsep.join(unique_paths)
+
+ # from Python docs
+ @staticmethod
+ def _unique_everseen(iterable, key=None):
+ """
+ List unique elements, preserving order.
+ Remember all elements ever seen.
+
+ _unique_everseen('AAAABBBCCDAABBB') --> A B C D
+
+ _unique_everseen('ABBCcAD', str.lower) --> A B C D
+ """
+ seen = set()
+ seen_add = seen.add
+ if key is None:
+ for element in filterfalse(seen.__contains__, iterable):
+ seen_add(element)
+ yield element
+ else:
+ for element in iterable:
+ k = key(element)
+ if k not in seen:
+ seen_add(k)
+ yield element
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/namespaces.py b/monEnvTP/lib/python3.8/site-packages/setuptools/namespaces.py
new file mode 100644
index 0000000000000000000000000000000000000000..dc16106d3dc7048a160129745756bbc9b1fb51d9
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/namespaces.py
@@ -0,0 +1,107 @@
+import os
+from distutils import log
+import itertools
+
+from setuptools.extern.six.moves import map
+
+
+flatten = itertools.chain.from_iterable
+
+
class Installer:
    """Mixin that installs/uninstalls the *-nspkg.pth shim required by
    pkg_resources-style namespace packages."""

    # Suffix appended to the install target's stem to name the shim file.
    nspkg_ext = '-nspkg.pth'

    def install_namespaces(self):
        # Write "<target>-nspkg.pth" containing one generated line of
        # bootstrap code per namespace package.
        nsp = self._get_all_ns_packages()
        if not nsp:
            return
        filename, ext = os.path.splitext(self._get_target())
        filename += self.nspkg_ext
        self.outputs.append(filename)
        log.info("Installing %s", filename)
        lines = map(self._gen_nspkg_line, nsp)

        if self.dry_run:
            # always generate the lines, even in dry run
            list(lines)
            return

        with open(filename, 'wt') as f:
            f.writelines(lines)

    def uninstall_namespaces(self):
        # Remove the shim written by install_namespaces(), if any.
        filename, ext = os.path.splitext(self._get_target())
        filename += self.nspkg_ext
        if not os.path.exists(filename):
            return
        log.info("Removing %s", filename)
        os.remove(filename)

    def _get_target(self):
        # self.target is supplied by the install command this mixes into;
        # DevelopInstaller overrides this to use the .egg-link instead.
        return self.target

    _nspkg_tmpl = (
        "import sys, types, os",
        "has_mfs = sys.version_info > (3, 5)",
        "p = os.path.join(%(root)s, *%(pth)r)",
        "importlib = has_mfs and __import__('importlib.util')",
        "has_mfs and __import__('importlib.machinery')",
        "m = has_mfs and "
        "sys.modules.setdefault(%(pkg)r, "
        "importlib.util.module_from_spec("
        "importlib.machinery.PathFinder.find_spec(%(pkg)r, "
        "[os.path.dirname(p)])))",
        "m = m or "
        "sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))",
        "mp = (m or []) and m.__dict__.setdefault('__path__',[])",
        "(p not in mp) and mp.append(p)",
    )
    "lines for the namespace installer"

    _nspkg_tmpl_multi = (
        'm and setattr(sys.modules[%(parent)r], %(child)r, m)',
    )
    "additional line(s) when a parent package is indicated"

    def _get_root(self):
        # Evaluated inside the generated .pth line at import time; it
        # resolves to the site dir currently being processed.
        return "sys._getframe(1).f_locals['sitedir']"

    def _gen_nspkg_line(self, pkg):
        # ensure pkg is not a unicode string under Python 2.7
        pkg = str(pkg)
        pth = tuple(pkg.split('.'))
        root = self._get_root()
        tmpl_lines = self._nspkg_tmpl
        parent, sep, child = pkg.rpartition('.')
        if parent:
            tmpl_lines += self._nspkg_tmpl_multi
        # The %-interpolation pulls pkg/pth/root/parent/child from locals().
        return ';'.join(tmpl_lines) % locals() + '\n'

    def _get_all_ns_packages(self):
        """Return sorted list of all package namespaces"""
        pkgs = self.distribution.namespace_packages or []
        return sorted(flatten(map(self._pkg_names, pkgs)))

    @staticmethod
    def _pkg_names(pkg):
        """
        Given a namespace package, yield the components of that
        package.

        >>> names = Installer._pkg_names('a.b.c')
        >>> set(names) == set(['a', 'a.b', 'a.b.c'])
        True
        """
        parts = pkg.split('.')
        while parts:
            yield '.'.join(parts)
            parts.pop()
+
+
class DevelopInstaller(Installer):
    """Variant of Installer for 'develop' (editable) installs."""

    def _get_target(self):
        # The .egg-link file stands in for the installed target.
        return self.egg_link

    def _get_root(self):
        # Quote the concrete egg path so it can be embedded verbatim in
        # the generated .pth bootstrap code.
        return repr(str(self.egg_path))
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/package_index.py b/monEnvTP/lib/python3.8/site-packages/setuptools/package_index.py
new file mode 100644
index 0000000000000000000000000000000000000000..f419d47167b39a71275744b2f2a78f85c9919a8d
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/package_index.py
@@ -0,0 +1,1136 @@
+"""PyPI and direct package downloading"""
+import sys
+import os
+import re
+import shutil
+import socket
+import base64
+import hashlib
+import itertools
+import warnings
+from functools import wraps
+
+from setuptools.extern import six
+from setuptools.extern.six.moves import urllib, http_client, configparser, map
+
+import setuptools
+from pkg_resources import (
+ CHECKOUT_DIST, Distribution, BINARY_DIST, normalize_path, SOURCE_DIST,
+ Environment, find_distributions, safe_name, safe_version,
+ to_filename, Requirement, DEVELOP_DIST, EGG_DIST,
+)
+from setuptools import ssl_support
+from distutils import log
+from distutils.errors import DistutilsError
+from fnmatch import translate
+from setuptools.py27compat import get_all_headers
+from setuptools.py33compat import unescape
+from setuptools.wheel import Wheel
+
__metaclass__ = type

# Regexes for mining download pages: the #egg=name URL fragment, any
# href attribute, and PyPI's legacy MD5-annotated link markup.
EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.+!]+)$')
HREF = re.compile(r"""href\s*=\s*['"]?([^'"> ]+)""", re.I)
PYPI_MD5 = re.compile(
    r'<a href="([^"#]+)">([^<]+)</a>\n\s+\(<a (?:title="MD5 hash"\n\s+)'
    r'href="[^?]+\?:action=show_md5&digest=([0-9a-f]{32})">md5</a>\)'
)
URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):', re.I).match
# Recognized source-distribution archive extensions.
EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split()

__all__ = [
    'PackageIndex', 'distros_for_url', 'parse_bdist_wininst',
    'interpret_distro_name',
]

_SOCKET_TIMEOUT = 15

# User-Agent sent with every request,
# e.g. "setuptools/40.8.0 Python-urllib/3.8".
_tmpl = "setuptools/{setuptools.__version__} Python-urllib/{py_major}"
user_agent = _tmpl.format(py_major='{}.{}'.format(*sys.version_info), setuptools=setuptools)


def parse_requirement_arg(spec):
    # Normalize a requirement string, converting parse failures into a
    # DistutilsError so callers report a friendly message.
    try:
        return Requirement.parse(spec)
    except ValueError:
        raise DistutilsError(
            "Not a URL, existing file, or requirement spec: %r" % (spec,)
        )
+
+
def parse_bdist_wininst(name):
    """Return (base,pyversion) or (None,None) for possible .exe name"""
    lower = name.lower()
    base = py_ver = plat = None

    # Recognized bdist_wininst layouts (suffix lengths are fixed):
    #   <base>.win32.exe            (10)   <base>.win32-pyX.Y.exe      (16)
    #   <base>.win-amd64.exe        (14)   <base>.win-amd64-pyX.Y.exe  (20)
    if lower.endswith('.exe'):
        if lower.endswith('.win32.exe'):
            base, plat = name[:-10], 'win32'
        elif lower.startswith('.win32-py', -16):
            base, py_ver, plat = name[:-16], name[-7:-4], 'win32'
        elif lower.endswith('.win-amd64.exe'):
            base, plat = name[:-14], 'win-amd64'
        elif lower.startswith('.win-amd64-py', -20):
            base, py_ver, plat = name[:-20], name[-7:-4], 'win-amd64'
    return base, py_ver, plat
+
+
def egg_info_for_url(url):
    """Split a download URL into (basename, fragment)."""
    parsed = urllib.parse.urlparse(url)
    server, path, fragment = parsed[1], parsed[2], parsed[5]
    base = urllib.parse.unquote(path.split('/')[-1])
    if server == 'sourceforge.net' and base == 'download':  # XXX Yuck
        # SourceForge mirror URLs end in /download; the real filename is
        # the previous path component.
        base = urllib.parse.unquote(path.split('/')[-2])
    if '#' in base:
        # A percent-encoded '#' in the filename survives unquoting;
        # split it off as the fragment.
        base, fragment = base.split('#', 1)
    return base, fragment
+
+
def distros_for_url(url, metadata=None):
    """Yield egg or source distribution objects that might be found at a URL"""
    base, fragment = egg_info_for_url(url)
    for dist in distros_for_location(url, base, metadata):
        yield dist
    if fragment:
        # A #egg=name fragment additionally marks the URL as a checkout.
        match = EGG_FRAGMENT.match(fragment)
        if match:
            for dist in interpret_distro_name(
                url, match.group(1), metadata, precedence=CHECKOUT_DIST
            ):
                yield dist


def distros_for_location(location, basename, metadata=None):
    """Yield egg or source distribution objects based on basename"""
    if basename.endswith('.egg.zip'):
        basename = basename[:-4]  # strip the .zip
    if basename.endswith('.egg') and '-' in basename:
        # only one, unambiguous interpretation
        return [Distribution.from_location(location, basename, metadata)]
    if basename.endswith('.whl') and '-' in basename:
        wheel = Wheel(basename)
        if not wheel.is_compatible():
            return []
        return [Distribution(
            location=location,
            project_name=wheel.project_name,
            version=wheel.version,
            # Increase priority over eggs.
            precedence=EGG_DIST + 1,
        )]
    if basename.endswith('.exe'):
        win_base, py_ver, platform = parse_bdist_wininst(basename)
        if win_base is not None:
            return interpret_distro_name(
                location, win_base, metadata, py_ver, BINARY_DIST, platform
            )
    # Try source distro extensions (.zip, .tgz, etc.)
    #
    for ext in EXTENSIONS:
        if basename.endswith(ext):
            basename = basename[:-len(ext)]
            return interpret_distro_name(location, basename, metadata)
    return []  # no extension matched


def distros_for_filename(filename, metadata=None):
    """Yield possible egg or source distribution objects based on a filename"""
    return distros_for_location(
        normalize_path(filename), os.path.basename(filename), metadata
    )


def interpret_distro_name(
        location, basename, metadata, py_version=None, precedence=SOURCE_DIST,
        platform=None
):
    """Generate alternative interpretations of a source distro name

    Note: if `location` is a filesystem filename, you should call
    ``pkg_resources.normalize_path()`` on it before passing it to this
    routine!
    """
    # Generate alternative interpretations of a source distro name
    # Because some packages are ambiguous as to name/versions split
    # e.g. "adns-python-1.1.0", "egenix-mx-commercial", etc.
    # So, we generate each possible interepretation (e.g. "adns, python-1.1.0"
    # "adns-python, 1.1.0", and "adns-python-1.1.0, no version"). In practice,
    # the spurious interpretations should be ignored, because in the event
    # there's also an "adns" package, the spurious "python-1.1.0" version will
    # compare lower than any numeric version number, and is therefore unlikely
    # to match a request for it. It's still a potential problem, though, and
    # in the long run PyPI and the distutils should go for "safe" names and
    # versions in distribution archive names (sdist and bdist).

    parts = basename.split('-')
    if not py_version and any(re.match(r'py\d\.\d$', p) for p in parts[2:]):
        # it is a bdist_dumb, not an sdist -- bail out
        return

    # Try every split point: parts[:p] as the name, parts[p:] as version.
    for p in range(1, len(parts) + 1):
        yield Distribution(
            location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]),
            py_version=py_version, precedence=precedence,
            platform=platform
        )
+
+
# From Python 2.7 docs
def unique_everseen(iterable, key=None):
    "List unique elements, preserving order. Remember all elements ever seen."
    # unique_everseen('AAAABBBCCDAABBB') --> A B C D
    # unique_everseen('ABBCcAD', str.lower) --> A B C D
    # Single loop covering both keyed and unkeyed deduplication: with no
    # key, the element itself serves as its own dedup marker.
    seen = set()
    for element in iterable:
        marker = element if key is None else key(element)
        if marker not in seen:
            seen.add(marker)
            yield element
+
+
def unique_values(func):
    """
    Wrap a function returning an iterable such that the resulting iterable
    only ever yields unique items.
    """

    @wraps(func)
    def wrapper(*args, **kwargs):
        return unique_everseen(func(*args, **kwargs))

    return wrapper


# Matches any tag carrying a rel="..." attribute; group 1 is the tag
# body, group 2 the rel value.
REL = re.compile(r"""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I)
# this line is here to fix emacs' cruddy broken syntax highlighting


@unique_values
def find_external_links(url, page):
    """Find rel="homepage" and rel="download" links in `page`, yielding URLs"""

    # A rel attribute may hold a comma-separated list of link types.
    for match in REL.finditer(page):
        tag, rel = match.groups()
        rels = set(map(str.strip, rel.lower().split(',')))
        if 'homepage' in rels or 'download' in rels:
            for match in HREF.finditer(tag):
                yield urllib.parse.urljoin(url, htmldecode(match.group(1)))

    # Fallback for PyPI's legacy table layout.
    for tag in ("<th>Home Page", "<th>Download URL"):
        pos = page.find(tag)
        if pos != -1:
            match = HREF.search(page, pos)
            if match:
                yield urllib.parse.urljoin(url, htmldecode(match.group(1)))
+
+
class ContentChecker:
    """
    A null content checker that defines the interface for checking content
    """

    def feed(self, block):
        """Feed a block of data to the hash (no-op for the null checker)."""

    def is_valid(self):
        """Check the hash.  The null checker always validates."""
        return True

    def report(self, reporter, template):
        """
        Call reporter with information about the checker (hash name)
        substituted into the template (no-op for the null checker).
        """
+
+
class HashChecker(ContentChecker):
    # Recognizes "<algo>=<hexdigest>" inside a URL fragment.
    pattern = re.compile(
        r'(?P<hash_name>sha1|sha224|sha384|sha256|sha512|md5)='
        r'(?P<expected>[a-f0-9]+)'
    )

    def __init__(self, hash_name, expected):
        self.hash_name = hash_name
        self.hash = hashlib.new(hash_name)
        self.expected = expected

    @classmethod
    def from_url(cls, url):
        "Construct a (possibly null) ContentChecker from a URL"
        fragment = urllib.parse.urlparse(url)[-1]
        if not fragment:
            return ContentChecker()
        match = cls.pattern.search(fragment)
        if not match:
            # Fragment present but not a recognized hash: null checker.
            return ContentChecker()
        return cls(**match.groupdict())

    def feed(self, block):
        self.hash.update(block)

    def is_valid(self):
        # True when the accumulated digest matches the expected one.
        return self.hash.hexdigest() == self.expected

    def report(self, reporter, template):
        msg = template % self.hash_name
        return reporter(msg)
+
+
class PackageIndex(Environment):
    """A distribution index that scans web pages for download URLs"""

    def __init__(
            self, index_url="https://pypi.org/simple/", hosts=('*',),
            ca_bundle=None, verify_ssl=True, *args, **kw
    ):
        Environment.__init__(self, *args, **kw)
        # Slice trick: appends '/' only when missing — the bool indexes
        # either '' (already ends with '/') or '/' out of the string.
        self.index_url = index_url + "/" [:not index_url.endswith('/')]
        self.scanned_urls = {}
        self.fetched_urls = {}
        self.package_pages = {}
        # Compile the allowed-host globs into a single regex matcher.
        self.allows = re.compile('|'.join(map(translate, hosts))).match
        self.to_scan = []
        # Use a certificate-verifying opener only when SSL support and a
        # CA bundle are actually available.
        use_ssl = (
            verify_ssl
            and ssl_support.is_available
            and (ca_bundle or ssl_support.find_ca_bundle())
        )
        if use_ssl:
            self.opener = ssl_support.opener_for(ca_bundle)
        else:
            self.opener = urllib.request.urlopen
+
    def process_url(self, url, retrieve=False):
        """Evaluate a URL as a possible download, and maybe retrieve it"""
        if url in self.scanned_urls and not retrieve:
            return
        self.scanned_urls[url] = True
        if not URL_SCHEME(url):
            # No scheme: treat the argument as a local filename.
            self.process_filename(url)
            return
        else:
            dists = list(distros_for_url(url))
            if dists:
                if not self.url_ok(url):
                    return
                self.debug("Found link: %s", url)

        if dists or not retrieve or url in self.fetched_urls:
            list(map(self.add, dists))
            return  # don't need the actual page

        if not self.url_ok(url):
            self.fetched_urls[url] = True
            return

        self.info("Reading %s", url)
        self.fetched_urls[url] = True  # prevent multiple fetch attempts
        tmpl = "Download error on %s: %%s -- Some packages may not be found!"
        f = self.open_url(url, tmpl % url)
        if f is None:
            return
        self.fetched_urls[f.url] = True
        if 'html' not in f.headers.get('content-type', '').lower():
            f.close()  # not html, we can't process it
            return

        base = f.url  # handle redirects
        page = f.read()
        if not isinstance(page, str):
            # In Python 3 and got bytes but want str.
            if isinstance(f, urllib.error.HTTPError):
                # Errors have no charset, assume latin1:
                charset = 'latin-1'
            else:
                charset = f.headers.get_param('charset') or 'latin-1'
            page = page.decode(charset, "ignore")
        f.close()
        # Recursively queue every link found on the page.
        for match in HREF.finditer(page):
            link = urllib.parse.urljoin(base, htmldecode(match.group(1)))
            self.process_url(link)
        # Index pages get extra processing (unless the fetch 404'd).
        if url.startswith(self.index_url) and getattr(f, 'code', None) != 404:
            page = self.process_index(url, page)
+
    def process_filename(self, fn, nested=False):
        # process filenames or directories
        if not os.path.exists(fn):
            self.warn("Not found: %s", fn)
            return

        if os.path.isdir(fn) and not nested:
            # Scan exactly one directory level: nested=True stops recursion
            # below the first level.
            path = os.path.realpath(fn)
            for item in os.listdir(path):
                self.process_filename(os.path.join(path, item), True)

        dists = distros_for_filename(fn)
        if dists:
            self.debug("Found: %s", fn)
            list(map(self.add, dists))

    def url_ok(self, url, fatal=False):
        # True for file: URLs and for hosts matching the allow-list built in
        # __init__.  Otherwise warns (or raises when fatal=True) and falls
        # through, returning None -- callers only test truthiness.
        s = URL_SCHEME(url)
        is_file = s and s.group(1).lower() == 'file'
        if is_file or self.allows(urllib.parse.urlparse(url)[1]):
            return True
        msg = (
            "\nNote: Bypassing %s (disallowed host; see "
            "http://bit.ly/2hrImnY for details).\n")
        if fatal:
            raise DistutilsError(msg % url)
        else:
            self.warn(msg, url)

    def scan_egg_links(self, search_path):
        # Locate every *.egg-link file directly inside the given directories
        # and feed each (dir, entry) pair to scan_egg_link().
        dirs = filter(os.path.isdir, search_path)
        egg_links = (
            (path, entry)
            for path in dirs
            for entry in os.listdir(path)
            if entry.endswith('.egg-link')
        )
        list(itertools.starmap(self.scan_egg_link, egg_links))
+
    def scan_egg_link(self, path, entry):
        with open(os.path.join(path, entry)) as raw_lines:
            # filter non-empty lines
            lines = list(filter(None, map(str.strip, raw_lines)))

        if len(lines) != 2:
            # format is not recognized; punt
            return

        egg_path, setup_path = lines

        for dist in find_distributions(os.path.join(path, egg_path)):
            # lines == [egg_path, setup_path], so this joins all three parts.
            dist.location = os.path.join(path, *lines)
            dist.precedence = SOURCE_DIST
            self.add(dist)

    def process_index(self, url, page):
        """Process the contents of a PyPI page"""

        def scan(link):
            # Process a URL to see if it's for a package page
            if link.startswith(self.index_url):
                parts = list(map(
                    urllib.parse.unquote, link[len(self.index_url):].split('/')
                ))
                if len(parts) == 2 and '#' not in parts[1]:
                    # it's a package page, sanitize and index it
                    pkg = safe_name(parts[0])
                    ver = safe_version(parts[1])
                    self.package_pages.setdefault(pkg.lower(), {})[link] = True
                    return to_filename(pkg), to_filename(ver)
            return None, None

        # process an index page into the package-page index
        for match in HREF.finditer(page):
            try:
                scan(urllib.parse.urljoin(url, htmldecode(match.group(1))))
            except ValueError:
                pass

        pkg, ver = scan(url)  # ensure this page is in the page index
        if pkg:
            # process individual package page
            for new_url in find_external_links(url, page):
                # Process the found URL
                base, frag = egg_info_for_url(new_url)
                if base.endswith('.py') and not frag:
                    if ver:
                        new_url += '#egg=%s-%s' % (pkg, ver)
                    else:
                        # Bare .py link with no version: a full index scan is
                        # needed before a version can be determined.
                        self.need_version_info(url)
                self.scan_url(new_url)

            # Rewrite links matched by PYPI_MD5 (defined above this view)
            # into '#md5=' fragment form.
            return PYPI_MD5.sub(
                lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1, 3, 2), page
            )
        else:
            return ""  # no sense double-scanning non-package pages
+
    def need_version_info(self, url):
        self.scan_all(
            "Page at %s links to .py file(s) without version info; an index "
            "scan is required.", url
        )

    def scan_all(self, msg=None, *args):
        # Scan the full index at most once: after the first pass, index_url
        # is in fetched_urls and this becomes a no-op.
        if self.index_url not in self.fetched_urls:
            if msg:
                self.warn(msg, *args)
            self.info(
                "Scanning index of all packages (this may take a while)"
            )
            self.scan_url(self.index_url)

    def find_packages(self, requirement):
        self.scan_url(self.index_url + requirement.unsafe_name + '/')

        if not self.package_pages.get(requirement.key):
            # Fall back to safe version of the name
            self.scan_url(self.index_url + requirement.project_name + '/')

        if not self.package_pages.get(requirement.key):
            # We couldn't find the target package, so search the index page too
            self.not_found_in_index(requirement)

        for url in list(self.package_pages.get(requirement.key, ())):
            # scan each page that might be related to the desired package
            self.scan_url(url)

    def obtain(self, requirement, installer=None):
        # Environment hook: try online discovery first, then fall back to
        # the base class behaviour (which may use `installer`).
        self.prescan()
        self.find_packages(requirement)
        for dist in self[requirement.key]:
            if dist in requirement:
                return dist
            self.debug("%s does not match %s", requirement, dist)
        return super(PackageIndex, self).obtain(requirement, installer)
+
    def check_hash(self, checker, filename, tfp):
        """
        checker is a ContentChecker
        """
        checker.report(
            self.debug,
            "Validating %%s checksum for %s" % filename)
        if not checker.is_valid():
            # Checksum mismatch: close and remove the download, then fail.
            tfp.close()
            os.unlink(filename)
            raise DistutilsError(
                "%s validation failed for %s; "
                "possible download problem?"
                % (checker.hash.name, os.path.basename(filename))
            )

    def add_find_links(self, urls):
        """Add `urls` to the list that will be prescanned for searches"""
        for url in urls:
            if (
                self.to_scan is None  # if we have already "gone online"
                or not URL_SCHEME(url)  # or it's a local file/directory
                or url.startswith('file:')
                or list(distros_for_url(url))  # or a direct package link
            ):
                # then go ahead and process it now
                self.scan_url(url)
            else:
                # otherwise, defer retrieval till later
                self.to_scan.append(url)

    def prescan(self):
        """Scan urls scheduled for prescanning (e.g. --find-links)"""
        if self.to_scan:
            list(map(self.scan_url, self.to_scan))
        self.to_scan = None  # from now on, go ahead and process immediately

    def not_found_in_index(self, requirement):
        if self[requirement.key]:  # we've seen at least one distro
            meth, msg = self.info, "Couldn't retrieve index page for %r"
        else:  # no distros seen for this name, might be misspelled
            meth, msg = (
                self.warn,
                "Couldn't find index page for %r (maybe misspelled?)")
        meth(msg, requirement.unsafe_name)
        self.scan_all()
+
    def download(self, spec, tmpdir):
        """Locate and/or download `spec` to `tmpdir`, returning a local path

        `spec` may be a ``Requirement`` object, or a string containing a URL,
        an existing local filename, or a project/version requirement spec
        (i.e. the string form of a ``Requirement`` object). If it is the URL
        of a .py file with an unambiguous ``#egg=name-version`` tag (i.e., one
        that escapes ``-`` as ``_`` throughout), a trivial ``setup.py`` is
        automatically created alongside the downloaded file.

        If `spec` is a ``Requirement`` object or a string containing a
        project/version requirement spec, this method returns the location of
        a matching distribution (possibly after downloading it to `tmpdir`).
        If `spec` is a locally existing file or directory name, it is simply
        returned unchanged. If `spec` is a URL, it is downloaded to a subpath
        of `tmpdir`, and the local filename is returned. Various errors may be
        raised if a problem occurs during downloading.
        """
        if not isinstance(spec, Requirement):
            scheme = URL_SCHEME(spec)
            if scheme:
                # It's a url, download it to tmpdir
                found = self._download_url(scheme.group(1), spec, tmpdir)
                base, fragment = egg_info_for_url(spec)
                if base.endswith('.py'):
                    # Plain .py download: synthesise a setup.py next to it.
                    found = self.gen_setup(found, fragment, tmpdir)
                return found
            elif os.path.exists(spec):
                # Existing file or directory, just return it
                return spec
            else:
                # Plain string requirement spec -> Requirement object.
                spec = parse_requirement_arg(spec)
        return getattr(self.fetch_distribution(spec, tmpdir), 'location', None)
+
    def fetch_distribution(
            self, requirement, tmpdir, force_scan=False, source=False,
            develop_ok=False, local_index=None):
        """Obtain a distribution suitable for fulfilling `requirement`

        `requirement` must be a ``pkg_resources.Requirement`` instance.
        If necessary, or if the `force_scan` flag is set, the requirement is
        searched for in the (online) package index as well as the locally
        installed packages. If a distribution matching `requirement` is found,
        the returned distribution's ``location`` is the value you would have
        gotten from calling the ``download()`` method with the matching
        distribution's URL or filename. If no matching distribution is found,
        ``None`` is returned.

        If the `source` flag is set, only source distributions and source
        checkout links will be considered. Unless the `develop_ok` flag is
        set, development and system eggs (i.e., those using the ``.egg-info``
        format) will be ignored.
        """
        # process a Requirement
        self.info("Searching for %s", requirement)
        skipped = {}
        dist = None

        def find(req, env=None):
            if env is None:
                env = self
            # Find a matching distribution; may be called more than once

            for dist in env[req.key]:

                if dist.precedence == DEVELOP_DIST and not develop_ok:
                    # Warn about each development/system egg only once.
                    if dist not in skipped:
                        self.warn(
                            "Skipping development or system egg: %s", dist,
                        )
                        skipped[dist] = 1
                    continue

                test = (
                    dist in req
                    and (dist.precedence <= SOURCE_DIST or not source)
                )
                if test:
                    # Candidate matches: download it and verify the download
                    # actually landed on disk before accepting.
                    loc = self.download(dist.location, tmpdir)
                    dist.download_location = loc
                    if os.path.exists(dist.download_location):
                        return dist

        # Search stages: optional forced scan, optional local index,
        # deferred find-links (prescan), then a last online lookup.
        if force_scan:
            self.prescan()
            self.find_packages(requirement)
            dist = find(requirement)

        if not dist and local_index is not None:
            dist = find(requirement, local_index)

        if dist is None:
            if self.to_scan is not None:
                self.prescan()
            dist = find(requirement)

        if dist is None and not force_scan:
            self.find_packages(requirement)
            dist = find(requirement)

        if dist is None:
            # Falls through returning None (implicitly) after the warning.
            self.warn(
                "No local packages or working download links found for %s%s",
                (source and "a source distribution of " or ""),
                requirement,
            )
        else:
            self.info("Best match: %s", dist)
            return dist.clone(location=dist.download_location)

    def fetch(self, requirement, tmpdir, force_scan=False, source=False):
        """Obtain a file suitable for fulfilling `requirement`

        DEPRECATED; use the ``fetch_distribution()`` method now instead. For
        backward compatibility, this routine is identical but returns the
        ``location`` of the downloaded distribution instead of a distribution
        object.
        """
        dist = self.fetch_distribution(requirement, tmpdir, force_scan, source)
        if dist is not None:
            return dist.location
        return None
+
    def gen_setup(self, filename, fragment, tmpdir):
        # EGG_FRAGMENT (defined above this view) parses '#egg=name-version'.
        match = EGG_FRAGMENT.match(fragment)
        dists = match and [
            d for d in
            interpret_distro_name(filename, match.group(1), None) if d.version
        ] or []

        if len(dists) == 1:  # unambiguous ``#egg`` fragment
            basename = os.path.basename(filename)

            # Make sure the file has been downloaded to the temp dir.
            if os.path.dirname(filename) != tmpdir:
                dst = os.path.join(tmpdir, basename)
                from setuptools.command.easy_install import samefile
                if not samefile(filename, dst):
                    shutil.copy2(filename, dst)
                    filename = dst

            # Write a trivial setup.py exposing the module for installation.
            with open(os.path.join(tmpdir, 'setup.py'), 'w') as file:
                file.write(
                    "from setuptools import setup\n"
                    "setup(name=%r, version=%r, py_modules=[%r])\n"
                    % (
                        dists[0].project_name, dists[0].version,
                        os.path.splitext(basename)[0]
                    )
                )
            return filename

        elif match:
            raise DistutilsError(
                "Can't unambiguously interpret project/version identifier %r; "
                "any dashes in the name or version should be escaped using "
                "underscores. %r" % (fragment, dists)
            )
        else:
            raise DistutilsError(
                "Can't process plain .py files without an '#egg=name-version'"
                " suffix to enable automatic setup script generation."
            )

    # Read size (bytes) for each chunk of a streamed download.
    dl_blocksize = 8192
+
    def _download_to(self, url, filename):
        self.info("Downloading %s", url)
        # Download the file
        fp = None
        try:
            # Hash expectations (e.g. '#md5=...') come from the URL fragment.
            checker = HashChecker.from_url(url)
            fp = self.open_url(url)
            if isinstance(fp, urllib.error.HTTPError):
                raise DistutilsError(
                    "Can't download %s: %s %s" % (url, fp.code, fp.msg)
                )
            headers = fp.info()
            blocknum = 0
            bs = self.dl_blocksize
            size = -1
            if "content-length" in headers:
                # Some servers return multiple Content-Length headers :(
                sizes = get_all_headers(headers, 'Content-Length')
                size = max(map(int, sizes))
                self.reporthook(url, filename, blocknum, bs, size)
            with open(filename, 'wb') as tfp:
                while True:
                    block = fp.read(bs)
                    if block:
                        checker.feed(block)
                        tfp.write(block)
                        blocknum += 1
                        self.reporthook(url, filename, blocknum, bs, size)
                    else:
                        break
                # Validate while tfp is still open: on failure check_hash
                # closes it and unlinks the file before raising.
                self.check_hash(checker, filename, tfp)
            return headers
        finally:
            if fp:
                fp.close()

    def reporthook(self, url, filename, blocknum, blksize, size):
        # Progress hook; intentionally a no-op here (subclasses may override).
        pass  # no-op
+
    def open_url(self, url, warning=None):
        # Open `url`, normalising the many failure modes: when `warning` is
        # given, failures are logged and None is returned (implicitly);
        # otherwise they are re-raised as DistutilsError.  HTTP error
        # responses are returned as the HTTPError object itself.
        if url.startswith('file:'):
            return local_open(url)
        try:
            return open_with_auth(url, self.opener)
        except (ValueError, http_client.InvalidURL) as v:
            msg = ' '.join([str(arg) for arg in v.args])
            if warning:
                self.warn(warning, msg)
            else:
                raise DistutilsError('%s %s' % (url, msg))
        except urllib.error.HTTPError as v:
            # Callers inspect isinstance(..., HTTPError) to detect this case.
            return v
        except urllib.error.URLError as v:
            if warning:
                self.warn(warning, v.reason)
            else:
                raise DistutilsError("Download error for %s: %s"
                                     % (url, v.reason))
        except http_client.BadStatusLine as v:
            if warning:
                self.warn(warning, v.line)
            else:
                raise DistutilsError(
                    '%s returned a bad status line. The server might be '
                    'down, %s' %
                    (url, v.line)
                )
        except (http_client.HTTPException, socket.error) as v:
            if warning:
                self.warn(warning, v)
            else:
                raise DistutilsError("Download error for %s: %s"
                                     % (url, v))
+
    def _download_url(self, scheme, url, tmpdir):
        # Determine download filename
        #
        name, fragment = egg_info_for_url(url)
        if name:
            while '..' in name:
                # Sanitize path-traversal sequences; also folds backslashes.
                name = name.replace('..', '.').replace('\\', '_')
        else:
            name = "__downloaded__"  # default if URL has no path contents

        if name.endswith('.egg.zip'):
            name = name[:-4]  # strip the extra .zip before download

        filename = os.path.join(tmpdir, name)

        # Download the file
        #
        if scheme == 'svn' or scheme.startswith('svn+'):
            return self._download_svn(url, filename)
        elif scheme == 'git' or scheme.startswith('git+'):
            return self._download_git(url, filename)
        elif scheme.startswith('hg+'):
            return self._download_hg(url, filename)
        elif scheme == 'file':
            # Local file URL: no copy needed, return the path itself.
            return urllib.request.url2pathname(urllib.parse.urlparse(url)[2])
        else:
            self.url_ok(url, True)  # raises error if not allowed
            return self._attempt_download(url, filename)

    def scan_url(self, url):
        # Like process_url(), but always retrieves the page content.
        self.process_url(url, True)
+
    def _attempt_download(self, url, filename):
        # If the server returned an HTML page instead of a distribution
        # file, hand off to _download_html (svn index page or error).
        headers = self._download_to(url, filename)
        if 'html' in headers.get('content-type', '').lower():
            return self._download_html(url, headers, filename)
        else:
            return filename
+
+ def _download_html(self, url, headers, filename):
+ file = open(filename)
+ for line in file:
+ if line.strip():
+ # Check for a subversion index page
+ if re.search(r'<title>([^- ]+ - )?Revision \d+:', line):
+ # it's a subversion index page:
+ file.close()
+ os.unlink(filename)
+ return self._download_svn(url, filename)
+ break # not an index page
+ file.close()
+ os.unlink(filename)
+ raise DistutilsError("Unexpected HTML page found at " + url)
+
    def _download_svn(self, url, filename):
        warnings.warn("SVN download support is deprecated", UserWarning)
        url = url.split('#', 1)[0]  # remove any fragment for svn's sake
        creds = ''
        if url.lower().startswith('svn:') and '@' in url:
            # Extract user[:password]@ credentials embedded in svn: URLs and
            # pass them via command-line switches instead.
            scheme, netloc, path, p, q, f = urllib.parse.urlparse(url)
            if not netloc and path.startswith('//') and '/' in path[2:]:
                netloc, path = path[2:].split('/', 1)
                auth, host = _splituser(netloc)
                if auth:
                    if ':' in auth:
                        user, pw = auth.split(':', 1)
                        # NOTE(review): credentials end up on the command
                        # line (visible in process listings); svn support is
                        # deprecated anyway.
                        creds = " --username=%s --password=%s" % (user, pw)
                    else:
                        creds = " --username=" + auth
                    netloc = host
                    # NOTE(review): 'parts' embeds the full 'url' where the
                    # path component would normally go -- matches the
                    # vendored upstream, but looks suspect; confirm against
                    # setuptools before changing.
                    parts = scheme, netloc, url, p, q, f
                    url = urllib.parse.urlunparse(parts)
        self.info("Doing subversion checkout from %s to %s", url, filename)
        os.system("svn checkout%s -q %s %s" % (creds, url, filename))
        return filename
+
+ @staticmethod
+ def _vcs_split_rev_from_url(url, pop_prefix=False):
+ scheme, netloc, path, query, frag = urllib.parse.urlsplit(url)
+
+ scheme = scheme.split('+', 1)[-1]
+
+ # Some fragment identification fails
+ path = path.split('#', 1)[0]
+
+ rev = None
+ if '@' in path:
+ path, rev = path.rsplit('@', 1)
+
+ # Also, discard fragment
+ url = urllib.parse.urlunsplit((scheme, netloc, path, query, ''))
+
+ return url, rev
+
    def _download_git(self, url, filename):
        # Strip any '#fragment' residue from the checkout directory name.
        filename = filename.split('#', 1)[0]
        url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True)

        self.info("Doing git clone from %s to %s", url, filename)
        # NOTE(review): url/filename are interpolated into a shell command
        # without quoting; metacharacters in a crafted URL could execute
        # commands.  Matches vendored upstream -- consider subprocess with
        # list arguments if this is ever touched.
        os.system("git clone --quiet %s %s" % (url, filename))

        if rev is not None:
            self.info("Checking out %s", rev)
            os.system("git -C %s checkout --quiet %s" % (
                filename,
                rev,
            ))

        return filename

    def _download_hg(self, url, filename):
        # Same flow as _download_git, using Mercurial commands.
        filename = filename.split('#', 1)[0]
        url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True)

        self.info("Doing hg clone from %s to %s", url, filename)
        os.system("hg clone --quiet %s %s" % (url, filename))

        if rev is not None:
            self.info("Updating to %s", rev)
            os.system("hg --cwd %s up -C -r %s -q" % (
                filename,
                rev,
            ))

        return filename
+
    # Logging helpers delegating to the module-level 'log' (imported above
    # this view; presumably distutils.log).
    def debug(self, msg, *args):
        log.debug(msg, *args)

    def info(self, msg, *args):
        log.info(msg, *args)

    def warn(self, msg, *args):
        # NOTE(review): log.warn is the legacy alias (log.warning preferred
        # in newer distutils) -- left as-is to match vendored upstream.
        log.warn(msg, *args)
+
+
# This pattern matches a character entity reference (a decimal numeric
# references, a hexadecimal numeric reference, or a named reference).
entity_sub = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub


def decode_entity(match):
    # Delegate to html.unescape (bound as 'unescape' via the py33compat
    # shim imported above this view).
    what = match.group(0)
    return unescape(what)


def htmldecode(text):
    """
    Decode HTML entities in the given text.

    >>> htmldecode(
    ...     'https://../package_name-0.1.2.tar.gz'
    ...     '?tokena=A&amp;tokenb=B">package_name-0.1.2.tar.gz')
    'https://../package_name-0.1.2.tar.gz?tokena=A&tokenb=B">package_name-0.1.2.tar.gz'
    """
    return entity_sub(decode_entity, text)
+
+
def socket_timeout(timeout=15):
    """Decorator factory: run the wrapped callable with the global socket
    default timeout set to ``timeout`` seconds, restoring the previous
    value afterwards (even on error)."""
    def decorator(func):
        def wrapper(*args, **kwargs):
            previous = socket.getdefaulttimeout()
            socket.setdefaulttimeout(timeout)
            try:
                return func(*args, **kwargs)
            finally:
                socket.setdefaulttimeout(previous)
        return wrapper
    return decorator
+
+
+def _encode_auth(auth):
+ """
+ A function compatible with Python 2.3-3.3 that will encode
+ auth from a URL suitable for an HTTP header.
+ >>> str(_encode_auth('username%3Apassword'))
+ 'dXNlcm5hbWU6cGFzc3dvcmQ='
+
+ Long auth strings should not cause a newline to be inserted.
+ >>> long_auth = 'username:' + 'password'*10
+ >>> chr(10) in str(_encode_auth(long_auth))
+ False
+ """
+ auth_s = urllib.parse.unquote(auth)
+ # convert to bytes
+ auth_bytes = auth_s.encode()
+ encoded_bytes = base64.b64encode(auth_bytes)
+ # convert back to a string
+ encoded = encoded_bytes.decode()
+ # strip the trailing carriage return
+ return encoded.replace('\n', '')
+
+
class Credential:
    """
    A username/password pair. Use like a namedtuple.
    """

    def __init__(self, username, password):
        self.username = username
        self.password = password

    def __iter__(self):
        # Unpacking order matches the constructor: (username, password).
        return iter((self.username, self.password))

    def __str__(self):
        return '{0.username}:{0.password}'.format(self)
+
+
class PyPIConfig(configparser.RawConfigParser):
    """Read-only view of the repository credentials in the user's ~/.pypirc."""

    def __init__(self):
        """
        Load from ~/.pypirc
        """
        # Every option defaults to '' so later .get() calls never raise.
        blank = dict.fromkeys(['username', 'password', 'repository'], '')
        configparser.RawConfigParser.__init__(self, blank)

        rc_path = os.path.join(os.path.expanduser('~'), '.pypirc')
        if os.path.exists(rc_path):
            self.read(rc_path)

    @property
    def creds_by_repository(self):
        """Map repository URL -> Credential, for sections naming a repository."""
        repo_sections = (
            section for section in self.sections()
            if self.get(section, 'repository').strip()
        )
        return dict(self._get_repo_cred(section) for section in repo_sections)

    def _get_repo_cred(self, section):
        repo = self.get(section, 'repository').strip()
        cred = Credential(
            self.get(section, 'username').strip(),
            self.get(section, 'password').strip(),
        )
        return repo, cred

    def find_credential(self, url):
        """
        If the URL indicated appears to be a repository defined in this
        config, return the credential for that repository.
        """
        for repository, cred in self.creds_by_repository.items():
            if url.startswith(repository):
                return cred
+
+
def open_with_auth(url, opener=urllib.request.urlopen):
    """Open a urllib2 request, handling HTTP authentication"""

    parsed = urllib.parse.urlparse(url)
    scheme, netloc, path, params, query, frag = parsed

    # Double scheme does not raise on Mac OS X as revealed by a
    # failing test. We would expect "nonnumeric port". Refs #20.
    if netloc.endswith(':'):
        raise http_client.InvalidURL("nonnumeric port: ''")

    if scheme in ('http', 'https'):
        auth, address = _splituser(netloc)
    else:
        auth = None

    if not auth:
        # No user:pass embedded in the URL; fall back to ~/.pypirc.
        # NOTE(review): if a .pypirc credential matches a non-http(s) URL,
        # 'address' below is unbound -- matches vendored upstream, but
        # looks fragile.
        cred = PyPIConfig().find_credential(url)
        if cred:
            auth = str(cred)
            info = cred.username, url
            log.info('Authenticating as %s for %s (from .pypirc)', *info)

    if auth:
        auth = "Basic " + _encode_auth(auth)
        # Rebuild the URL without the user:pass part before requesting.
        parts = scheme, address, path, params, query, frag
        new_url = urllib.parse.urlunparse(parts)
        request = urllib.request.Request(new_url)
        request.add_header("Authorization", auth)
    else:
        request = urllib.request.Request(url)

    request.add_header('User-Agent', user_agent)
    fp = opener(request)

    if auth:
        # Put authentication info back into request URL if same host,
        # so that links found on the page will work
        s2, h2, path2, param2, query2, frag2 = urllib.parse.urlparse(fp.url)
        if s2 == scheme and h2 == address:
            parts = s2, netloc, path2, param2, query2, frag2
            fp.url = urllib.parse.urlunparse(parts)

    return fp
+
+
+# copy of urllib.parse._splituser from Python 3.8
+def _splituser(host):
+ """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'."""
+ user, delim, host = host.rpartition('@')
+ return (user if delim else None), host
+
+
# adding a timeout to avoid freezing package_index
# (wraps the function once at import time; _SOCKET_TIMEOUT is defined
# earlier in this module, outside this view)
open_with_auth = socket_timeout(_SOCKET_TIMEOUT)(open_with_auth)
+
+
def fix_sf_url(url):
    # No-op retained so older callers keep working; the name suggests it
    # once rewrote SourceForge download URLs.
    return url  # backward compatibility
+
+
def local_open(url):
    """Read a local path, with special support for directories"""
    scheme, server, path, param, query, frag = urllib.parse.urlparse(url)
    filename = urllib.request.url2pathname(path)
    if os.path.isfile(filename):
        return urllib.request.urlopen(url)
    elif path.endswith('/') and os.path.isdir(filename):
        files = []
        for f in os.listdir(filename):
            filepath = os.path.join(filename, f)
            if f == 'index.html':
                # Serve an existing index.html verbatim.
                with open(filepath, 'r') as fp:
                    body = fp.read()
                break
            elif os.path.isdir(filepath):
                f += '/'
            files.append('<a href="{name}">{name}</a>'.format(name=f))
        else:
            # No index.html found: synthesise a minimal directory listing.
            tmpl = (
                "<html><head><title>{url}</title>"
                "</head><body>{files}</body></html>")
            body = tmpl.format(url=url, files='\n'.join(files))
        status, message = 200, "OK"
    else:
        status, message, body = 404, "Path not found", "Not found"

    # All outcomes (including success) are wrapped in an HTTPError, which
    # doubles as a file-like response object.
    headers = {'content-type': 'text/html'}
    body_stream = six.StringIO(body)
    return urllib.error.HTTPError(url, status, message, headers, body_stream)
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/py27compat.py b/monEnvTP/lib/python3.8/site-packages/setuptools/py27compat.py
new file mode 100644
index 0000000000000000000000000000000000000000..1d57360f4eff13cd94a25fec989036a0b0b80523
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/py27compat.py
@@ -0,0 +1,60 @@
+"""
+Compatibility Support for Python 2.7 and earlier
+"""
+
+import sys
+import platform
+
+from setuptools.extern import six
+
+
def get_all_headers(message, key):
    """
    Given an HTTPMessage, return all headers matching a given key.

    Returns a list of values (message.get_all's contract); None when the
    header is absent.
    """
    return message.get_all(key)
+
+
if six.PY2:
    # Python 2's message API spells it getheaders(); shadow the Python 3
    # definition above.
    def get_all_headers(message, key):
        return message.getheaders(key)
+
+
# True only on Linux under Python 2 (the environment affected by the
# bug referenced below).
linux_py2_ascii = (
    platform.system() == 'Linux' and
    six.PY2
)

# Coerce paths to str on affected platforms; identity everywhere else.
rmtree_safe = str if linux_py2_ascii else lambda x: x
"""Workaround for http://bugs.python.org/issue24672"""
+
+
try:
    from ._imp import find_module, PY_COMPILED, PY_FROZEN, PY_SOURCE
    from ._imp import get_frozen_object, get_module
except ImportError:
    # Fallback when the local ._imp helper is unavailable: reimplement on
    # top of the deprecated 'imp' module.
    import imp
    from imp import PY_COMPILED, PY_FROZEN, PY_SOURCE  # noqa

    def find_module(module, paths=None):
        """Just like 'imp.find_module()', but with package support"""
        parts = module.split('.')
        while parts:
            part = parts.pop(0)
            f, path, (suffix, mode, kind) = info = imp.find_module(part, paths)

            if kind == imp.PKG_DIRECTORY:
                # Descend into the package; an empty remainder means the
                # caller asked for the package itself -> its __init__.
                parts = parts or ['__init__']
                paths = [path]

            elif parts:
                raise ImportError("Can't find %r in %s" % (parts, module))

        return info

    def get_frozen_object(module, paths):
        return imp.get_frozen_object(module)

    def get_module(module, paths, info):
        imp.load_module(module, *info)
        return sys.modules[module]
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/py31compat.py b/monEnvTP/lib/python3.8/site-packages/setuptools/py31compat.py
new file mode 100644
index 0000000000000000000000000000000000000000..e1da7ee2a2c56e46e09665d98ba1bc5bfedd2c3e
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/py31compat.py
@@ -0,0 +1,32 @@
__all__ = []

__metaclass__ = type  # Python 2: make classes in this module new-style


try:
    # Python >=3.2
    from tempfile import TemporaryDirectory
except ImportError:
    import shutil
    import tempfile

    class TemporaryDirectory:
        """
        Very simple temporary directory context manager.
        Will try to delete afterward, but will also ignore OS and similar
        errors on deletion.
        """

        def __init__(self, **kwargs):
            self.name = None  # Handle mkdtemp raising an exception
            self.name = tempfile.mkdtemp(**kwargs)

        def __enter__(self):
            return self.name

        def __exit__(self, exctype, excvalue, exctrace):
            try:
                # ignore_errors=True suppresses most removal failures.
                shutil.rmtree(self.name, True)
            except OSError:  # removal errors are not the only possible
                pass
            self.name = None
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/py33compat.py b/monEnvTP/lib/python3.8/site-packages/setuptools/py33compat.py
new file mode 100644
index 0000000000000000000000000000000000000000..cb69443638354b46b43da5bbf187b4f7cba301f1
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/py33compat.py
@@ -0,0 +1,59 @@
+import dis
+import array
+import collections
+
+try:
+ import html
+except ImportError:
+ html = None
+
+from setuptools.extern import six
+from setuptools.extern.six.moves import html_parser
+
+__metaclass__ = type
+
OpArg = collections.namedtuple('OpArg', 'opcode arg')


class Bytecode_compat:
    """Minimal stand-in for dis.Bytecode on Pythons that lack it
    (see the Bytecode binding below this class)."""

    def __init__(self, code):
        self.code = code

    def __iter__(self):
        """Yield '(op,arg)' pair for each operation in code object 'code'"""

        bytes = array.array('b', self.code.co_code)
        eof = len(self.code.co_code)

        ptr = 0
        extended_arg = 0

        while ptr < eof:

            op = bytes[ptr]

            if op >= dis.HAVE_ARGUMENT:

                # Opcode followed by a 2-byte little-endian argument
                # (the bytecode layout used before Python 3.6).
                arg = bytes[ptr + 1] + bytes[ptr + 2] * 256 + extended_arg
                ptr += 3

                if op == dis.EXTENDED_ARG:
                    # Accumulate high-order bits for the next real opcode.
                    long_type = six.integer_types[-1]
                    extended_arg = arg * long_type(65536)
                    continue

            else:
                arg = None
                ptr += 1

            yield OpArg(op, arg)
+
+
# Prefer the real dis.Bytecode when available; otherwise use the shim above.
Bytecode = getattr(dis, 'Bytecode', Bytecode_compat)


# Prefer html.unescape when the html module is importable.
unescape = getattr(html, 'unescape', None)
if unescape is None:
    # HTMLParser.unescape is deprecated since Python 3.4, and will be removed
    # from 3.9.
    unescape = html_parser.HTMLParser().unescape
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/py34compat.py b/monEnvTP/lib/python3.8/site-packages/setuptools/py34compat.py
new file mode 100644
index 0000000000000000000000000000000000000000..3ad917222a4e5bb93fe1c9e8fe1713bcab3630b6
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/py34compat.py
@@ -0,0 +1,13 @@
+import importlib
+
try:
    # 'import importlib' alone does not guarantee the submodule is loaded.
    import importlib.util
except ImportError:
    pass


try:
    module_from_spec = importlib.util.module_from_spec
except AttributeError:
    # Older Pythons without module_from_spec: emulate via the spec's loader.
    def module_from_spec(spec):
        return spec.loader.load_module(spec.name)
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/sandbox.py b/monEnvTP/lib/python3.8/site-packages/setuptools/sandbox.py
new file mode 100644
index 0000000000000000000000000000000000000000..685f3f72e3611a5fa99c999e233ffd179c431a6d
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/sandbox.py
@@ -0,0 +1,491 @@
+import os
+import sys
+import tempfile
+import operator
+import functools
+import itertools
+import re
+import contextlib
+import pickle
+import textwrap
+
+from setuptools.extern import six
+from setuptools.extern.six.moves import builtins, map
+
+import pkg_resources.py31compat
+
+if sys.platform.startswith('java'):
+ import org.python.modules.posix.PosixModule as _os
+else:
+ _os = sys.modules[os.name]
+try:
+ _file = file
+except NameError:
+ _file = None
+_open = open
+from distutils.errors import DistutilsError
+from pkg_resources import working_set
+
+
+__all__ = [
+ "AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup",
+]
+
+
+def _execfile(filename, globals, locals=None):
+ """
+ Python 3 implementation of execfile.
+ """
+ mode = 'rb'
+ with open(filename, mode) as stream:
+ script = stream.read()
+ if locals is None:
+ locals = globals
+ code = compile(script, filename, 'exec')
+ exec(code, globals, locals)
+
+
+@contextlib.contextmanager
+def save_argv(repl=None):
+ saved = sys.argv[:]
+ if repl is not None:
+ sys.argv[:] = repl
+ try:
+ yield saved
+ finally:
+ sys.argv[:] = saved
+
+
+@contextlib.contextmanager
+def save_path():
+ saved = sys.path[:]
+ try:
+ yield saved
+ finally:
+ sys.path[:] = saved
+
+
+@contextlib.contextmanager
+def override_temp(replacement):
+ """
+ Monkey-patch tempfile.tempdir with replacement, ensuring it exists
+ """
+ pkg_resources.py31compat.makedirs(replacement, exist_ok=True)
+
+ saved = tempfile.tempdir
+
+ tempfile.tempdir = replacement
+
+ try:
+ yield
+ finally:
+ tempfile.tempdir = saved
+
+
+@contextlib.contextmanager
+def pushd(target):
+ saved = os.getcwd()
+ os.chdir(target)
+ try:
+ yield saved
+ finally:
+ os.chdir(saved)
+
+
+class UnpickleableException(Exception):
+ """
+ An exception representing another Exception that could not be pickled.
+ """
+
+ @staticmethod
+ def dump(type, exc):
+ """
+ Always return a dumped (pickled) type and exc. If exc can't be pickled,
+ wrap it in UnpickleableException first.
+ """
+ try:
+ return pickle.dumps(type), pickle.dumps(exc)
+ except Exception:
+ # get UnpickleableException inside the sandbox
+ from setuptools.sandbox import UnpickleableException as cls
+ return cls.dump(cls, cls(repr(exc)))
+
+
+class ExceptionSaver:
+ """
+ A Context Manager that will save an exception, serialized, and restore it
+ later.
+ """
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, type, exc, tb):
+ if not exc:
+ return
+
+ # dump the exception
+ self._saved = UnpickleableException.dump(type, exc)
+ self._tb = tb
+
+ # suppress the exception
+ return True
+
+ def resume(self):
+ "restore and re-raise any exception"
+
+ if '_saved' not in vars(self):
+ return
+
+ type, exc = map(pickle.loads, self._saved)
+ six.reraise(type, exc, self._tb)
+
+
+@contextlib.contextmanager
+def save_modules():
+ """
+ Context in which imported modules are saved.
+
+ Translates exceptions internal to the context into the equivalent exception
+ outside the context.
+ """
+ saved = sys.modules.copy()
+ with ExceptionSaver() as saved_exc:
+ yield saved
+
+ sys.modules.update(saved)
+ # remove any modules imported since
+ del_modules = (
+ mod_name for mod_name in sys.modules
+ if mod_name not in saved
+ # exclude any encodings modules. See #285
+ and not mod_name.startswith('encodings.')
+ )
+ _clear_modules(del_modules)
+
+ saved_exc.resume()
+
+
+def _clear_modules(module_names):
+ for mod_name in list(module_names):
+ del sys.modules[mod_name]
+
+
+@contextlib.contextmanager
+def save_pkg_resources_state():
+ saved = pkg_resources.__getstate__()
+ try:
+ yield saved
+ finally:
+ pkg_resources.__setstate__(saved)
+
+
+@contextlib.contextmanager
+def setup_context(setup_dir):
+ temp_dir = os.path.join(setup_dir, 'temp')
+ with save_pkg_resources_state():
+ with save_modules():
+ hide_setuptools()
+ with save_path():
+ with save_argv():
+ with override_temp(temp_dir):
+ with pushd(setup_dir):
+ # ensure setuptools commands are available
+ __import__('setuptools')
+ yield
+
+
+def _needs_hiding(mod_name):
+ """
+ >>> _needs_hiding('setuptools')
+ True
+ >>> _needs_hiding('pkg_resources')
+ True
+ >>> _needs_hiding('setuptools_plugin')
+ False
+ >>> _needs_hiding('setuptools.__init__')
+ True
+ >>> _needs_hiding('distutils')
+ True
+ >>> _needs_hiding('os')
+ False
+ >>> _needs_hiding('Cython')
+ True
+ """
+ pattern = re.compile(r'(setuptools|pkg_resources|distutils|Cython)(\.|$)')
+ return bool(pattern.match(mod_name))
+
+
+def hide_setuptools():
+ """
+ Remove references to setuptools' modules from sys.modules to allow the
+ invocation to import the most appropriate setuptools. This technique is
+ necessary to avoid issues such as #315 where setuptools upgrading itself
+ would fail to find a function declared in the metadata.
+ """
+ modules = filter(_needs_hiding, sys.modules)
+ _clear_modules(modules)
+
+
+def run_setup(setup_script, args):
+ """Run a distutils setup script, sandboxed in its directory"""
+ setup_dir = os.path.abspath(os.path.dirname(setup_script))
+ with setup_context(setup_dir):
+ try:
+ sys.argv[:] = [setup_script] + list(args)
+ sys.path.insert(0, setup_dir)
+ # reset to include setup dir, w/clean callback list
+ working_set.__init__()
+ working_set.callbacks.append(lambda dist: dist.activate())
+
+ # __file__ should be a byte string on Python 2 (#712)
+ dunder_file = (
+ setup_script
+ if isinstance(setup_script, str) else
+ setup_script.encode(sys.getfilesystemencoding())
+ )
+
+ with DirectorySandbox(setup_dir):
+ ns = dict(__file__=dunder_file, __name__='__main__')
+ _execfile(setup_script, ns)
+ except SystemExit as v:
+ if v.args and v.args[0]:
+ raise
+ # Normal exit, just return
+
+
+class AbstractSandbox:
+ """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts"""
+
+ _active = False
+
+ def __init__(self):
+ self._attrs = [
+ name for name in dir(_os)
+ if not name.startswith('_') and hasattr(self, name)
+ ]
+
+ def _copy(self, source):
+ for name in self._attrs:
+ setattr(os, name, getattr(source, name))
+
+ def __enter__(self):
+ self._copy(self)
+ if _file:
+ builtins.file = self._file
+ builtins.open = self._open
+ self._active = True
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self._active = False
+ if _file:
+ builtins.file = _file
+ builtins.open = _open
+ self._copy(_os)
+
+ def run(self, func):
+ """Run 'func' under os sandboxing"""
+ with self:
+ return func()
+
+ def _mk_dual_path_wrapper(name):
+ original = getattr(_os, name)
+
+ def wrap(self, src, dst, *args, **kw):
+ if self._active:
+ src, dst = self._remap_pair(name, src, dst, *args, **kw)
+ return original(src, dst, *args, **kw)
+
+ return wrap
+
+ for name in ["rename", "link", "symlink"]:
+ if hasattr(_os, name):
+ locals()[name] = _mk_dual_path_wrapper(name)
+
+ def _mk_single_path_wrapper(name, original=None):
+ original = original or getattr(_os, name)
+
+ def wrap(self, path, *args, **kw):
+ if self._active:
+ path = self._remap_input(name, path, *args, **kw)
+ return original(path, *args, **kw)
+
+ return wrap
+
+ if _file:
+ _file = _mk_single_path_wrapper('file', _file)
+ _open = _mk_single_path_wrapper('open', _open)
+ for name in [
+ "stat", "listdir", "chdir", "open", "chmod", "chown", "mkdir",
+ "remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat",
+ "startfile", "mkfifo", "mknod", "pathconf", "access"
+ ]:
+ if hasattr(_os, name):
+ locals()[name] = _mk_single_path_wrapper(name)
+
+ def _mk_single_with_return(name):
+ original = getattr(_os, name)
+
+ def wrap(self, path, *args, **kw):
+ if self._active:
+ path = self._remap_input(name, path, *args, **kw)
+ return self._remap_output(name, original(path, *args, **kw))
+ return original(path, *args, **kw)
+
+ return wrap
+
+ for name in ['readlink', 'tempnam']:
+ if hasattr(_os, name):
+ locals()[name] = _mk_single_with_return(name)
+
+ def _mk_query(name):
+ original = getattr(_os, name)
+
+ def wrap(self, *args, **kw):
+ retval = original(*args, **kw)
+ if self._active:
+ return self._remap_output(name, retval)
+ return retval
+
+ return wrap
+
+ for name in ['getcwd', 'tmpnam']:
+ if hasattr(_os, name):
+ locals()[name] = _mk_query(name)
+
+ def _validate_path(self, path):
+ """Called to remap or validate any path, whether input or output"""
+ return path
+
+ def _remap_input(self, operation, path, *args, **kw):
+ """Called for path inputs"""
+ return self._validate_path(path)
+
+ def _remap_output(self, operation, path):
+ """Called for path outputs"""
+ return self._validate_path(path)
+
+ def _remap_pair(self, operation, src, dst, *args, **kw):
+ """Called for path pairs like rename, link, and symlink operations"""
+ return (
+ self._remap_input(operation + '-from', src, *args, **kw),
+ self._remap_input(operation + '-to', dst, *args, **kw)
+ )
+
+
+if hasattr(os, 'devnull'):
+ _EXCEPTIONS = [os.devnull,]
+else:
+ _EXCEPTIONS = []
+
+
+class DirectorySandbox(AbstractSandbox):
+ """Restrict operations to a single subdirectory - pseudo-chroot"""
+
+ write_ops = dict.fromkeys([
+ "open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir",
+ "utime", "lchown", "chroot", "mkfifo", "mknod", "tempnam",
+ ])
+
+ _exception_patterns = [
+ # Allow lib2to3 to attempt to save a pickled grammar object (#121)
+ r'.*lib2to3.*\.pickle$',
+ ]
+ "exempt writing to paths that match the pattern"
+
+ def __init__(self, sandbox, exceptions=_EXCEPTIONS):
+ self._sandbox = os.path.normcase(os.path.realpath(sandbox))
+ self._prefix = os.path.join(self._sandbox, '')
+ self._exceptions = [
+ os.path.normcase(os.path.realpath(path))
+ for path in exceptions
+ ]
+ AbstractSandbox.__init__(self)
+
+ def _violation(self, operation, *args, **kw):
+ from setuptools.sandbox import SandboxViolation
+ raise SandboxViolation(operation, args, kw)
+
+ if _file:
+
+ def _file(self, path, mode='r', *args, **kw):
+ if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path):
+ self._violation("file", path, mode, *args, **kw)
+ return _file(path, mode, *args, **kw)
+
+ def _open(self, path, mode='r', *args, **kw):
+ if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path):
+ self._violation("open", path, mode, *args, **kw)
+ return _open(path, mode, *args, **kw)
+
+ def tmpnam(self):
+ self._violation("tmpnam")
+
+ def _ok(self, path):
+ active = self._active
+ try:
+ self._active = False
+ realpath = os.path.normcase(os.path.realpath(path))
+ return (
+ self._exempted(realpath)
+ or realpath == self._sandbox
+ or realpath.startswith(self._prefix)
+ )
+ finally:
+ self._active = active
+
+ def _exempted(self, filepath):
+ start_matches = (
+ filepath.startswith(exception)
+ for exception in self._exceptions
+ )
+ pattern_matches = (
+ re.match(pattern, filepath)
+ for pattern in self._exception_patterns
+ )
+ candidates = itertools.chain(start_matches, pattern_matches)
+ return any(candidates)
+
+ def _remap_input(self, operation, path, *args, **kw):
+ """Called for path inputs"""
+ if operation in self.write_ops and not self._ok(path):
+ self._violation(operation, os.path.realpath(path), *args, **kw)
+ return path
+
+ def _remap_pair(self, operation, src, dst, *args, **kw):
+ """Called for path pairs like rename, link, and symlink operations"""
+ if not self._ok(src) or not self._ok(dst):
+ self._violation(operation, src, dst, *args, **kw)
+ return (src, dst)
+
+ def open(self, file, flags, mode=0o777, *args, **kw):
+ """Called for low-level os.open()"""
+ if flags & WRITE_FLAGS and not self._ok(file):
+ self._violation("os.open", file, flags, mode, *args, **kw)
+ return _os.open(file, flags, mode, *args, **kw)
+
+
+WRITE_FLAGS = functools.reduce(
+ operator.or_, [getattr(_os, a, 0) for a in
+ "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()]
+)
+
+
+class SandboxViolation(DistutilsError):
+ """A setup script attempted to modify the filesystem outside the sandbox"""
+
+ tmpl = textwrap.dedent("""
+ SandboxViolation: {cmd}{args!r} {kwargs}
+
+ The package setup script has attempted to modify files on your system
+ that are not within the EasyInstall build area, and has been aborted.
+
+ This package cannot be safely installed by EasyInstall, and may not
+ support alternate installation locations even if you run its setup
+ script by hand. Please inform the package's author and the EasyInstall
+ maintainers to find out if a fix or workaround is available.
+ """).lstrip()
+
+ def __str__(self):
+ cmd, args, kwargs = self.args
+ return self.tmpl.format(**locals())
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/script (dev).tmpl b/monEnvTP/lib/python3.8/site-packages/setuptools/script (dev).tmpl
new file mode 100644
index 0000000000000000000000000000000000000000..39a24b04888e79df51e2237577b303a2f901be63
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/script (dev).tmpl
@@ -0,0 +1,6 @@
+# EASY-INSTALL-DEV-SCRIPT: %(spec)r,%(script_name)r
+__requires__ = %(spec)r
+__import__('pkg_resources').require(%(spec)r)
+__file__ = %(dev_path)r
+with open(__file__) as f:
+ exec(compile(f.read(), __file__, 'exec'))
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/script.tmpl b/monEnvTP/lib/python3.8/site-packages/setuptools/script.tmpl
new file mode 100644
index 0000000000000000000000000000000000000000..ff5efbcab3b58063dd84787181c26a95fb663d94
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/script.tmpl
@@ -0,0 +1,3 @@
+# EASY-INSTALL-SCRIPT: %(spec)r,%(script_name)r
+__requires__ = %(spec)r
+__import__('pkg_resources').run_script(%(spec)r, %(script_name)r)
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/site-patch.py b/monEnvTP/lib/python3.8/site-packages/setuptools/site-patch.py
new file mode 100644
index 0000000000000000000000000000000000000000..40b00de0a799686485b266fd92abb9fb100ed718
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/site-patch.py
@@ -0,0 +1,74 @@
+def __boot():
+ import sys
+ import os
+ PYTHONPATH = os.environ.get('PYTHONPATH')
+ if PYTHONPATH is None or (sys.platform == 'win32' and not PYTHONPATH):
+ PYTHONPATH = []
+ else:
+ PYTHONPATH = PYTHONPATH.split(os.pathsep)
+
+ pic = getattr(sys, 'path_importer_cache', {})
+ stdpath = sys.path[len(PYTHONPATH):]
+ mydir = os.path.dirname(__file__)
+
+ for item in stdpath:
+ if item == mydir or not item:
+ continue # skip if current dir. on Windows, or my own directory
+ importer = pic.get(item)
+ if importer is not None:
+ loader = importer.find_module('site')
+ if loader is not None:
+ # This should actually reload the current module
+ loader.load_module('site')
+ break
+ else:
+ try:
+ import imp # Avoid import loop in Python 3
+ stream, path, descr = imp.find_module('site', [item])
+ except ImportError:
+ continue
+ if stream is None:
+ continue
+ try:
+ # This should actually reload the current module
+ imp.load_module('site', stream, path, descr)
+ finally:
+ stream.close()
+ break
+ else:
+ raise ImportError("Couldn't find the real 'site' module")
+
+ known_paths = dict([(makepath(item)[1], 1) for item in sys.path]) # 2.2 comp
+
+ oldpos = getattr(sys, '__egginsert', 0) # save old insertion position
+ sys.__egginsert = 0 # and reset the current one
+
+ for item in PYTHONPATH:
+ addsitedir(item)
+
+ sys.__egginsert += oldpos # restore effective old position
+
+ d, nd = makepath(stdpath[0])
+ insert_at = None
+ new_path = []
+
+ for item in sys.path:
+ p, np = makepath(item)
+
+ if np == nd and insert_at is None:
+ # We've hit the first 'system' path entry, so added entries go here
+ insert_at = len(new_path)
+
+ if np in known_paths or insert_at is None:
+ new_path.append(item)
+ else:
+ # new path after the insert point, back-insert it
+ new_path.insert(insert_at, item)
+ insert_at += 1
+
+ sys.path[:] = new_path
+
+
+if __name__ == 'site':
+ __boot()
+ del __boot
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/ssl_support.py b/monEnvTP/lib/python3.8/site-packages/setuptools/ssl_support.py
new file mode 100644
index 0000000000000000000000000000000000000000..226db694bb38791147c6bf2881c4b86025dd2f8f
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/ssl_support.py
@@ -0,0 +1,260 @@
+import os
+import socket
+import atexit
+import re
+import functools
+
+from setuptools.extern.six.moves import urllib, http_client, map, filter
+
+from pkg_resources import ResolutionError, ExtractionError
+
+try:
+ import ssl
+except ImportError:
+ ssl = None
+
+__all__ = [
+ 'VerifyingHTTPSHandler', 'find_ca_bundle', 'is_available', 'cert_paths',
+ 'opener_for'
+]
+
+cert_paths = """
+/etc/pki/tls/certs/ca-bundle.crt
+/etc/ssl/certs/ca-certificates.crt
+/usr/share/ssl/certs/ca-bundle.crt
+/usr/local/share/certs/ca-root.crt
+/etc/ssl/cert.pem
+/System/Library/OpenSSL/certs/cert.pem
+/usr/local/share/certs/ca-root-nss.crt
+/etc/ssl/ca-bundle.pem
+""".strip().split()
+
+try:
+ HTTPSHandler = urllib.request.HTTPSHandler
+ HTTPSConnection = http_client.HTTPSConnection
+except AttributeError:
+ HTTPSHandler = HTTPSConnection = object
+
+is_available = ssl is not None and object not in (HTTPSHandler, HTTPSConnection)
+
+
+try:
+ from ssl import CertificateError, match_hostname
+except ImportError:
+ try:
+ from backports.ssl_match_hostname import CertificateError
+ from backports.ssl_match_hostname import match_hostname
+ except ImportError:
+ CertificateError = None
+ match_hostname = None
+
+if not CertificateError:
+
+ class CertificateError(ValueError):
+ pass
+
+
+if not match_hostname:
+
+ def _dnsname_match(dn, hostname, max_wildcards=1):
+ """Matching according to RFC 6125, section 6.4.3
+
+ https://tools.ietf.org/html/rfc6125#section-6.4.3
+ """
+ pats = []
+ if not dn:
+ return False
+
+ # Ported from python3-syntax:
+ # leftmost, *remainder = dn.split(r'.')
+ parts = dn.split(r'.')
+ leftmost = parts[0]
+ remainder = parts[1:]
+
+ wildcards = leftmost.count('*')
+ if wildcards > max_wildcards:
+ # Issue #17980: avoid denials of service by refusing more
+ # than one wildcard per fragment. A survey of established
+ # policy among SSL implementations showed it to be a
+ # reasonable choice.
+ raise CertificateError(
+ "too many wildcards in certificate DNS name: " + repr(dn))
+
+ # speed up common case w/o wildcards
+ if not wildcards:
+ return dn.lower() == hostname.lower()
+
+ # RFC 6125, section 6.4.3, subitem 1.
+ # The client SHOULD NOT attempt to match a presented identifier in which
+ # the wildcard character comprises a label other than the left-most label.
+ if leftmost == '*':
+ # When '*' is a fragment by itself, it matches a non-empty dotless
+ # fragment.
+ pats.append('[^.]+')
+ elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
+ # RFC 6125, section 6.4.3, subitem 3.
+ # The client SHOULD NOT attempt to match a presented identifier
+ # where the wildcard character is embedded within an A-label or
+ # U-label of an internationalized domain name.
+ pats.append(re.escape(leftmost))
+ else:
+ # Otherwise, '*' matches any dotless string, e.g. www*
+ pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
+
+ # add the remaining fragments, ignore any wildcards
+ for frag in remainder:
+ pats.append(re.escape(frag))
+
+ pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
+ return pat.match(hostname)
+
+ def match_hostname(cert, hostname):
+ """Verify that *cert* (in decoded format as returned by
+ SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
+ rules are followed, but IP addresses are not accepted for *hostname*.
+
+ CertificateError is raised on failure. On success, the function
+ returns nothing.
+ """
+ if not cert:
+ raise ValueError("empty or no certificate")
+ dnsnames = []
+ san = cert.get('subjectAltName', ())
+ for key, value in san:
+ if key == 'DNS':
+ if _dnsname_match(value, hostname):
+ return
+ dnsnames.append(value)
+ if not dnsnames:
+ # The subject is only checked when there is no dNSName entry
+ # in subjectAltName
+ for sub in cert.get('subject', ()):
+ for key, value in sub:
+ # XXX according to RFC 2818, the most specific Common Name
+ # must be used.
+ if key == 'commonName':
+ if _dnsname_match(value, hostname):
+ return
+ dnsnames.append(value)
+ if len(dnsnames) > 1:
+ raise CertificateError("hostname %r "
+ "doesn't match either of %s"
+ % (hostname, ', '.join(map(repr, dnsnames))))
+ elif len(dnsnames) == 1:
+ raise CertificateError("hostname %r "
+ "doesn't match %r"
+ % (hostname, dnsnames[0]))
+ else:
+ raise CertificateError("no appropriate commonName or "
+ "subjectAltName fields were found")
+
+
+class VerifyingHTTPSHandler(HTTPSHandler):
+ """Simple verifying handler: no auth, subclasses, timeouts, etc."""
+
+ def __init__(self, ca_bundle):
+ self.ca_bundle = ca_bundle
+ HTTPSHandler.__init__(self)
+
+ def https_open(self, req):
+ return self.do_open(
+ lambda host, **kw: VerifyingHTTPSConn(host, self.ca_bundle, **kw), req
+ )
+
+
+class VerifyingHTTPSConn(HTTPSConnection):
+ """Simple verifying connection: no auth, subclasses, timeouts, etc."""
+
+ def __init__(self, host, ca_bundle, **kw):
+ HTTPSConnection.__init__(self, host, **kw)
+ self.ca_bundle = ca_bundle
+
+ def connect(self):
+ sock = socket.create_connection(
+ (self.host, self.port), getattr(self, 'source_address', None)
+ )
+
+ # Handle the socket if a (proxy) tunnel is present
+ if hasattr(self, '_tunnel') and getattr(self, '_tunnel_host', None):
+ self.sock = sock
+ self._tunnel()
+ # http://bugs.python.org/issue7776: Python>=3.4.1 and >=2.7.7
+ # change self.host to mean the proxy server host when tunneling is
+ # being used. Adapt, since we are interested in the destination
+ # host for the match_hostname() comparison.
+ actual_host = self._tunnel_host
+ else:
+ actual_host = self.host
+
+ if hasattr(ssl, 'create_default_context'):
+ ctx = ssl.create_default_context(cafile=self.ca_bundle)
+ self.sock = ctx.wrap_socket(sock, server_hostname=actual_host)
+ else:
+ # This is for python < 2.7.9 and < 3.4?
+ self.sock = ssl.wrap_socket(
+ sock, cert_reqs=ssl.CERT_REQUIRED, ca_certs=self.ca_bundle
+ )
+ try:
+ match_hostname(self.sock.getpeercert(), actual_host)
+ except CertificateError:
+ self.sock.shutdown(socket.SHUT_RDWR)
+ self.sock.close()
+ raise
+
+
+def opener_for(ca_bundle=None):
+ """Get a urlopen() replacement that uses ca_bundle for verification"""
+ return urllib.request.build_opener(
+ VerifyingHTTPSHandler(ca_bundle or find_ca_bundle())
+ ).open
+
+
+# from jaraco.functools
+def once(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ if not hasattr(func, 'always_returns'):
+ func.always_returns = func(*args, **kwargs)
+ return func.always_returns
+ return wrapper
+
+
+@once
+def get_win_certfile():
+ try:
+ import wincertstore
+ except ImportError:
+ return None
+
+ class CertFile(wincertstore.CertFile):
+ def __init__(self):
+ super(CertFile, self).__init__()
+ atexit.register(self.close)
+
+ def close(self):
+ try:
+ super(CertFile, self).close()
+ except OSError:
+ pass
+
+ _wincerts = CertFile()
+ _wincerts.addstore('CA')
+ _wincerts.addstore('ROOT')
+ return _wincerts.name
+
+
+def find_ca_bundle():
+ """Return an existing CA bundle path, or None"""
+ extant_cert_paths = filter(os.path.isfile, cert_paths)
+ return (
+ get_win_certfile()
+ or next(extant_cert_paths, None)
+ or _certifi_where()
+ )
+
+
+def _certifi_where():
+ try:
+ return __import__('certifi').where()
+ except (ImportError, ResolutionError, ExtractionError):
+ pass
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/unicode_utils.py b/monEnvTP/lib/python3.8/site-packages/setuptools/unicode_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..7c63efd20b350358ab25c079166dbb00ef49f8d2
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/unicode_utils.py
@@ -0,0 +1,44 @@
+import unicodedata
+import sys
+
+from setuptools.extern import six
+
+
+# HFS Plus uses decomposed UTF-8
+def decompose(path):
+ if isinstance(path, six.text_type):
+ return unicodedata.normalize('NFD', path)
+ try:
+ path = path.decode('utf-8')
+ path = unicodedata.normalize('NFD', path)
+ path = path.encode('utf-8')
+ except UnicodeError:
+ pass # Not UTF-8
+ return path
+
+
+def filesys_decode(path):
+ """
+    Ensure that the given path is decoded,
+    returning None when no expected encoding works.
+ """
+
+ if isinstance(path, six.text_type):
+ return path
+
+ fs_enc = sys.getfilesystemencoding() or 'utf-8'
+ candidates = fs_enc, 'utf-8'
+
+ for enc in candidates:
+ try:
+ return path.decode(enc)
+ except UnicodeDecodeError:
+ continue
+
+
+def try_encode(string, enc):
+ "turn unicode encoding into a functional routine"
+ try:
+ return string.encode(enc)
+ except UnicodeEncodeError:
+ return None
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/version.py b/monEnvTP/lib/python3.8/site-packages/setuptools/version.py
new file mode 100644
index 0000000000000000000000000000000000000000..95e1869658566aac3060562d8cd5a6b647887d1e
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/version.py
@@ -0,0 +1,6 @@
+import pkg_resources
+
+try:
+ __version__ = pkg_resources.get_distribution('setuptools').version
+except Exception:
+ __version__ = 'unknown'
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/wheel.py b/monEnvTP/lib/python3.8/site-packages/setuptools/wheel.py
new file mode 100644
index 0000000000000000000000000000000000000000..025aaa828a24cb7746e5fac9b66984d5b9794bc3
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/wheel.py
@@ -0,0 +1,220 @@
+"""Wheels support."""
+
+from distutils.util import get_platform
+from distutils import log
+import email
+import itertools
+import os
+import posixpath
+import re
+import zipfile
+
+import pkg_resources
+import setuptools
+from pkg_resources import parse_version
+from setuptools.extern.packaging.tags import sys_tags
+from setuptools.extern.packaging.utils import canonicalize_name
+from setuptools.extern.six import PY3
+from setuptools.command.egg_info import write_requirements
+
+
+__metaclass__ = type
+
+
+WHEEL_NAME = re.compile(
+ r"""^(?P<project_name>.+?)-(?P<version>\d.*?)
+ ((-(?P<build>\d.*?))?-(?P<py_version>.+?)-(?P<abi>.+?)-(?P<platform>.+?)
+ )\.whl$""",
+ re.VERBOSE).match
+
+NAMESPACE_PACKAGE_INIT = '''\
+try:
+ __import__('pkg_resources').declare_namespace(__name__)
+except ImportError:
+ __path__ = __import__('pkgutil').extend_path(__path__, __name__)
+'''
+
+
+def unpack(src_dir, dst_dir):
+ '''Move everything under `src_dir` to `dst_dir`, and delete the former.'''
+ for dirpath, dirnames, filenames in os.walk(src_dir):
+ subdir = os.path.relpath(dirpath, src_dir)
+ for f in filenames:
+ src = os.path.join(dirpath, f)
+ dst = os.path.join(dst_dir, subdir, f)
+ os.renames(src, dst)
+ for n, d in reversed(list(enumerate(dirnames))):
+ src = os.path.join(dirpath, d)
+ dst = os.path.join(dst_dir, subdir, d)
+ if not os.path.exists(dst):
+ # Directory does not exist in destination,
+ # rename it and prune it from os.walk list.
+ os.renames(src, dst)
+ del dirnames[n]
+ # Cleanup.
+ for dirpath, dirnames, filenames in os.walk(src_dir, topdown=True):
+ assert not filenames
+ os.rmdir(dirpath)
+
+
+class Wheel:
+
+ def __init__(self, filename):
+ match = WHEEL_NAME(os.path.basename(filename))
+ if match is None:
+ raise ValueError('invalid wheel name: %r' % filename)
+ self.filename = filename
+ for k, v in match.groupdict().items():
+ setattr(self, k, v)
+
+ def tags(self):
+ '''List tags (py_version, abi, platform) supported by this wheel.'''
+ return itertools.product(
+ self.py_version.split('.'),
+ self.abi.split('.'),
+ self.platform.split('.'),
+ )
+
+ def is_compatible(self):
+        '''Is the wheel compatible with the current platform?'''
+ supported_tags = set((t.interpreter, t.abi, t.platform) for t in sys_tags())
+ return next((True for t in self.tags() if t in supported_tags), False)
+
+ def egg_name(self):
+ return pkg_resources.Distribution(
+ project_name=self.project_name, version=self.version,
+ platform=(None if self.platform == 'any' else get_platform()),
+ ).egg_name() + '.egg'
+
+ def get_dist_info(self, zf):
+ # find the correct name of the .dist-info dir in the wheel file
+ for member in zf.namelist():
+ dirname = posixpath.dirname(member)
+ if (dirname.endswith('.dist-info') and
+ canonicalize_name(dirname).startswith(
+ canonicalize_name(self.project_name))):
+ return dirname
+ raise ValueError("unsupported wheel format. .dist-info not found")
+
+ def install_as_egg(self, destination_eggdir):
+ '''Install wheel as an egg directory.'''
+ with zipfile.ZipFile(self.filename) as zf:
+ self._install_as_egg(destination_eggdir, zf)
+
+ def _install_as_egg(self, destination_eggdir, zf):
+ dist_basename = '%s-%s' % (self.project_name, self.version)
+ dist_info = self.get_dist_info(zf)
+ dist_data = '%s.data' % dist_basename
+ egg_info = os.path.join(destination_eggdir, 'EGG-INFO')
+
+ self._convert_metadata(zf, destination_eggdir, dist_info, egg_info)
+ self._move_data_entries(destination_eggdir, dist_data)
+ self._fix_namespace_packages(egg_info, destination_eggdir)
+
+ @staticmethod
+ def _convert_metadata(zf, destination_eggdir, dist_info, egg_info):
+ def get_metadata(name):
+ with zf.open(posixpath.join(dist_info, name)) as fp:
+ value = fp.read().decode('utf-8') if PY3 else fp.read()
+ return email.parser.Parser().parsestr(value)
+
+ wheel_metadata = get_metadata('WHEEL')
+ # Check wheel format version is supported.
+ wheel_version = parse_version(wheel_metadata.get('Wheel-Version'))
+ wheel_v1 = (
+ parse_version('1.0') <= wheel_version < parse_version('2.0dev0')
+ )
+ if not wheel_v1:
+ raise ValueError(
+ 'unsupported wheel format version: %s' % wheel_version)
+ # Extract to target directory.
+ os.mkdir(destination_eggdir)
+ zf.extractall(destination_eggdir)
+ # Convert metadata.
+ dist_info = os.path.join(destination_eggdir, dist_info)
+ dist = pkg_resources.Distribution.from_location(
+ destination_eggdir, dist_info,
+ metadata=pkg_resources.PathMetadata(destination_eggdir, dist_info),
+ )
+
+ # Note: Evaluate and strip markers now,
+ # as it's difficult to convert back from the syntax:
+ # foobar; "linux" in sys_platform and extra == 'test'
+ def raw_req(req):
+ req.marker = None
+ return str(req)
+ install_requires = list(sorted(map(raw_req, dist.requires())))
+ extras_require = {
+ extra: sorted(
+ req
+ for req in map(raw_req, dist.requires((extra,)))
+ if req not in install_requires
+ )
+ for extra in dist.extras
+ }
+ os.rename(dist_info, egg_info)
+ os.rename(
+ os.path.join(egg_info, 'METADATA'),
+ os.path.join(egg_info, 'PKG-INFO'),
+ )
+ setup_dist = setuptools.Distribution(
+ attrs=dict(
+ install_requires=install_requires,
+ extras_require=extras_require,
+ ),
+ )
+ # Temporarily disable info traces.
+ log_threshold = log._global_log.threshold
+ log.set_threshold(log.WARN)
+ try:
+ write_requirements(
+ setup_dist.get_command_obj('egg_info'),
+ None,
+ os.path.join(egg_info, 'requires.txt'),
+ )
+ finally:
+ log.set_threshold(log_threshold)
+
+ @staticmethod
+ def _move_data_entries(destination_eggdir, dist_data):
+ """Move data entries to their correct location."""
+ dist_data = os.path.join(destination_eggdir, dist_data)
+ dist_data_scripts = os.path.join(dist_data, 'scripts')
+ if os.path.exists(dist_data_scripts):
+ egg_info_scripts = os.path.join(
+ destination_eggdir, 'EGG-INFO', 'scripts')
+ os.mkdir(egg_info_scripts)
+ for entry in os.listdir(dist_data_scripts):
+ # Remove bytecode, as it's not properly handled
+ # during easy_install scripts install phase.
+ if entry.endswith('.pyc'):
+ os.unlink(os.path.join(dist_data_scripts, entry))
+ else:
+ os.rename(
+ os.path.join(dist_data_scripts, entry),
+ os.path.join(egg_info_scripts, entry),
+ )
+ os.rmdir(dist_data_scripts)
+ for subdir in filter(os.path.exists, (
+ os.path.join(dist_data, d)
+ for d in ('data', 'headers', 'purelib', 'platlib')
+ )):
+ unpack(subdir, destination_eggdir)
+ if os.path.exists(dist_data):
+ os.rmdir(dist_data)
+
+ @staticmethod
+ def _fix_namespace_packages(egg_info, destination_eggdir):
+ namespace_packages = os.path.join(
+ egg_info, 'namespace_packages.txt')
+ if os.path.exists(namespace_packages):
+ with open(namespace_packages) as fp:
+ namespace_packages = fp.read().split()
+ for mod in namespace_packages:
+ mod_dir = os.path.join(destination_eggdir, *mod.split('.'))
+ mod_init = os.path.join(mod_dir, '__init__.py')
+ if not os.path.exists(mod_dir):
+ os.mkdir(mod_dir)
+ if not os.path.exists(mod_init):
+ with open(mod_init, 'w') as fp:
+ fp.write(NAMESPACE_PACKAGE_INIT)
diff --git a/monEnvTP/lib/python3.8/site-packages/setuptools/windows_support.py b/monEnvTP/lib/python3.8/site-packages/setuptools/windows_support.py
new file mode 100644
index 0000000000000000000000000000000000000000..cb977cff9545ef5d48ad7cf13f2cbe1ebc3e7cd0
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/setuptools/windows_support.py
@@ -0,0 +1,29 @@
+import platform
+import ctypes
+
+
+def windows_only(func):
+ if platform.system() != 'Windows':
+ return lambda *args, **kwargs: None
+ return func
+
+
+@windows_only
+def hide_file(path):
+ """
+ Set the hidden attribute on a file or directory.
+
+ From http://stackoverflow.com/questions/19622133/
+
+ `path` must be text.
+ """
+ __import__('ctypes.wintypes')
+ SetFileAttributes = ctypes.windll.kernel32.SetFileAttributesW
+ SetFileAttributes.argtypes = ctypes.wintypes.LPWSTR, ctypes.wintypes.DWORD
+ SetFileAttributes.restype = ctypes.wintypes.BOOL
+
+ FILE_ATTRIBUTE_HIDDEN = 0x02
+
+ ret = SetFileAttributes(path, FILE_ATTRIBUTE_HIDDEN)
+ if not ret:
+ raise ctypes.WinError()
diff --git a/monEnvTP/lib/python3.8/site-packages/six-1.16.0.dist-info/INSTALLER b/monEnvTP/lib/python3.8/site-packages/six-1.16.0.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/six-1.16.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/monEnvTP/lib/python3.8/site-packages/six-1.16.0.dist-info/LICENSE b/monEnvTP/lib/python3.8/site-packages/six-1.16.0.dist-info/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..de6633112c1f9951fd688e1fb43457a1ec11d6d8
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/six-1.16.0.dist-info/LICENSE
@@ -0,0 +1,18 @@
+Copyright (c) 2010-2020 Benjamin Peterson
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/monEnvTP/lib/python3.8/site-packages/six-1.16.0.dist-info/METADATA b/monEnvTP/lib/python3.8/site-packages/six-1.16.0.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..6d7525c2ebcfe25cb6787579bf5324da1fd6f28a
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/six-1.16.0.dist-info/METADATA
@@ -0,0 +1,49 @@
+Metadata-Version: 2.1
+Name: six
+Version: 1.16.0
+Summary: Python 2 and 3 compatibility utilities
+Home-page: https://github.com/benjaminp/six
+Author: Benjamin Peterson
+Author-email: benjamin@python.org
+License: MIT
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 3
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Utilities
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*
+
+.. image:: https://img.shields.io/pypi/v/six.svg
+ :target: https://pypi.org/project/six/
+ :alt: six on PyPI
+
+.. image:: https://travis-ci.org/benjaminp/six.svg?branch=master
+ :target: https://travis-ci.org/benjaminp/six
+ :alt: six on TravisCI
+
+.. image:: https://readthedocs.org/projects/six/badge/?version=latest
+ :target: https://six.readthedocs.io/
+ :alt: six's documentation on Read the Docs
+
+.. image:: https://img.shields.io/badge/license-MIT-green.svg
+ :target: https://github.com/benjaminp/six/blob/master/LICENSE
+ :alt: MIT License badge
+
+Six is a Python 2 and 3 compatibility library. It provides utility functions
+for smoothing over the differences between the Python versions with the goal of
+writing Python code that is compatible on both Python versions. See the
+documentation for more information on what is provided.
+
+Six supports Python 2.7 and 3.3+. It is contained in only one Python
+file, so it can be easily copied into your project. (The copyright and license
+notice must be retained.)
+
+Online documentation is at https://six.readthedocs.io/.
+
+Bugs can be reported to https://github.com/benjaminp/six. The code can also
+be found there.
+
+
diff --git a/monEnvTP/lib/python3.8/site-packages/six-1.16.0.dist-info/RECORD b/monEnvTP/lib/python3.8/site-packages/six-1.16.0.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..4de46ba2c6eaa5927d4952f084354321c523fc7d
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/six-1.16.0.dist-info/RECORD
@@ -0,0 +1,8 @@
+__pycache__/six.cpython-38.pyc,,
+six-1.16.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+six-1.16.0.dist-info/LICENSE,sha256=i7hQxWWqOJ_cFvOkaWWtI9gq3_YPI5P8J2K2MYXo5sk,1066
+six-1.16.0.dist-info/METADATA,sha256=VQcGIFCAEmfZcl77E5riPCN4v2TIsc_qtacnjxKHJoI,1795
+six-1.16.0.dist-info/RECORD,,
+six-1.16.0.dist-info/WHEEL,sha256=Z-nyYpwrcSqxfdux5Mbn_DQ525iP7J2DG3JgGvOYyTQ,110
+six-1.16.0.dist-info/top_level.txt,sha256=_iVH_iYEtEXnD8nYGQYpYFUvkUW9sEO1GYbkeKSAais,4
+six.py,sha256=TOOfQi7nFGfMrIvtdr6wX4wyHH8M7aknmuLfo2cBBrM,34549
diff --git a/monEnvTP/lib/python3.8/site-packages/six-1.16.0.dist-info/WHEEL b/monEnvTP/lib/python3.8/site-packages/six-1.16.0.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..01b8fc7d4a10cb8b4f1d21f11d3398d07d6b3478
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/six-1.16.0.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.36.2)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/monEnvTP/lib/python3.8/site-packages/six-1.16.0.dist-info/top_level.txt b/monEnvTP/lib/python3.8/site-packages/six-1.16.0.dist-info/top_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..ffe2fce498955b628014618b28c6bcf152466a4a
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/six-1.16.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+six
diff --git a/monEnvTP/lib/python3.8/site-packages/six.py b/monEnvTP/lib/python3.8/site-packages/six.py
new file mode 100644
index 0000000000000000000000000000000000000000..4e15675d8b5caa33255fe37271700f587bd26671
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/six.py
@@ -0,0 +1,998 @@
+# Copyright (c) 2010-2020 Benjamin Peterson
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""Utilities for writing code that runs on Python 2 and 3"""
+
+from __future__ import absolute_import
+
+import functools
+import itertools
+import operator
+import sys
+import types
+
+__author__ = "Benjamin Peterson <benjamin@python.org>"
+__version__ = "1.16.0"
+
+
+# Useful for very coarse version differentiation.
+PY2 = sys.version_info[0] == 2
+PY3 = sys.version_info[0] == 3
+PY34 = sys.version_info[0:2] >= (3, 4)
+
+if PY3:
+ string_types = str,
+ integer_types = int,
+ class_types = type,
+ text_type = str
+ binary_type = bytes
+
+ MAXSIZE = sys.maxsize
+else:
+ string_types = basestring,
+ integer_types = (int, long)
+ class_types = (type, types.ClassType)
+ text_type = unicode
+ binary_type = str
+
+ if sys.platform.startswith("java"):
+ # Jython always uses 32 bits.
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
+ class X(object):
+
+ def __len__(self):
+ return 1 << 31
+ try:
+ len(X())
+ except OverflowError:
+ # 32-bit
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # 64-bit
+ MAXSIZE = int((1 << 63) - 1)
+ del X
+
+if PY34:
+ from importlib.util import spec_from_loader
+else:
+ spec_from_loader = None
+
+
+def _add_doc(func, doc):
+ """Add documentation to a function."""
+ func.__doc__ = doc
+
+
+def _import_module(name):
+ """Import module, returning the module after the last dot."""
+ __import__(name)
+ return sys.modules[name]
+
+
+class _LazyDescr(object):
+
+ def __init__(self, name):
+ self.name = name
+
+ def __get__(self, obj, tp):
+ result = self._resolve()
+ setattr(obj, self.name, result) # Invokes __set__.
+ try:
+ # This is a bit ugly, but it avoids running this again by
+ # removing this descriptor.
+ delattr(obj.__class__, self.name)
+ except AttributeError:
+ pass
+ return result
+
+
+class MovedModule(_LazyDescr):
+
+ def __init__(self, name, old, new=None):
+ super(MovedModule, self).__init__(name)
+ if PY3:
+ if new is None:
+ new = name
+ self.mod = new
+ else:
+ self.mod = old
+
+ def _resolve(self):
+ return _import_module(self.mod)
+
+ def __getattr__(self, attr):
+ _module = self._resolve()
+ value = getattr(_module, attr)
+ setattr(self, attr, value)
+ return value
+
+
+class _LazyModule(types.ModuleType):
+
+ def __init__(self, name):
+ super(_LazyModule, self).__init__(name)
+ self.__doc__ = self.__class__.__doc__
+
+ def __dir__(self):
+ attrs = ["__doc__", "__name__"]
+ attrs += [attr.name for attr in self._moved_attributes]
+ return attrs
+
+ # Subclasses should override this
+ _moved_attributes = []
+
+
+class MovedAttribute(_LazyDescr):
+
+ def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
+ super(MovedAttribute, self).__init__(name)
+ if PY3:
+ if new_mod is None:
+ new_mod = name
+ self.mod = new_mod
+ if new_attr is None:
+ if old_attr is None:
+ new_attr = name
+ else:
+ new_attr = old_attr
+ self.attr = new_attr
+ else:
+ self.mod = old_mod
+ if old_attr is None:
+ old_attr = name
+ self.attr = old_attr
+
+ def _resolve(self):
+ module = _import_module(self.mod)
+ return getattr(module, self.attr)
+
+
+class _SixMetaPathImporter(object):
+
+ """
+ A meta path importer to import six.moves and its submodules.
+
+ This class implements a PEP302 finder and loader. It should be compatible
+ with Python 2.5 and all existing versions of Python3
+ """
+
+ def __init__(self, six_module_name):
+ self.name = six_module_name
+ self.known_modules = {}
+
+ def _add_module(self, mod, *fullnames):
+ for fullname in fullnames:
+ self.known_modules[self.name + "." + fullname] = mod
+
+ def _get_module(self, fullname):
+ return self.known_modules[self.name + "." + fullname]
+
+ def find_module(self, fullname, path=None):
+ if fullname in self.known_modules:
+ return self
+ return None
+
+ def find_spec(self, fullname, path, target=None):
+ if fullname in self.known_modules:
+ return spec_from_loader(fullname, self)
+ return None
+
+ def __get_module(self, fullname):
+ try:
+ return self.known_modules[fullname]
+ except KeyError:
+ raise ImportError("This loader does not know module " + fullname)
+
+ def load_module(self, fullname):
+ try:
+ # in case of a reload
+ return sys.modules[fullname]
+ except KeyError:
+ pass
+ mod = self.__get_module(fullname)
+ if isinstance(mod, MovedModule):
+ mod = mod._resolve()
+ else:
+ mod.__loader__ = self
+ sys.modules[fullname] = mod
+ return mod
+
+ def is_package(self, fullname):
+ """
+ Return true, if the named module is a package.
+
+ We need this method to get correct spec objects with
+ Python 3.4 (see PEP451)
+ """
+ return hasattr(self.__get_module(fullname), "__path__")
+
+ def get_code(self, fullname):
+ """Return None
+
+ Required, if is_package is implemented"""
+ self.__get_module(fullname) # eventually raises ImportError
+ return None
+ get_source = get_code # same as get_code
+
+ def create_module(self, spec):
+ return self.load_module(spec.name)
+
+ def exec_module(self, module):
+ pass
+
+_importer = _SixMetaPathImporter(__name__)
+
+
+class _MovedItems(_LazyModule):
+
+ """Lazy loading of moved objects"""
+ __path__ = [] # mark as package
+
+
+_moved_attributes = [
+ MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
+ MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
+ MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
+ MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
+ MovedAttribute("intern", "__builtin__", "sys"),
+ MovedAttribute("map", "itertools", "builtins", "imap", "map"),
+ MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
+ MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
+ MovedAttribute("getoutput", "commands", "subprocess"),
+ MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
+ MovedAttribute("reduce", "__builtin__", "functools"),
+ MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
+ MovedAttribute("StringIO", "StringIO", "io"),
+ MovedAttribute("UserDict", "UserDict", "collections"),
+ MovedAttribute("UserList", "UserList", "collections"),
+ MovedAttribute("UserString", "UserString", "collections"),
+ MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
+ MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
+ MovedModule("builtins", "__builtin__"),
+ MovedModule("configparser", "ConfigParser"),
+ MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, 3) else "collections"),
+ MovedModule("copyreg", "copy_reg"),
+ MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
+ MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"),
+ MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread" if sys.version_info < (3, 9) else "_thread"),
+ MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
+ MovedModule("http_cookies", "Cookie", "http.cookies"),
+ MovedModule("html_entities", "htmlentitydefs", "html.entities"),
+ MovedModule("html_parser", "HTMLParser", "html.parser"),
+ MovedModule("http_client", "httplib", "http.client"),
+ MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
+ MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"),
+ MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
+ MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
+ MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
+ MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
+ MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
+ MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
+ MovedModule("cPickle", "cPickle", "pickle"),
+ MovedModule("queue", "Queue"),
+ MovedModule("reprlib", "repr"),
+ MovedModule("socketserver", "SocketServer"),
+ MovedModule("_thread", "thread", "_thread"),
+ MovedModule("tkinter", "Tkinter"),
+ MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
+ MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
+ MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
+ MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
+ MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
+ MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
+ MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
+ MovedModule("tkinter_colorchooser", "tkColorChooser",
+ "tkinter.colorchooser"),
+ MovedModule("tkinter_commondialog", "tkCommonDialog",
+ "tkinter.commondialog"),
+ MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_font", "tkFont", "tkinter.font"),
+ MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
+ MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
+ "tkinter.simpledialog"),
+ MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
+ MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
+ MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
+ MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
+ MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
+ MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
+]
+# Add windows specific modules.
+if sys.platform == "win32":
+ _moved_attributes += [
+ MovedModule("winreg", "_winreg"),
+ ]
+
+for attr in _moved_attributes:
+ setattr(_MovedItems, attr.name, attr)
+ if isinstance(attr, MovedModule):
+ _importer._add_module(attr, "moves." + attr.name)
+del attr
+
+_MovedItems._moved_attributes = _moved_attributes
+
+moves = _MovedItems(__name__ + ".moves")
+_importer._add_module(moves, "moves")
+
+
+class Module_six_moves_urllib_parse(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_parse"""
+
+
+_urllib_parse_moved_attributes = [
+ MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
+ MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
+ MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
+ MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
+ MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
+ MovedAttribute("urljoin", "urlparse", "urllib.parse"),
+ MovedAttribute("urlparse", "urlparse", "urllib.parse"),
+ MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
+ MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
+ MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
+ MovedAttribute("quote", "urllib", "urllib.parse"),
+ MovedAttribute("quote_plus", "urllib", "urllib.parse"),
+ MovedAttribute("unquote", "urllib", "urllib.parse"),
+ MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
+ MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"),
+ MovedAttribute("urlencode", "urllib", "urllib.parse"),
+ MovedAttribute("splitquery", "urllib", "urllib.parse"),
+ MovedAttribute("splittag", "urllib", "urllib.parse"),
+ MovedAttribute("splituser", "urllib", "urllib.parse"),
+ MovedAttribute("splitvalue", "urllib", "urllib.parse"),
+ MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_params", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_query", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
+]
+for attr in _urllib_parse_moved_attributes:
+ setattr(Module_six_moves_urllib_parse, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
+ "moves.urllib_parse", "moves.urllib.parse")
+
+
+class Module_six_moves_urllib_error(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_error"""
+
+
+_urllib_error_moved_attributes = [
+ MovedAttribute("URLError", "urllib2", "urllib.error"),
+ MovedAttribute("HTTPError", "urllib2", "urllib.error"),
+ MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
+]
+for attr in _urllib_error_moved_attributes:
+ setattr(Module_six_moves_urllib_error, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
+ "moves.urllib_error", "moves.urllib.error")
+
+
+class Module_six_moves_urllib_request(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_request"""
+
+
+_urllib_request_moved_attributes = [
+ MovedAttribute("urlopen", "urllib2", "urllib.request"),
+ MovedAttribute("install_opener", "urllib2", "urllib.request"),
+ MovedAttribute("build_opener", "urllib2", "urllib.request"),
+ MovedAttribute("pathname2url", "urllib", "urllib.request"),
+ MovedAttribute("url2pathname", "urllib", "urllib.request"),
+ MovedAttribute("getproxies", "urllib", "urllib.request"),
+ MovedAttribute("Request", "urllib2", "urllib.request"),
+ MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
+ MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
+ MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
+ MovedAttribute("FileHandler", "urllib2", "urllib.request"),
+ MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
+ MovedAttribute("urlretrieve", "urllib", "urllib.request"),
+ MovedAttribute("urlcleanup", "urllib", "urllib.request"),
+ MovedAttribute("URLopener", "urllib", "urllib.request"),
+ MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
+ MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
+ MovedAttribute("parse_http_list", "urllib2", "urllib.request"),
+ MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"),
+]
+for attr in _urllib_request_moved_attributes:
+ setattr(Module_six_moves_urllib_request, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
+ "moves.urllib_request", "moves.urllib.request")
+
+
+class Module_six_moves_urllib_response(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_response"""
+
+
+_urllib_response_moved_attributes = [
+ MovedAttribute("addbase", "urllib", "urllib.response"),
+ MovedAttribute("addclosehook", "urllib", "urllib.response"),
+ MovedAttribute("addinfo", "urllib", "urllib.response"),
+ MovedAttribute("addinfourl", "urllib", "urllib.response"),
+]
+for attr in _urllib_response_moved_attributes:
+ setattr(Module_six_moves_urllib_response, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
+ "moves.urllib_response", "moves.urllib.response")
+
+
+class Module_six_moves_urllib_robotparser(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_robotparser"""
+
+
+_urllib_robotparser_moved_attributes = [
+ MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
+]
+for attr in _urllib_robotparser_moved_attributes:
+ setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
+ "moves.urllib_robotparser", "moves.urllib.robotparser")
+
+
+class Module_six_moves_urllib(types.ModuleType):
+
+ """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
+ __path__ = [] # mark as package
+ parse = _importer._get_module("moves.urllib_parse")
+ error = _importer._get_module("moves.urllib_error")
+ request = _importer._get_module("moves.urllib_request")
+ response = _importer._get_module("moves.urllib_response")
+ robotparser = _importer._get_module("moves.urllib_robotparser")
+
+ def __dir__(self):
+ return ['parse', 'error', 'request', 'response', 'robotparser']
+
+_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
+ "moves.urllib")
+
+
+def add_move(move):
+ """Add an item to six.moves."""
+ setattr(_MovedItems, move.name, move)
+
+
+def remove_move(name):
+ """Remove item from six.moves."""
+ try:
+ delattr(_MovedItems, name)
+ except AttributeError:
+ try:
+ del moves.__dict__[name]
+ except KeyError:
+ raise AttributeError("no such move, %r" % (name,))
+
+
+if PY3:
+ _meth_func = "__func__"
+ _meth_self = "__self__"
+
+ _func_closure = "__closure__"
+ _func_code = "__code__"
+ _func_defaults = "__defaults__"
+ _func_globals = "__globals__"
+else:
+ _meth_func = "im_func"
+ _meth_self = "im_self"
+
+ _func_closure = "func_closure"
+ _func_code = "func_code"
+ _func_defaults = "func_defaults"
+ _func_globals = "func_globals"
+
+
+try:
+ advance_iterator = next
+except NameError:
+ def advance_iterator(it):
+ return it.next()
+next = advance_iterator
+
+
+try:
+ callable = callable
+except NameError:
+ def callable(obj):
+ return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
+
+
+if PY3:
+ def get_unbound_function(unbound):
+ return unbound
+
+ create_bound_method = types.MethodType
+
+ def create_unbound_method(func, cls):
+ return func
+
+ Iterator = object
+else:
+ def get_unbound_function(unbound):
+ return unbound.im_func
+
+ def create_bound_method(func, obj):
+ return types.MethodType(func, obj, obj.__class__)
+
+ def create_unbound_method(func, cls):
+ return types.MethodType(func, None, cls)
+
+ class Iterator(object):
+
+ def next(self):
+ return type(self).__next__(self)
+
+ callable = callable
+_add_doc(get_unbound_function,
+ """Get the function out of a possibly unbound function""")
+
+
+get_method_function = operator.attrgetter(_meth_func)
+get_method_self = operator.attrgetter(_meth_self)
+get_function_closure = operator.attrgetter(_func_closure)
+get_function_code = operator.attrgetter(_func_code)
+get_function_defaults = operator.attrgetter(_func_defaults)
+get_function_globals = operator.attrgetter(_func_globals)
+
+
+if PY3:
+ def iterkeys(d, **kw):
+ return iter(d.keys(**kw))
+
+ def itervalues(d, **kw):
+ return iter(d.values(**kw))
+
+ def iteritems(d, **kw):
+ return iter(d.items(**kw))
+
+ def iterlists(d, **kw):
+ return iter(d.lists(**kw))
+
+ viewkeys = operator.methodcaller("keys")
+
+ viewvalues = operator.methodcaller("values")
+
+ viewitems = operator.methodcaller("items")
+else:
+ def iterkeys(d, **kw):
+ return d.iterkeys(**kw)
+
+ def itervalues(d, **kw):
+ return d.itervalues(**kw)
+
+ def iteritems(d, **kw):
+ return d.iteritems(**kw)
+
+ def iterlists(d, **kw):
+ return d.iterlists(**kw)
+
+ viewkeys = operator.methodcaller("viewkeys")
+
+ viewvalues = operator.methodcaller("viewvalues")
+
+ viewitems = operator.methodcaller("viewitems")
+
+_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
+_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
+_add_doc(iteritems,
+ "Return an iterator over the (key, value) pairs of a dictionary.")
+_add_doc(iterlists,
+ "Return an iterator over the (key, [values]) pairs of a dictionary.")
+
+
+if PY3:
+ def b(s):
+ return s.encode("latin-1")
+
+ def u(s):
+ return s
+ unichr = chr
+ import struct
+ int2byte = struct.Struct(">B").pack
+ del struct
+ byte2int = operator.itemgetter(0)
+ indexbytes = operator.getitem
+ iterbytes = iter
+ import io
+ StringIO = io.StringIO
+ BytesIO = io.BytesIO
+ del io
+ _assertCountEqual = "assertCountEqual"
+ if sys.version_info[1] <= 1:
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
+ _assertNotRegex = "assertNotRegexpMatches"
+ else:
+ _assertRaisesRegex = "assertRaisesRegex"
+ _assertRegex = "assertRegex"
+ _assertNotRegex = "assertNotRegex"
+else:
+ def b(s):
+ return s
+ # Workaround for standalone backslash
+
+ def u(s):
+ return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
+ unichr = unichr
+ int2byte = chr
+
+ def byte2int(bs):
+ return ord(bs[0])
+
+ def indexbytes(buf, i):
+ return ord(buf[i])
+ iterbytes = functools.partial(itertools.imap, ord)
+ import StringIO
+ StringIO = BytesIO = StringIO.StringIO
+ _assertCountEqual = "assertItemsEqual"
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
+ _assertNotRegex = "assertNotRegexpMatches"
+_add_doc(b, """Byte literal""")
+_add_doc(u, """Text literal""")
+
+
+def assertCountEqual(self, *args, **kwargs):
+ return getattr(self, _assertCountEqual)(*args, **kwargs)
+
+
+def assertRaisesRegex(self, *args, **kwargs):
+ return getattr(self, _assertRaisesRegex)(*args, **kwargs)
+
+
+def assertRegex(self, *args, **kwargs):
+ return getattr(self, _assertRegex)(*args, **kwargs)
+
+
+def assertNotRegex(self, *args, **kwargs):
+ return getattr(self, _assertNotRegex)(*args, **kwargs)
+
+
+if PY3:
+ exec_ = getattr(moves.builtins, "exec")
+
+ def reraise(tp, value, tb=None):
+ try:
+ if value is None:
+ value = tp()
+ if value.__traceback__ is not tb:
+ raise value.with_traceback(tb)
+ raise value
+ finally:
+ value = None
+ tb = None
+
+else:
+ def exec_(_code_, _globs_=None, _locs_=None):
+ """Execute code in a namespace."""
+ if _globs_ is None:
+ frame = sys._getframe(1)
+ _globs_ = frame.f_globals
+ if _locs_ is None:
+ _locs_ = frame.f_locals
+ del frame
+ elif _locs_ is None:
+ _locs_ = _globs_
+ exec("""exec _code_ in _globs_, _locs_""")
+
+ exec_("""def reraise(tp, value, tb=None):
+ try:
+ raise tp, value, tb
+ finally:
+ tb = None
+""")
+
+
+if sys.version_info[:2] > (3,):
+ exec_("""def raise_from(value, from_value):
+ try:
+ raise value from from_value
+ finally:
+ value = None
+""")
+else:
+ def raise_from(value, from_value):
+ raise value
+
+
+print_ = getattr(moves.builtins, "print", None)
+if print_ is None:
+ def print_(*args, **kwargs):
+ """The new-style print function for Python 2.4 and 2.5."""
+ fp = kwargs.pop("file", sys.stdout)
+ if fp is None:
+ return
+
+ def write(data):
+ if not isinstance(data, basestring):
+ data = str(data)
+ # If the file has an encoding, encode unicode with it.
+ if (isinstance(fp, file) and
+ isinstance(data, unicode) and
+ fp.encoding is not None):
+ errors = getattr(fp, "errors", None)
+ if errors is None:
+ errors = "strict"
+ data = data.encode(fp.encoding, errors)
+ fp.write(data)
+ want_unicode = False
+ sep = kwargs.pop("sep", None)
+ if sep is not None:
+ if isinstance(sep, unicode):
+ want_unicode = True
+ elif not isinstance(sep, str):
+ raise TypeError("sep must be None or a string")
+ end = kwargs.pop("end", None)
+ if end is not None:
+ if isinstance(end, unicode):
+ want_unicode = True
+ elif not isinstance(end, str):
+ raise TypeError("end must be None or a string")
+ if kwargs:
+ raise TypeError("invalid keyword arguments to print()")
+ if not want_unicode:
+ for arg in args:
+ if isinstance(arg, unicode):
+ want_unicode = True
+ break
+ if want_unicode:
+ newline = unicode("\n")
+ space = unicode(" ")
+ else:
+ newline = "\n"
+ space = " "
+ if sep is None:
+ sep = space
+ if end is None:
+ end = newline
+ for i, arg in enumerate(args):
+ if i:
+ write(sep)
+ write(arg)
+ write(end)
+if sys.version_info[:2] < (3, 3):
+ _print = print_
+
+ def print_(*args, **kwargs):
+ fp = kwargs.get("file", sys.stdout)
+ flush = kwargs.pop("flush", False)
+ _print(*args, **kwargs)
+ if flush and fp is not None:
+ fp.flush()
+
+_add_doc(reraise, """Reraise an exception.""")
+
+if sys.version_info[0:2] < (3, 4):
+ # This does exactly the same what the :func:`py3:functools.update_wrapper`
+ # function does on Python versions after 3.2. It sets the ``__wrapped__``
+ # attribute on ``wrapper`` object and it doesn't raise an error if any of
+ # the attributes mentioned in ``assigned`` and ``updated`` are missing on
+ # ``wrapped`` object.
+ def _update_wrapper(wrapper, wrapped,
+ assigned=functools.WRAPPER_ASSIGNMENTS,
+ updated=functools.WRAPPER_UPDATES):
+ for attr in assigned:
+ try:
+ value = getattr(wrapped, attr)
+ except AttributeError:
+ continue
+ else:
+ setattr(wrapper, attr, value)
+ for attr in updated:
+ getattr(wrapper, attr).update(getattr(wrapped, attr, {}))
+ wrapper.__wrapped__ = wrapped
+ return wrapper
+ _update_wrapper.__doc__ = functools.update_wrapper.__doc__
+
+ def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
+ updated=functools.WRAPPER_UPDATES):
+ return functools.partial(_update_wrapper, wrapped=wrapped,
+ assigned=assigned, updated=updated)
+ wraps.__doc__ = functools.wraps.__doc__
+
+else:
+ wraps = functools.wraps
+
+
+def with_metaclass(meta, *bases):
+ """Create a base class with a metaclass."""
+ # This requires a bit of explanation: the basic idea is to make a dummy
+ # metaclass for one level of class instantiation that replaces itself with
+ # the actual metaclass.
+ class metaclass(type):
+
+ def __new__(cls, name, this_bases, d):
+ if sys.version_info[:2] >= (3, 7):
+ # This version introduced PEP 560 that requires a bit
+ # of extra care (we mimic what is done by __build_class__).
+ resolved_bases = types.resolve_bases(bases)
+ if resolved_bases is not bases:
+ d['__orig_bases__'] = bases
+ else:
+ resolved_bases = bases
+ return meta(name, resolved_bases, d)
+
+ @classmethod
+ def __prepare__(cls, name, this_bases):
+ return meta.__prepare__(name, bases)
+ return type.__new__(metaclass, 'temporary_class', (), {})
+
+
+def add_metaclass(metaclass):
+ """Class decorator for creating a class with a metaclass."""
+ def wrapper(cls):
+ orig_vars = cls.__dict__.copy()
+ slots = orig_vars.get('__slots__')
+ if slots is not None:
+ if isinstance(slots, str):
+ slots = [slots]
+ for slots_var in slots:
+ orig_vars.pop(slots_var)
+ orig_vars.pop('__dict__', None)
+ orig_vars.pop('__weakref__', None)
+ if hasattr(cls, '__qualname__'):
+ orig_vars['__qualname__'] = cls.__qualname__
+ return metaclass(cls.__name__, cls.__bases__, orig_vars)
+ return wrapper
+
+
+def ensure_binary(s, encoding='utf-8', errors='strict'):
+ """Coerce **s** to six.binary_type.
+
+ For Python 2:
+ - `unicode` -> encoded to `str`
+ - `str` -> `str`
+
+ For Python 3:
+ - `str` -> encoded to `bytes`
+ - `bytes` -> `bytes`
+ """
+ if isinstance(s, binary_type):
+ return s
+ if isinstance(s, text_type):
+ return s.encode(encoding, errors)
+ raise TypeError("not expecting type '%s'" % type(s))
+
+
+def ensure_str(s, encoding='utf-8', errors='strict'):
+ """Coerce *s* to `str`.
+
+ For Python 2:
+ - `unicode` -> encoded to `str`
+ - `str` -> `str`
+
+ For Python 3:
+ - `str` -> `str`
+ - `bytes` -> decoded to `str`
+ """
+ # Optimization: Fast return for the common case.
+ if type(s) is str:
+ return s
+ if PY2 and isinstance(s, text_type):
+ return s.encode(encoding, errors)
+ elif PY3 and isinstance(s, binary_type):
+ return s.decode(encoding, errors)
+ elif not isinstance(s, (text_type, binary_type)):
+ raise TypeError("not expecting type '%s'" % type(s))
+ return s
+
+
+def ensure_text(s, encoding='utf-8', errors='strict'):
+ """Coerce *s* to six.text_type.
+
+ For Python 2:
+ - `unicode` -> `unicode`
+ - `str` -> `unicode`
+
+ For Python 3:
+ - `str` -> `str`
+ - `bytes` -> decoded to `str`
+ """
+ if isinstance(s, binary_type):
+ return s.decode(encoding, errors)
+ elif isinstance(s, text_type):
+ return s
+ else:
+ raise TypeError("not expecting type '%s'" % type(s))
+
+
+def python_2_unicode_compatible(klass):
+ """
+ A class decorator that defines __unicode__ and __str__ methods under Python 2.
+ Under Python 3 it does nothing.
+
+ To support Python 2 and 3 with a single code base, define a __str__ method
+ returning text and apply this decorator to the class.
+ """
+ if PY2:
+ if '__str__' not in klass.__dict__:
+ raise ValueError("@python_2_unicode_compatible cannot be applied "
+ "to %s because it doesn't define __str__()." %
+ klass.__name__)
+ klass.__unicode__ = klass.__str__
+ klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
+ return klass
+
+
+# Complete the moves implementation.
+# This code is at the end of this module to speed up module loading.
+# Turn this module into a package.
+__path__ = [] # required for PEP 302 and PEP 451
+__package__ = __name__ # see PEP 366 @ReservedAssignment
+if globals().get("__spec__") is not None:
+ __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
+# Remove other six meta path importers, since they cause problems. This can
+# happen if six is removed from sys.modules and then reloaded. (Setuptools does
+# this for some reason.)
+if sys.meta_path:
+ for i, importer in enumerate(sys.meta_path):
+ # Here's some real nastiness: Another "instance" of the six module might
+ # be floating around. Therefore, we can't use isinstance() to check for
+ # the six meta path importer, since the other six instance will have
+ # inserted an importer with different class.
+ if (type(importer).__name__ == "_SixMetaPathImporter" and
+ importer.name == __name__):
+ del sys.meta_path[i]
+ break
+ del i, importer
+# Finally, add the importer to the meta path import hook.
+sys.meta_path.append(_importer)
diff --git a/monEnvTP/lib/python3.8/site-packages/tempora-5.0.0.dist-info/INSTALLER b/monEnvTP/lib/python3.8/site-packages/tempora-5.0.0.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/tempora-5.0.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/monEnvTP/lib/python3.8/site-packages/tempora-5.0.0.dist-info/LICENSE b/monEnvTP/lib/python3.8/site-packages/tempora-5.0.0.dist-info/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..353924be0e59b9ad7e6c22848c2189398481821d
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/tempora-5.0.0.dist-info/LICENSE
@@ -0,0 +1,19 @@
+Copyright Jason R. Coombs
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
diff --git a/monEnvTP/lib/python3.8/site-packages/tempora-5.0.0.dist-info/METADATA b/monEnvTP/lib/python3.8/site-packages/tempora-5.0.0.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..e7c54181bef703bd745d2fc843e6ab5aab39868f
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/tempora-5.0.0.dist-info/METADATA
@@ -0,0 +1,70 @@
+Metadata-Version: 2.1
+Name: tempora
+Version: 5.0.0
+Summary: Objects and routines pertaining to date and time (tempora)
+Home-page: https://github.com/jaraco/tempora
+Author: Jason R. Coombs
+Author-email: jaraco@jaraco.com
+License: UNKNOWN
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Requires-Python: >=3.7
+License-File: LICENSE
+Requires-Dist: pytz
+Requires-Dist: jaraco.functools (>=1.20)
+Provides-Extra: docs
+Requires-Dist: sphinx ; extra == 'docs'
+Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs'
+Requires-Dist: rst.linker (>=1.9) ; extra == 'docs'
+Provides-Extra: testing
+Requires-Dist: pytest (>=6) ; extra == 'testing'
+Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing'
+Requires-Dist: pytest-flake8 ; extra == 'testing'
+Requires-Dist: pytest-cov ; extra == 'testing'
+Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing'
+Requires-Dist: backports.unittest-mock ; extra == 'testing'
+Requires-Dist: freezegun ; extra == 'testing'
+Requires-Dist: pytest-freezegun ; extra == 'testing'
+Requires-Dist: types-freezegun ; extra == 'testing'
+Requires-Dist: types-pytz ; extra == 'testing'
+Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing'
+Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing'
+
+.. image:: https://img.shields.io/pypi/v/tempora.svg
+ :target: `PyPI link`_
+
+.. image:: https://img.shields.io/pypi/pyversions/tempora.svg
+ :target: `PyPI link`_
+
+.. _PyPI link: https://pypi.org/project/tempora
+
+.. image:: https://github.com/jaraco/tempora/workflows/tests/badge.svg
+ :target: https://github.com/jaraco/tempora/actions?query=workflow%3A%22tests%22
+ :alt: tests
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/psf/black
+ :alt: Code style: Black
+
+.. image:: https://readthedocs.org/projects/tempora/badge/?version=latest
+ :target: https://tempora.readthedocs.io/en/latest/?badge=latest
+
+.. image:: https://img.shields.io/badge/skeleton-2021-informational
+ :target: https://blog.jaraco.com/skeleton
+
+
+Objects and routines pertaining to date and time (tempora).
+
+Modules include:
+
+- tempora (top level package module) contains miscellaneous
+ utilities and constants.
+- timing contains routines for measuring and profiling.
+- schedule contains an event scheduler.
+- utc contains routines for getting datetime-aware UTC values.
+
+
diff --git a/monEnvTP/lib/python3.8/site-packages/tempora-5.0.0.dist-info/RECORD b/monEnvTP/lib/python3.8/site-packages/tempora-5.0.0.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..72e327b1c865a9f758518db7e2f4660470f89849
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/tempora-5.0.0.dist-info/RECORD
@@ -0,0 +1,20 @@
+../../../bin/calc-prorate,sha256=LuasUlWWzz25BI-ni0o6ijGnY6jva3WmFn6kBJz-zbQ,286
+tempora-5.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+tempora-5.0.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050
+tempora-5.0.0.dist-info/METADATA,sha256=NpqUR26J9pBGgjhOzxa7ykjJwrk2_bHeg5iJLe3UIHI,2649
+tempora-5.0.0.dist-info/RECORD,,
+tempora-5.0.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+tempora-5.0.0.dist-info/entry_points.txt,sha256=GQDucOVurn8ggg_pvUrIdneKtQqSpGnXtGArBdgZByA,68
+tempora-5.0.0.dist-info/top_level.txt,sha256=s9TJ4uutFRCBEzoCykLLwjIjAPBtiF7ny_ut4MZvC5Q,8
+tempora/__init__.py,sha256=HUHR60byTspxqQcJyfXG1mkIdzvi-csu9KLEuY-wX1U,17684
+tempora/__pycache__/__init__.cpython-38.pyc,,
+tempora/__pycache__/schedule.cpython-38.pyc,,
+tempora/__pycache__/timing.cpython-38.pyc,,
+tempora/__pycache__/utc.cpython-38.pyc,,
+tempora/schedule.py,sha256=d8tmUktTCwMN1AEL4zaV0cqzyisMuoWGZwPI05oU6Vc,6065
+tempora/tests/__pycache__/test_schedule.cpython-38.pyc,,
+tempora/tests/__pycache__/test_timing.cpython-38.pyc,,
+tempora/tests/test_schedule.py,sha256=aL5OuRJHAlNTE2LC6Pdl5HV3O5E0DzaRFzJIgwMFiSA,4790
+tempora/tests/test_timing.py,sha256=WCmNwC5lT61mIS5gPadOPR__25V9Jv_oupPFAXfMpY0,1271
+tempora/timing.py,sha256=jlpRQl7c4yl6qjb6oRRpozyWDDzBMYC0KF2ZmwoI9mk,6197
+tempora/utc.py,sha256=WSIWCZghadB7D8y84JYVAJ8QbLy5AzooDi5X4kiy-24,846
diff --git a/monEnvTP/lib/python3.8/site-packages/tempora-5.0.0.dist-info/WHEEL b/monEnvTP/lib/python3.8/site-packages/tempora-5.0.0.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..becc9a66ea739ba941d48a749e248761cc6e658a
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/tempora-5.0.0.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.1)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/monEnvTP/lib/python3.8/site-packages/tempora-5.0.0.dist-info/entry_points.txt b/monEnvTP/lib/python3.8/site-packages/tempora-5.0.0.dist-info/entry_points.txt
new file mode 100644
index 0000000000000000000000000000000000000000..1b96f5333a6fb5d26ceee43b88119294ec43e103
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/tempora-5.0.0.dist-info/entry_points.txt
@@ -0,0 +1,3 @@
+[console_scripts]
+calc-prorate = tempora:calculate_prorated_values
+
diff --git a/monEnvTP/lib/python3.8/site-packages/tempora-5.0.0.dist-info/top_level.txt b/monEnvTP/lib/python3.8/site-packages/tempora-5.0.0.dist-info/top_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..ea5686868cfcd9240b23191e243249ac2218301a
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/tempora-5.0.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+tempora
diff --git a/monEnvTP/lib/python3.8/site-packages/tempora/__init__.py b/monEnvTP/lib/python3.8/site-packages/tempora/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..6652b5aefbe168d60202a65ba06fb8278e5663cc
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/tempora/__init__.py
@@ -0,0 +1,604 @@
+"Objects and routines pertaining to date and time (tempora)"
+
+import datetime
+import time
+import re
+import numbers
+import functools
+import contextlib
+
+from jaraco.functools import once
+
+
+# some useful constants
+osc_per_year = 290_091_329_207_984_000
+"""
+mean vernal equinox year expressed in oscillations of atomic cesium at the
+year 2000 (see http://webexhibits.org/calendars/timeline.html for more info).
+"""
+osc_per_second = 9_192_631_770
+seconds_per_second = 1
+seconds_per_year = 31_556_940
+seconds_per_minute = 60
+minutes_per_hour = 60
+hours_per_day = 24
+seconds_per_hour = seconds_per_minute * minutes_per_hour
+seconds_per_day = seconds_per_hour * hours_per_day
+days_per_year = seconds_per_year / seconds_per_day
+thirty_days = datetime.timedelta(days=30)
+# these values provide useful averages
+six_months = datetime.timedelta(days=days_per_year / 2)
+seconds_per_month = seconds_per_year / 12
+hours_per_month = hours_per_day * days_per_year / 12
+
+
+@once
+def _needs_year_help():
+ """
+ Some versions of Python render %Y with only three characters :(
+ https://bugs.python.org/issue39103
+ """
+ return len(datetime.date(900, 1, 1).strftime('%Y')) != 4
+
+
+def ensure_datetime(ob):
+ """
+ Given a datetime or date or time object from the ``datetime``
+ module, always return a datetime using default values.
+ """
+ if isinstance(ob, datetime.datetime):
+ return ob
+ date = time = ob
+ if isinstance(ob, datetime.date):
+ time = datetime.time()
+ if isinstance(ob, datetime.time):
+ date = datetime.date(1900, 1, 1)
+ return datetime.datetime.combine(date, time)
+
+
+def strftime(fmt, t):
+ """
+ Portable strftime.
+
+ In the stdlib, strftime has `known portability problems
+ <https://bugs.python.org/issue13305>`_. This function
+ aims to smooth over those issues and provide a
+ consistent experience across the major platforms.
+
+ >>> strftime('%Y', datetime.datetime(1890, 1, 1))
+ '1890'
+ >>> strftime('%Y', datetime.datetime(900, 1, 1))
+ '0900'
+
+ Supports time.struct_time, tuples, and datetime.datetime objects.
+
+ >>> strftime('%Y-%m-%d', (1976, 5, 7))
+ '1976-05-07'
+
+ Also supports date objects
+
+ >>> strftime('%Y', datetime.date(1976, 5, 7))
+ '1976'
+
+ Also supports milliseconds using %s.
+
+ >>> strftime('%s', datetime.time(microsecond=20000))
+ '020'
+
+ Also supports microseconds (3 digits) using %µ
+
+ >>> strftime('%µ', datetime.time(microsecond=123456))
+ '456'
+
+ Historically, %u was used for microseconds, but now
+ it honors the value rendered by stdlib.
+
+ >>> strftime('%u', datetime.date(1976, 5, 7))
+ '5'
+
+ Also supports microseconds (6 digits) using %f
+
+ >>> strftime('%f', datetime.time(microsecond=23456))
+ '023456'
+
+ Even supports time values on date objects (discouraged):
+
+ >>> strftime('%f', datetime.date(1976, 1, 1))
+ '000000'
+ >>> strftime('%µ', datetime.date(1976, 1, 1))
+ '000'
+ >>> strftime('%s', datetime.date(1976, 1, 1))
+ '000'
+
+ And vice-versa:
+
+ >>> strftime('%Y', datetime.time())
+ '1900'
+ """
+ if isinstance(t, (time.struct_time, tuple)):
+ t = datetime.datetime(*t[:6])
+ t = ensure_datetime(t)
+ subs = (
+ ('%s', '%03d' % (t.microsecond // 1000)),
+ ('%µ', '%03d' % (t.microsecond % 1000)),
+ )
+ if _needs_year_help(): # pragma: nocover
+ subs += (('%Y', '%04d' % t.year),)
+
+ def doSub(s, sub):
+ return s.replace(*sub)
+
+ def doSubs(s):
+ return functools.reduce(doSub, subs, s)
+
+ fmt = '%%'.join(map(doSubs, fmt.split('%%')))
+ return t.strftime(fmt)
+
+
+def datetime_mod(dt, period, start=None):
+ """
+ Find the time which is the specified date/time truncated to the time delta
+ relative to the start date/time.
+ By default, the start time is midnight of the same day as the specified
+ date/time.
+
+ >>> datetime_mod(datetime.datetime(2004, 1, 2, 3),
+ ... datetime.timedelta(days = 1.5),
+ ... start = datetime.datetime(2004, 1, 1))
+ datetime.datetime(2004, 1, 1, 0, 0)
+ >>> datetime_mod(datetime.datetime(2004, 1, 2, 13),
+ ... datetime.timedelta(days = 1.5),
+ ... start = datetime.datetime(2004, 1, 1))
+ datetime.datetime(2004, 1, 2, 12, 0)
+ >>> datetime_mod(datetime.datetime(2004, 1, 2, 13),
+ ... datetime.timedelta(days = 7),
+ ... start = datetime.datetime(2004, 1, 1))
+ datetime.datetime(2004, 1, 1, 0, 0)
+ >>> datetime_mod(datetime.datetime(2004, 1, 10, 13),
+ ... datetime.timedelta(days = 7),
+ ... start = datetime.datetime(2004, 1, 1))
+ datetime.datetime(2004, 1, 8, 0, 0)
+ """
+ if start is None:
+ # use midnight of the same day
+ start = datetime.datetime.combine(dt.date(), datetime.time())
+ # calculate the difference between the specified time and the start date.
+ delta = dt - start
+
+ # now aggregate the delta and the period into microseconds
+ # Use microseconds because that's the highest precision of these time
+ # pieces. Also, using microseconds ensures perfect precision (no floating
+ # point errors).
+ def get_time_delta_microseconds(td):
+ return (td.days * seconds_per_day + td.seconds) * 1000000 + td.microseconds
+
+ delta, period = map(get_time_delta_microseconds, (delta, period))
+ offset = datetime.timedelta(microseconds=delta % period)
+ # the result is the original specified time minus the offset
+ result = dt - offset
+ return result
+
+
+def datetime_round(dt, period, start=None):
+ """
+ Find the nearest even period for the specified date/time.
+
+ >>> datetime_round(datetime.datetime(2004, 11, 13, 8, 11, 13),
+ ... datetime.timedelta(hours = 1))
+ datetime.datetime(2004, 11, 13, 8, 0)
+ >>> datetime_round(datetime.datetime(2004, 11, 13, 8, 31, 13),
+ ... datetime.timedelta(hours = 1))
+ datetime.datetime(2004, 11, 13, 9, 0)
+ >>> datetime_round(datetime.datetime(2004, 11, 13, 8, 30),
+ ... datetime.timedelta(hours = 1))
+ datetime.datetime(2004, 11, 13, 9, 0)
+ """
+ result = datetime_mod(dt, period, start)
+ if abs(dt - result) >= period // 2:
+ result += period
+ return result
+
+
+def get_nearest_year_for_day(day):
+ """
+ Returns the nearest year to now inferred from a Julian date.
+
+ >>> freezer = getfixture('freezer')
+ >>> freezer.move_to('2019-05-20')
+ >>> get_nearest_year_for_day(20)
+ 2019
+ >>> get_nearest_year_for_day(340)
+ 2018
+ >>> freezer.move_to('2019-12-15')
+ >>> get_nearest_year_for_day(20)
+ 2020
+ """
+ now = time.gmtime()
+ result = now.tm_year
+ # if the day is far greater than today, it must be from last year
+ if day - now.tm_yday > 365 // 2:
+ result -= 1
+ # if the day is far less than today, it must be for next year.
+ if now.tm_yday - day > 365 // 2:
+ result += 1
+ return result
+
+
+def gregorian_date(year, julian_day):
+ """
+ Gregorian Date is defined as a year and a julian day (1-based
+ index into the days of the year).
+
+ >>> gregorian_date(2007, 15)
+ datetime.date(2007, 1, 15)
+ """
+ result = datetime.date(year, 1, 1)
+ result += datetime.timedelta(days=julian_day - 1)
+ return result
+
+
+def get_period_seconds(period):
+ """
+ return the number of seconds in the specified period
+
+ >>> get_period_seconds('day')
+ 86400
+ >>> get_period_seconds(86400)
+ 86400
+ >>> get_period_seconds(datetime.timedelta(hours=24))
+ 86400
+ >>> get_period_seconds('day + os.system("rm -Rf *")')
+ Traceback (most recent call last):
+ ...
+ ValueError: period not in (second, minute, hour, day, month, year)
+ """
+ if isinstance(period, str):
+ try:
+ name = 'seconds_per_' + period.lower()
+ result = globals()[name]
+ except KeyError:
+ msg = "period not in (second, minute, hour, day, month, year)"
+ raise ValueError(msg)
+ elif isinstance(period, numbers.Number):
+ result = period
+ elif isinstance(period, datetime.timedelta):
+ result = period.days * get_period_seconds('day') + period.seconds
+ else:
+ raise TypeError('period must be a string or integer')
+ return result
+
+
+def get_date_format_string(period):
+ """
+ For a given period (e.g. 'month', 'day', or some numeric interval
+ such as 3600 (in secs)), return the format string that can be
+ used with strftime to format that time to specify the times
+ across that interval, but no more detailed.
+ For example,
+
+ >>> get_date_format_string('month')
+ '%Y-%m'
+ >>> get_date_format_string(3600)
+ '%Y-%m-%d %H'
+ >>> get_date_format_string('hour')
+ '%Y-%m-%d %H'
+ >>> get_date_format_string(None)
+ Traceback (most recent call last):
+ ...
+ TypeError: period must be a string or integer
+ >>> get_date_format_string('garbage')
+ Traceback (most recent call last):
+ ...
+ ValueError: period not in (second, minute, hour, day, month, year)
+ """
+ # handle the special case of 'month' which doesn't have
+ # a static interval in seconds
+ if isinstance(period, str) and period.lower() == 'month':
+ return '%Y-%m'
+ file_period_secs = get_period_seconds(period)
+ format_pieces = ('%Y', '-%m-%d', ' %H', '-%M', '-%S')
+ seconds_per_second = 1
+ intervals = (
+ seconds_per_year,
+ seconds_per_day,
+ seconds_per_hour,
+ seconds_per_minute,
+ seconds_per_second,
+ )
+ mods = list(map(lambda interval: file_period_secs % interval, intervals))
+ format_pieces = format_pieces[: mods.index(0) + 1]
+ return ''.join(format_pieces)
+
+
+def calculate_prorated_values():
+ """
+ >>> monkeypatch = getfixture('monkeypatch')
+ >>> import builtins
+ >>> monkeypatch.setattr(builtins, 'input', lambda prompt: '3/hour')
+ >>> calculate_prorated_values()
+ per minute: 0.05
+ per hour: 3.0
+ per day: 72.0
+ per month: 2191.454166666667
+ per year: 26297.45
+ """
+ rate = input("Enter the rate (3/hour, 50/month)> ")
+ for period, value in _prorated_values(rate):
+ print("per {period}: {value}".format(**locals()))
+
+
+def _prorated_values(rate):
+ """
+ Given a rate (a string in units per unit time), and return that same
+ rate for various time periods.
+
+ >>> for period, value in _prorated_values('20/hour'):
+ ... print('{period}: {value:0.3f}'.format(**locals()))
+ minute: 0.333
+ hour: 20.000
+ day: 480.000
+ month: 14609.694
+ year: 175316.333
+
+ """
+ res = re.match(r'(?P<value>[\d.]+)/(?P<period>\w+)$', rate).groupdict()
+ value = float(res['value'])
+ value_per_second = value / get_period_seconds(res['period'])
+ for period in ('minute', 'hour', 'day', 'month', 'year'):
+ period_value = value_per_second * get_period_seconds(period)
+ yield period, period_value
+
+
+def parse_timedelta(str):
+ """
+ Take a string representing a span of time and parse it to a time delta.
+ Accepts any string of comma-separated numbers each with a unit indicator.
+
+ >>> parse_timedelta('1 day')
+ datetime.timedelta(days=1)
+
+ >>> parse_timedelta('1 day, 30 seconds')
+ datetime.timedelta(days=1, seconds=30)
+
+ >>> parse_timedelta('47.32 days, 20 minutes, 15.4 milliseconds')
+ datetime.timedelta(days=47, seconds=28848, microseconds=15400)
+
+ Supports weeks, months, years
+
+ >>> parse_timedelta('1 week')
+ datetime.timedelta(days=7)
+
+ >>> parse_timedelta('1 year, 1 month')
+ datetime.timedelta(days=395, seconds=58685)
+
+ Note that months and years strict intervals, not aligned
+ to a calendar:
+
+ >>> now = datetime.datetime.now()
+ >>> later = now + parse_timedelta('1 year')
+ >>> diff = later.replace(year=now.year) - now
+ >>> diff.seconds
+ 20940
+
+ >>> parse_timedelta('foo')
+ Traceback (most recent call last):
+ ...
+ ValueError: Unexpected 'foo'
+
+ >>> parse_timedelta('14 seconds foo')
+ Traceback (most recent call last):
+ ...
+ ValueError: Unexpected 'foo'
+
+ Supports abbreviations:
+
+ >>> parse_timedelta('1s')
+ datetime.timedelta(seconds=1)
+
+ >>> parse_timedelta('1sec')
+ datetime.timedelta(seconds=1)
+
+ >>> parse_timedelta('5min1sec')
+ datetime.timedelta(seconds=301)
+
+ >>> parse_timedelta('1 ms')
+ datetime.timedelta(microseconds=1000)
+
+ >>> parse_timedelta('1 µs')
+ datetime.timedelta(microseconds=1)
+
+ >>> parse_timedelta('1 us')
+ datetime.timedelta(microseconds=1)
+
+ And supports the common colon-separated duration:
+
+ >>> parse_timedelta('14:00:35.362')
+ datetime.timedelta(seconds=50435, microseconds=362000)
+
+ TODO: Should this be 14 hours or 14 minutes?
+
+ >>> parse_timedelta('14:00')
+ datetime.timedelta(seconds=50400)
+
+ >>> parse_timedelta('14:00 minutes')
+ Traceback (most recent call last):
+ ...
+ ValueError: Cannot specify units with composite delta
+
+ Nanoseconds get rounded to the nearest microsecond:
+
+ >>> parse_timedelta('600 ns')
+ datetime.timedelta(microseconds=1)
+
+ >>> parse_timedelta('.002 µs, 499 ns')
+ datetime.timedelta(microseconds=1)
+
+ Expect ValueError for other invalid inputs.
+
+ >>> parse_timedelta('13 feet')
+ Traceback (most recent call last):
+ ...
+ ValueError: Invalid unit feets
+ """
+ return _parse_timedelta_nanos(str).resolve()
+
+
+def _parse_timedelta_nanos(str):
+ parts = re.finditer(r'(?P<value>[\d.:]+)\s?(?P<unit>[^\W\d_]+)?', str)
+ chk_parts = _check_unmatched(parts, str)
+ deltas = map(_parse_timedelta_part, chk_parts)
+ return sum(deltas, _Saved_NS())
+
+
+def _check_unmatched(matches, text):
+ """
+ Ensure no words appear in unmatched text.
+ """
+
+ def check_unmatched(unmatched):
+ found = re.search(r'\w+', unmatched)
+ if found:
+ raise ValueError(f"Unexpected {found.group(0)!r}")
+
+ pos = 0
+ for match in matches:
+ check_unmatched(text[pos : match.start()])
+ yield match
+ pos = match.end()
+ check_unmatched(text[pos:])
+
+
+_unit_lookup = {
+ 'µs': 'microsecond',
+ 'µsec': 'microsecond',
+ 'us': 'microsecond',
+ 'usec': 'microsecond',
+ 'micros': 'microsecond',
+ 'ms': 'millisecond',
+ 'msec': 'millisecond',
+ 'millis': 'millisecond',
+ 's': 'second',
+ 'sec': 'second',
+ 'h': 'hour',
+ 'hr': 'hour',
+ 'm': 'minute',
+ 'min': 'minute',
+ 'w': 'week',
+ 'wk': 'week',
+ 'd': 'day',
+ 'ns': 'nanosecond',
+ 'nsec': 'nanosecond',
+ 'nanos': 'nanosecond',
+}
+
+
+def _resolve_unit(raw_match):
+ if raw_match is None:
+ return 'second'
+ text = raw_match.lower()
+ return _unit_lookup.get(text, text)
+
+
+def _parse_timedelta_composite(raw_value, unit):
+ if unit != 'seconds':
+ raise ValueError("Cannot specify units with composite delta")
+ values = raw_value.split(':')
+ units = 'hours', 'minutes', 'seconds'
+ composed = ' '.join(f'{value} {unit}' for value, unit in zip(values, units))
+ return _parse_timedelta_nanos(composed)
+
+
+def _parse_timedelta_part(match):
+ unit = _resolve_unit(match.group('unit'))
+ if not unit.endswith('s'):
+ unit += 's'
+ raw_value = match.group('value')
+ if ':' in raw_value:
+ return _parse_timedelta_composite(raw_value, unit)
+ value = float(raw_value)
+ if unit == 'months':
+ unit = 'years'
+ value = value / 12
+ if unit == 'years':
+ unit = 'days'
+ value = value * days_per_year
+ return _Saved_NS.derive(unit, value)
+
+
+class _Saved_NS:
+ """
+ Bundle a timedelta with nanoseconds.
+
+ >>> _Saved_NS.derive('microseconds', .001)
+ _Saved_NS(td=datetime.timedelta(0), nanoseconds=1)
+ """
+
+ td = datetime.timedelta()
+ nanoseconds = 0
+ multiplier = dict(
+ seconds=1000000000,
+ milliseconds=1000000,
+ microseconds=1000,
+ )
+
+ def __init__(self, **kwargs):
+ vars(self).update(kwargs)
+
+ @classmethod
+ def derive(cls, unit, value):
+ if unit == 'nanoseconds':
+ return _Saved_NS(nanoseconds=value)
+
+ try:
+ raw_td = datetime.timedelta(**{unit: value})
+ except TypeError:
+ raise ValueError(f"Invalid unit {unit}")
+ res = _Saved_NS(td=raw_td)
+ with contextlib.suppress(KeyError):
+ res.nanoseconds = int(value * cls.multiplier[unit]) % 1000
+ return res
+
+ def __add__(self, other):
+ return _Saved_NS(
+ td=self.td + other.td, nanoseconds=self.nanoseconds + other.nanoseconds
+ )
+
+ def resolve(self):
+ """
+ Resolve any nanoseconds into the microseconds field,
+ discarding any nanosecond resolution (but honoring partial
+ microseconds).
+ """
+ addl_micros = round(self.nanoseconds / 1000)
+ return self.td + datetime.timedelta(microseconds=addl_micros)
+
+ def __repr__(self):
+ return f'_Saved_NS(td={self.td!r}, nanoseconds={self.nanoseconds!r})'
+
+
+def date_range(start=None, stop=None, step=None):
+ """
+ Much like the built-in function range, but works with dates
+
+ >>> range_items = date_range(
+ ... datetime.datetime(2005,12,21),
+ ... datetime.datetime(2005,12,25),
+ ... )
+ >>> my_range = tuple(range_items)
+ >>> datetime.datetime(2005,12,21) in my_range
+ True
+ >>> datetime.datetime(2005,12,22) in my_range
+ True
+ >>> datetime.datetime(2005,12,25) in my_range
+ False
+ >>> from_now = date_range(stop=datetime.datetime(2099, 12, 31))
+ >>> next(from_now)
+ datetime.datetime(...)
+ """
+ if step is None:
+ step = datetime.timedelta(days=1)
+ if start is None:
+ start = datetime.datetime.now()
+ while start < stop:
+ yield start
+ start += step
diff --git a/monEnvTP/lib/python3.8/site-packages/tempora/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/tempora/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..87e8ac8a39a376b4af8e82c45030eea3cda6827c
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/tempora/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/tempora/__pycache__/schedule.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/tempora/__pycache__/schedule.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2e58717cddd6c50a043dd1a051162c7cb465369d
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/tempora/__pycache__/schedule.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/tempora/__pycache__/timing.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/tempora/__pycache__/timing.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f6fe7a3a2a413febf78dc2381280db54a02184e1
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/tempora/__pycache__/timing.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/tempora/__pycache__/utc.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/tempora/__pycache__/utc.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..17c2a01937bca562edf00aa72984c9a9c15f77f0
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/tempora/__pycache__/utc.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/tempora/schedule.py b/monEnvTP/lib/python3.8/site-packages/tempora/schedule.py
new file mode 100644
index 0000000000000000000000000000000000000000..a94c981935fe7cea32b5ad78d664909ae913f933
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/tempora/schedule.py
@@ -0,0 +1,227 @@
+"""
+Classes for calling functions a schedule. Has time zone support.
+
+For example, to run a job at 08:00 every morning in 'Asia/Calcutta':
+
+>>> job = lambda: print("time is now", datetime.datetime())
+>>> time = datetime.time(8, tzinfo=pytz.timezone('Asia/Calcutta'))
+>>> cmd = PeriodicCommandFixedDelay.daily_at(time, job)
+>>> sched = InvokeScheduler()
+>>> sched.add(cmd)
+>>> while True: # doctest: +SKIP
+... sched.run_pending()
+... time.sleep(.1)
+"""
+
+import datetime
+import numbers
+import abc
+import bisect
+
+import pytz
+
+
+def now():
+ """
+ Provide the current timezone-aware datetime.
+
+ A client may override this function to change the default behavior,
+ such as to use local time or timezone-naïve times.
+ """
+ return datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
+
+
+def from_timestamp(ts):
+ """
+ Convert a numeric timestamp to a timezone-aware datetime.
+
+ A client may override this function to change the default behavior,
+ such as to use local time or timezone-naïve times.
+ """
+ return datetime.datetime.utcfromtimestamp(ts).replace(tzinfo=pytz.utc)
+
+
+class DelayedCommand(datetime.datetime):
+ """
+ A command to be executed after some delay (seconds or timedelta).
+ """
+
+ @classmethod
+ def from_datetime(cls, other):
+ return cls(
+ other.year,
+ other.month,
+ other.day,
+ other.hour,
+ other.minute,
+ other.second,
+ other.microsecond,
+ other.tzinfo,
+ )
+
+ @classmethod
+ def after(cls, delay, target):
+ if not isinstance(delay, datetime.timedelta):
+ delay = datetime.timedelta(seconds=delay)
+ due_time = now() + delay
+ cmd = cls.from_datetime(due_time)
+ cmd.delay = delay
+ cmd.target = target
+ return cmd
+
+ @staticmethod
+ def _from_timestamp(input):
+ """
+ If input is a real number, interpret it as a Unix timestamp
+ (seconds sinc Epoch in UTC) and return a timezone-aware
+ datetime object. Otherwise return input unchanged.
+ """
+ if not isinstance(input, numbers.Real):
+ return input
+ return from_timestamp(input)
+
+ @classmethod
+ def at_time(cls, at, target):
+ """
+ Construct a DelayedCommand to come due at `at`, where `at` may be
+ a datetime or timestamp.
+ """
+ at = cls._from_timestamp(at)
+ cmd = cls.from_datetime(at)
+ cmd.delay = at - now()
+ cmd.target = target
+ return cmd
+
+ def due(self):
+ return now() >= self
+
+
+class PeriodicCommand(DelayedCommand):
+ """
+ Like a delayed command, but expect this command to run every delay
+ seconds.
+ """
+
+ def _next_time(self):
+ """
+ Add delay to self, localized
+ """
+ return self._localize(self + self.delay)
+
+ @staticmethod
+ def _localize(dt):
+ """
+ Rely on pytz.localize to ensure new result honors DST.
+ """
+ try:
+ tz = dt.tzinfo
+ return tz.localize(dt.replace(tzinfo=None))
+ except AttributeError:
+ return dt
+
+ def next(self):
+ cmd = self.__class__.from_datetime(self._next_time())
+ cmd.delay = self.delay
+ cmd.target = self.target
+ return cmd
+
+ def __setattr__(self, key, value):
+ if key == 'delay' and not value > datetime.timedelta():
+ raise ValueError(
+ "A PeriodicCommand must have a positive, " "non-zero delay."
+ )
+ super(PeriodicCommand, self).__setattr__(key, value)
+
+
+class PeriodicCommandFixedDelay(PeriodicCommand):
+ """
+ Like a periodic command, but don't calculate the delay based on
+ the current time. Instead use a fixed delay following the initial
+ run.
+ """
+
+ @classmethod
+ def at_time(cls, at, delay, target):
+ """
+ >>> cmd = PeriodicCommandFixedDelay.at_time(0, 30, None)
+ >>> cmd.delay.total_seconds()
+ 30.0
+ """
+ at = cls._from_timestamp(at)
+ cmd = cls.from_datetime(at)
+ if isinstance(delay, numbers.Number):
+ delay = datetime.timedelta(seconds=delay)
+ cmd.delay = delay
+ cmd.target = target
+ return cmd
+
+ @classmethod
+ def daily_at(cls, at, target):
+ """
+ Schedule a command to run at a specific time each day.
+
+ >>> from tempora import utc
+ >>> noon = utc.time(12, 0)
+ >>> cmd = PeriodicCommandFixedDelay.daily_at(noon, None)
+ >>> cmd.delay.total_seconds()
+ 86400.0
+ """
+ daily = datetime.timedelta(days=1)
+ # convert when to the next datetime matching this time
+ when = datetime.datetime.combine(datetime.date.today(), at)
+ when -= daily
+ while when < now():
+ when += daily
+ return cls.at_time(cls._localize(when), daily, target)
+
+
+class Scheduler:
+ """
+ A rudimentary abstract scheduler accepting DelayedCommands
+ and dispatching them on schedule.
+ """
+
+ def __init__(self):
+ self.queue = []
+
+ def add(self, command):
+ assert isinstance(command, DelayedCommand)
+ bisect.insort(self.queue, command)
+
+ def run_pending(self):
+ while self.queue:
+ command = self.queue[0]
+ if not command.due():
+ break
+ self.run(command)
+ if isinstance(command, PeriodicCommand):
+ self.add(command.next())
+ del self.queue[0]
+
+ @abc.abstractmethod
+ def run(self, command):
+ """
+ Run the command
+ """
+
+
+class InvokeScheduler(Scheduler):
+ """
+ Command targets are functions to be invoked on schedule.
+ """
+
+ def run(self, command):
+ command.target()
+
+
+class CallbackScheduler(Scheduler):
+ """
+ Command targets are passed to a dispatch callable on schedule.
+ """
+
+ def __init__(self, dispatch):
+ super().__init__()
+ self.dispatch = dispatch
+
+ def run(self, command):
+ self.dispatch(command.target)
diff --git a/monEnvTP/lib/python3.8/site-packages/tempora/tests/__pycache__/test_schedule.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/tempora/tests/__pycache__/test_schedule.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..fa62d95adada89ba3cc163842d0d117ba9980c73
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/tempora/tests/__pycache__/test_schedule.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/tempora/tests/__pycache__/test_timing.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/tempora/tests/__pycache__/test_timing.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9c3f385b7c2962da5f778c41a42755d3a0b72e00
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/tempora/tests/__pycache__/test_timing.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/tempora/tests/test_schedule.py b/monEnvTP/lib/python3.8/site-packages/tempora/tests/test_schedule.py
new file mode 100644
index 0000000000000000000000000000000000000000..0ce35435dd376c89414176be22b8f6491608e0ba
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/tempora/tests/test_schedule.py
@@ -0,0 +1,149 @@
+import time
+import random
+import datetime
+from unittest import mock
+
+import pytest
+import pytz
+import freezegun
+
+from tempora import schedule
+
+
+do_nothing = type(None)
+
+
+def test_delayed_command_order():
+ """
+ delayed commands should be sorted by delay time
+ """
+ delays = [random.randint(0, 99) for x in range(5)]
+ cmds = sorted(
+ [schedule.DelayedCommand.after(delay, do_nothing) for delay in delays]
+ )
+ assert [c.delay.seconds for c in cmds] == sorted(delays)
+
+
+def test_periodic_command_delay():
+ "A PeriodicCommand must have a positive, non-zero delay."
+ with pytest.raises(ValueError) as exc_info:
+ schedule.PeriodicCommand.after(0, None)
+ assert str(exc_info.value) == test_periodic_command_delay.__doc__
+
+
+def test_periodic_command_fixed_delay():
+ """
+ Test that we can construct a periodic command with a fixed initial
+ delay.
+ """
+ fd = schedule.PeriodicCommandFixedDelay.at_time(
+ at=schedule.now(), delay=datetime.timedelta(seconds=2), target=lambda: None
+ )
+ assert fd.due() is True
+ assert fd.next().due() is False
+
+
+class TestCommands:
+ def test_delayed_command_from_timestamp(self):
+ """
+ Ensure a delayed command can be constructed from a timestamp.
+ """
+ t = time.time()
+ schedule.DelayedCommand.at_time(t, do_nothing)
+
+ def test_command_at_noon(self):
+ """
+ Create a periodic command that's run at noon every day.
+ """
+ when = datetime.time(12, 0, tzinfo=pytz.utc)
+ cmd = schedule.PeriodicCommandFixedDelay.daily_at(when, target=None)
+ assert cmd.due() is False
+ next_cmd = cmd.next()
+ daily = datetime.timedelta(days=1)
+ day_from_now = schedule.now() + daily
+ two_days_from_now = day_from_now + daily
+ assert day_from_now < next_cmd < two_days_from_now
+
+ @pytest.mark.parametrize("hour", range(10, 14))
+ @pytest.mark.parametrize("tz_offset", (14, -14))
+ def test_command_at_noon_distant_local(self, hour, tz_offset):
+ """
+ Run test_command_at_noon, but with the local timezone
+ more than 12 hours away from UTC.
+ """
+ with freezegun.freeze_time(f"2020-01-10 {hour:02}:01", tz_offset=tz_offset):
+ self.test_command_at_noon()
+
+
+class TestTimezones:
+ def test_alternate_timezone_west(self):
+ target_tz = pytz.timezone('US/Pacific')
+ target = schedule.now().astimezone(target_tz)
+ cmd = schedule.DelayedCommand.at_time(target, target=None)
+ assert cmd.due()
+
+ def test_alternate_timezone_east(self):
+ target_tz = pytz.timezone('Europe/Amsterdam')
+ target = schedule.now().astimezone(target_tz)
+ cmd = schedule.DelayedCommand.at_time(target, target=None)
+ assert cmd.due()
+
+ def test_daylight_savings(self):
+ """
+ A command at 9am should always be 9am regardless of
+ a DST boundary.
+ """
+ with freezegun.freeze_time('2018-03-10 08:00:00'):
+ target_tz = pytz.timezone('US/Eastern')
+ target_time = datetime.time(9, tzinfo=target_tz)
+ cmd = schedule.PeriodicCommandFixedDelay.daily_at(
+ target_time, target=lambda: None
+ )
+
+ def naive(dt):
+ return dt.replace(tzinfo=None)
+
+ assert naive(cmd) == datetime.datetime(2018, 3, 10, 9, 0, 0)
+ next_ = cmd.next()
+ assert naive(next_) == datetime.datetime(2018, 3, 11, 9, 0, 0)
+ assert next_ - cmd == datetime.timedelta(hours=23)
+
+
+class TestScheduler:
+ def test_invoke_scheduler(self):
+ sched = schedule.InvokeScheduler()
+ target = mock.MagicMock()
+ cmd = schedule.DelayedCommand.after(0, target)
+ sched.add(cmd)
+ sched.run_pending()
+ target.assert_called_once()
+ assert not sched.queue
+
+ def test_callback_scheduler(self):
+ callback = mock.MagicMock()
+ sched = schedule.CallbackScheduler(callback)
+ target = mock.MagicMock()
+ cmd = schedule.DelayedCommand.after(0, target)
+ sched.add(cmd)
+ sched.run_pending()
+ callback.assert_called_once_with(target)
+
+ def test_periodic_command(self):
+ sched = schedule.InvokeScheduler()
+ target = mock.MagicMock()
+
+ before = datetime.datetime.utcnow()
+
+ cmd = schedule.PeriodicCommand.after(10, target)
+ sched.add(cmd)
+ sched.run_pending()
+ target.assert_not_called()
+
+ with freezegun.freeze_time(before + datetime.timedelta(seconds=15)):
+ sched.run_pending()
+ assert sched.queue
+ target.assert_called_once()
+
+ with freezegun.freeze_time(before + datetime.timedelta(seconds=25)):
+ sched.run_pending()
+ assert target.call_count == 2
diff --git a/monEnvTP/lib/python3.8/site-packages/tempora/tests/test_timing.py b/monEnvTP/lib/python3.8/site-packages/tempora/tests/test_timing.py
new file mode 100644
index 0000000000000000000000000000000000000000..43bf7efcf8aa1e5191466ba0018280eba7ff0614
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/tempora/tests/test_timing.py
@@ -0,0 +1,50 @@
+import datetime
+import time
+import contextlib
+import os
+from unittest import mock
+
+import pytest
+from tempora import timing
+
+
+def test_IntervalGovernor():
+ """
+ IntervalGovernor should prevent a function from being called more than
+ once per interval.
+ """
+ func_under_test = mock.MagicMock()
+ # to look like a function, it needs a __name__ attribute
+ func_under_test.__name__ = 'func_under_test'
+ interval = datetime.timedelta(seconds=1)
+ governed = timing.IntervalGovernor(interval)(func_under_test)
+ governed('a')
+ governed('b')
+ governed(3, 'sir')
+ func_under_test.assert_called_once_with('a')
+
+
+@pytest.fixture
+def alt_tz(monkeypatch):
+ hasattr(time, 'tzset') or pytest.skip("tzset not available")
+
+ @contextlib.contextmanager
+ def change():
+ val = 'AEST-10AEDT-11,M10.5.0,M3.5.0'
+ with monkeypatch.context() as ctx:
+ ctx.setitem(os.environ, 'TZ', val)
+ time.tzset()
+ yield
+ time.tzset()
+
+ return change()
+
+
+def test_Stopwatch_timezone_change(alt_tz):
+ """
+ The stopwatch should provide a consistent duration even
+ if the timezone changes.
+ """
+ watch = timing.Stopwatch()
+ with alt_tz:
+ assert abs(watch.split().total_seconds()) < 0.1
diff --git a/monEnvTP/lib/python3.8/site-packages/tempora/timing.py b/monEnvTP/lib/python3.8/site-packages/tempora/timing.py
new file mode 100644
index 0000000000000000000000000000000000000000..6b3147a9d1e753cd51a6ee361c79193aa3a1361d
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/tempora/timing.py
@@ -0,0 +1,266 @@
+import datetime
+import functools
+import numbers
+import time
+import collections.abc
+import contextlib
+
+import jaraco.functools
+
+
+class Stopwatch:
+ """
+ A simple stopwatch which starts automatically.
+
+ >>> w = Stopwatch()
+ >>> _1_sec = datetime.timedelta(seconds=1)
+ >>> w.split() < _1_sec
+ True
+ >>> import time
+ >>> time.sleep(1.0)
+ >>> w.split() >= _1_sec
+ True
+ >>> w.stop() >= _1_sec
+ True
+ >>> w.reset()
+ >>> w.start()
+ >>> w.split() < _1_sec
+ True
+
+ It should be possible to launch the Stopwatch in a context:
+
+ >>> with Stopwatch() as watch:
+ ... assert isinstance(watch.split(), datetime.timedelta)
+
+ In that case, the watch is stopped when the context is exited,
+ so to read the elapsed time:
+
+ >>> watch.elapsed
+ datetime.timedelta(...)
+ >>> watch.elapsed.seconds
+ 0
+ """
+
+ def __init__(self):
+ self.reset()
+ self.start()
+
+ def reset(self):
+ self.elapsed = datetime.timedelta(0)
+ with contextlib.suppress(AttributeError):
+ del self.start_time
+
+ def start(self):
+ self.start_time = datetime.datetime.utcnow()
+
+ def stop(self):
+ stop_time = datetime.datetime.utcnow()
+ self.elapsed += stop_time - self.start_time
+ del self.start_time
+ return self.elapsed
+
+ def split(self):
+ local_duration = datetime.datetime.utcnow() - self.start_time
+ return self.elapsed + local_duration
+
+ # context manager support
+ def __enter__(self):
+ self.start()
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self.stop()
+
+
+class IntervalGovernor:
+ """
+ Decorate a function to only allow it to be called once per
+ min_interval. Otherwise, it returns None.
+
+ >>> gov = IntervalGovernor(30)
+ >>> gov.min_interval.total_seconds()
+ 30.0
+ """
+
+ def __init__(self, min_interval):
+ if isinstance(min_interval, numbers.Number):
+ min_interval = datetime.timedelta(seconds=min_interval)
+ self.min_interval = min_interval
+ self.last_call = None
+
+ def decorate(self, func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ allow = not self.last_call or self.last_call.split() > self.min_interval
+ if allow:
+ self.last_call = Stopwatch()
+ return func(*args, **kwargs)
+
+ return wrapper
+
+ __call__ = decorate
+
+
+class Timer(Stopwatch):
+ """
+ Watch for a target elapsed time.
+
+ >>> t = Timer(0.1)
+ >>> t.expired()
+ False
+ >>> __import__('time').sleep(0.15)
+ >>> t.expired()
+ True
+ """
+
+ def __init__(self, target=float('Inf')):
+ self.target = self._accept(target)
+ super(Timer, self).__init__()
+
+ @staticmethod
+ def _accept(target):
+ """
+ Accept None or ∞ or datetime or numeric for target
+
+ >>> Timer._accept(datetime.timedelta(seconds=30))
+ 30.0
+ >>> Timer._accept(None)
+ inf
+ """
+ if isinstance(target, datetime.timedelta):
+ target = target.total_seconds()
+
+ if target is None:
+ # treat None as infinite target
+ target = float('Inf')
+
+ return target
+
+ def expired(self):
+ return self.split().total_seconds() > self.target
+
+
+class BackoffDelay(collections.abc.Iterator):
+ """
+ Exponential backoff delay.
+
+ Useful for defining delays between retries. Consider for use
+ with ``jaraco.functools.retry_call`` as the cleanup.
+
+ Default behavior has no effect; a delay or jitter must
+ be supplied for the call to be non-degenerate.
+
+ >>> bd = BackoffDelay()
+ >>> bd()
+ >>> bd()
+
+ The following instance will delay 10ms for the first call,
+ 20ms for the second, etc.
+
+ >>> bd = BackoffDelay(delay=0.01, factor=2)
+ >>> bd()
+ >>> bd()
+
+ Inspect and adjust the state of the delay anytime.
+
+ >>> bd.delay
+ 0.04
+ >>> bd.delay = 0.01
+
+ Set limit to prevent the delay from exceeding bounds.
+
+ >>> bd = BackoffDelay(delay=0.01, factor=2, limit=0.015)
+ >>> bd()
+ >>> bd.delay
+ 0.015
+
+ To reset the backoff, simply call ``.reset()``:
+
+ >>> bd.reset()
+ >>> bd.delay
+ 0.01
+
+ Iterate on the object to retrieve/advance the delay values.
+
+ >>> next(bd)
+ 0.01
+ >>> next(bd)
+ 0.015
+ >>> import itertools
+ >>> tuple(itertools.islice(bd, 3))
+ (0.015, 0.015, 0.015)
+
+ Limit may be a callable taking a number and returning
+ the limited number.
+
+ >>> at_least_one = lambda n: max(n, 1)
+ >>> bd = BackoffDelay(delay=0.01, factor=2, limit=at_least_one)
+ >>> next(bd)
+ 0.01
+ >>> next(bd)
+ 1
+
+ Pass a jitter to add or subtract seconds to the delay.
+
+ >>> bd = BackoffDelay(jitter=0.01)
+ >>> next(bd)
+ 0
+ >>> next(bd)
+ 0.01
+
+ Jitter may be a callable. To supply a non-deterministic jitter
+ between -0.5 and 0.5, consider:
+
+ >>> import random
+ >>> jitter=functools.partial(random.uniform, -0.5, 0.5)
+ >>> bd = BackoffDelay(jitter=jitter)
+ >>> next(bd)
+ 0
+ >>> 0 <= next(bd) <= 0.5
+ True
+ """
+
+ delay = 0
+
+ factor = 1
+ "Multiplier applied to delay"
+
+ jitter = 0
+ "Number or callable returning extra seconds to add to delay"
+
+ @jaraco.functools.save_method_args
+ def __init__(self, delay=0, factor=1, limit=float('inf'), jitter=0):
+ self.delay = delay
+ self.factor = factor
+ if isinstance(limit, numbers.Number):
+ limit_ = limit
+
+ def limit(n):
+ return max(0, min(limit_, n))
+
+ self.limit = limit
+ if isinstance(jitter, numbers.Number):
+ jitter_ = jitter
+
+ def jitter():
+ return jitter_
+
+ self.jitter = jitter
+
+ def __call__(self):
+ time.sleep(next(self))
+
+ def __next__(self):
+ delay = self.delay
+ self.bump()
+ return delay
+
+ def __iter__(self):
+ return self
+
+ def bump(self):
+ self.delay = self.limit(self.delay * self.factor + self.jitter())
+
+ def reset(self):
+ saved = self._saved___init__
+ self.__init__(*saved.args, **saved.kwargs)
diff --git a/monEnvTP/lib/python3.8/site-packages/tempora/utc.py b/monEnvTP/lib/python3.8/site-packages/tempora/utc.py
new file mode 100644
index 0000000000000000000000000000000000000000..a585fb54bc2bc2f74016570a5f5f17a1816b8d7c
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/tempora/utc.py
@@ -0,0 +1,36 @@
+"""
+Facilities for common time operations in UTC.
+
+Inspired by the `utc project <https://pypi.org/project/utc>`_.
+
+>>> dt = now()
+>>> dt == fromtimestamp(dt.timestamp())
+True
+>>> dt.tzinfo
+datetime.timezone.utc
+
+>>> from time import time as timestamp
+>>> now().timestamp() - timestamp() < 0.1
+True
+
+>>> (now() - fromtimestamp(timestamp())).total_seconds() < 0.1
+True
+
+>>> datetime(2018, 6, 26, 0).tzinfo
+datetime.timezone.utc
+
+>>> time(0, 0).tzinfo
+datetime.timezone.utc
+"""
+
+import datetime as std
+import functools
+
+
+__all__ = ['now', 'fromtimestamp', 'datetime', 'time']
+
+
+now = functools.partial(std.datetime.now, std.timezone.utc)
+fromtimestamp = functools.partial(std.datetime.fromtimestamp, tz=std.timezone.utc)
+datetime = functools.partial(std.datetime, tzinfo=std.timezone.utc)
+time = functools.partial(std.time, tzinfo=std.timezone.utc)
diff --git a/monEnvTP/lib/python3.8/site-packages/zc.lockfile-2.0-py3.7-nspkg.pth b/monEnvTP/lib/python3.8/site-packages/zc.lockfile-2.0-py3.7-nspkg.pth
new file mode 100644
index 0000000000000000000000000000000000000000..25541e3e76b213cd20a3bb818261432a71e8a5a2
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/zc.lockfile-2.0-py3.7-nspkg.pth
@@ -0,0 +1 @@
+import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('zc',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('zc', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('zc', [os.path.dirname(p)])));m = m or sys.modules.setdefault('zc', types.ModuleType('zc'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p)
diff --git a/monEnvTP/lib/python3.8/site-packages/zc.lockfile-2.0.dist-info/INSTALLER b/monEnvTP/lib/python3.8/site-packages/zc.lockfile-2.0.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/zc.lockfile-2.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/monEnvTP/lib/python3.8/site-packages/zc.lockfile-2.0.dist-info/LICENSE.txt b/monEnvTP/lib/python3.8/site-packages/zc.lockfile-2.0.dist-info/LICENSE.txt
new file mode 100644
index 0000000000000000000000000000000000000000..e1f9ad7b3b48377bfbfaa1b380c384ddea24060f
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/zc.lockfile-2.0.dist-info/LICENSE.txt
@@ -0,0 +1,44 @@
+Zope Public License (ZPL) Version 2.1
+
+A copyright notice accompanies this license document that identifies the
+copyright holders.
+
+This license has been certified as open source. It has also been designated as
+GPL compatible by the Free Software Foundation (FSF).
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1. Redistributions in source code must retain the accompanying copyright
+notice, this list of conditions, and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the accompanying copyright
+notice, this list of conditions, and the following disclaimer in the
+documentation and/or other materials provided with the distribution.
+
+3. Names of the copyright holders must not be used to endorse or promote
+products derived from this software without prior written permission from the
+copyright holders.
+
+4. The right to distribute this software or to use it for any purpose does not
+give you the right to use Servicemarks (sm) or Trademarks (tm) of the
+copyright
+holders. Use of them is covered by separate agreement with the copyright
+holders.
+
+5. If any files are modified, you must cause the modified files to carry
+prominent notices stating that you changed the files and the date of any
+change.
+
+Disclaimer
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY EXPRESSED
+OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY DIRECT, INDIRECT,
+INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
+EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/monEnvTP/lib/python3.8/site-packages/zc.lockfile-2.0.dist-info/METADATA b/monEnvTP/lib/python3.8/site-packages/zc.lockfile-2.0.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..c9d2a860aaaa42c0d04db3525fb99300678aa879
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/zc.lockfile-2.0.dist-info/METADATA
@@ -0,0 +1,211 @@
+Metadata-Version: 2.1
+Name: zc.lockfile
+Version: 2.0
+Summary: Basic inter-process locks
+Home-page: https://github.com/zopefoundation/zc.lockfile
+Author: Zope Foundation
+Author-email: zope-dev@zope.org
+License: ZPL 2.1
+Keywords: lock
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Zope Public License
+Classifier: Natural Language :: English
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Software Development
+Requires-Dist: setuptools
+Provides-Extra: test
+Requires-Dist: zope.testing ; extra == 'test'
+
+*************************
+Basic inter-process locks
+*************************
+
+The zc.lockfile package provides a basic portable implementation of
+interprocess locks using lock files. The purpose if not specifically
+to lock files, but to simply provide locks with an implementation
+based on file-locking primitives. Of course, these locks could be
+used to mediate access to *other* files. For example, the ZODB file
+storage implementation uses file locks to mediate access to
+file-storage database files. The database files and lock file files
+are separate files.
+
+.. contents::
+
+Detailed Documentation
+**********************
+
+Lock file support
+=================
+
+The ZODB lock_file module provides support for creating file system
+locks. These are locks that are implemented with lock files and
+OS-provided locking facilities. To create a lock, instantiate a
+LockFile object with a file name:
+
+ >>> import zc.lockfile
+ >>> lock = zc.lockfile.LockFile('lock')
+
+If we try to lock the same name, we'll get a lock error:
+
+ >>> import zope.testing.loggingsupport
+ >>> handler = zope.testing.loggingsupport.InstalledHandler('zc.lockfile')
+ >>> try:
+ ... zc.lockfile.LockFile('lock')
+ ... except zc.lockfile.LockError:
+ ... print("Can't lock file")
+ Can't lock file
+
+.. We don't log failure to acquire.
+
+ >>> for record in handler.records: # doctest: +ELLIPSIS
+ ... print(record.levelname+' '+record.getMessage())
+
+To release the lock, use it's close method:
+
+ >>> lock.close()
+
+The lock file is not removed. It is left behind:
+
+ >>> import os
+ >>> os.path.exists('lock')
+ True
+
+Of course, now that we've released the lock, we can create it again:
+
+ >>> lock = zc.lockfile.LockFile('lock')
+ >>> lock.close()
+
+.. Cleanup
+
+ >>> import os
+ >>> os.remove('lock')
+
+Hostname in lock file
+=====================
+
+In a container environment (e.g. Docker), the PID is typically always
+identical even if multiple containers are running under the same operating
+system instance.
+
+Clearly, inspecting lock files doesn't then help much in debugging. To identify
+the container which created the lock file, we need information about the
+container in the lock file. Since Docker uses the container identifier or name
+as the hostname, this information can be stored in the lock file in addition to
+or instead of the PID.
+
+Use the ``content_template`` keyword argument to ``LockFile`` to specify a
+custom lock file content format:
+
+ >>> lock = zc.lockfile.LockFile('lock', content_template='{pid};{hostname}')
+ >>> lock.close()
+
+If you now inspected the lock file, you would see e.g.:
+
+ $ cat lock
+ 123;myhostname
+
+
+Change History
+***************
+
+2.0 (2019-08-08)
+================
+
+- Extracted new ``SimpleLockFile`` that removes implicit behavior
+ writing to the lock file, and instead allows a subclass to define
+ that behavior.
+ (`#15 <https://github.com/zopefoundation/zc.lockfile/issues/15>`_)
+
+- ``SimpleLockFile`` and thus ``LockFile`` are now new-style classes.
+ Any clients relying on ``LockFile`` being an old-style class will
+ need to be adapted.
+
+- Drop support for Python 3.4.
+
+- Add support for Python 3.8b3.
+
+
+1.4 (2018-11-12)
+================
+
+- Claim support for Python 3.6 and 3.7.
+
+- Drop Python 2.6 and 3.3.
+
+
+1.3.0 (2018-04-23)
+==================
+
+- Stop logging failure to acquire locks. Clients can do that if they wish.
+
+- Claim support for Python 3.4 and 3.5.
+
+- Drop Python 3.2 support because pip no longer supports it.
+
+1.2.1 (2016-06-19)
+==================
+
+- Fixed: unlocking and locking didn't work when a multiprocessing
+ process was running (and presumably other conditions).
+
+1.2.0 (2016-06-09)
+==================
+
+- Added the ability to include the hostname in the lock file content.
+
+- Code and ReST markup cosmetics.
+ [alecghica]
+
+1.1.0 (2013-02-12)
+==================
+
+- Added Trove classifiers and made setup.py zest.releaser friendly.
+
+- Added Python 3.2, 3.3 and PyPy 1.9 support.
+
+- Removed Python 2.4 and Python 2.5 support.
+
+1.0.2 (2012-12-02)
+==================
+
+- Fixed: the fix included in 1.0.1 caused multiple pids to be written
+ to the lock file
+
+1.0.1 (2012-11-30)
+==================
+
+- Fixed: when there was lock contention, the pid in the lock file was
+ lost.
+
+ Thanks to Daniel Moisset reporting the problem and providing a fix
+ with tests.
+
+- Added test extra to declare test dependency on ``zope.testing``.
+
+- Using Python's ``doctest`` module instead of depreacted
+ ``zope.testing.doctest``.
+
+1.0.0 (2008-10-18)
+==================
+
+- Fixed a small bug in error logging.
+
+1.0.0b1 (2007-07-18)
+====================
+
+- Initial release
+
+
diff --git a/monEnvTP/lib/python3.8/site-packages/zc.lockfile-2.0.dist-info/RECORD b/monEnvTP/lib/python3.8/site-packages/zc.lockfile-2.0.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..f3e14c8a182b00605513d9c9834ace478a4b8495
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/zc.lockfile-2.0.dist-info/RECORD
@@ -0,0 +1,13 @@
+zc.lockfile-2.0-py3.7-nspkg.pth,sha256=i7g33RdQzJ33LZYHfQ_YmQhLOchcWGnBnib2zxQJCiY,519
+zc.lockfile-2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+zc.lockfile-2.0.dist-info/LICENSE.txt,sha256=PmcdsR32h1FswdtbPWXkqjg-rKPCDOo_r1Og9zNdCjw,2070
+zc.lockfile-2.0.dist-info/METADATA,sha256=63fZ0KCGJ5uSkXwwIWzo30rl2AKos6kPkVtI8v9b7vE,5875
+zc.lockfile-2.0.dist-info/RECORD,,
+zc.lockfile-2.0.dist-info/WHEEL,sha256=h_aVn5OB2IERUjMbi2pucmR_zzWJtk303YXvhh60NJ8,110
+zc.lockfile-2.0.dist-info/namespace_packages.txt,sha256=nptwHX5ivUBBZ49av8trjckjjvj_7OZ_f0bSLnluz1k,3
+zc.lockfile-2.0.dist-info/top_level.txt,sha256=nptwHX5ivUBBZ49av8trjckjjvj_7OZ_f0bSLnluz1k,3
+zc/lockfile/README.txt,sha256=ZkMWQvqi5yiaJU7MHIUT1TIRYz63Dfv7osT2lWVKaZw,2048
+zc/lockfile/__init__.py,sha256=1jB07j-pkHRKagoIcOqg2lJLsvdvzFAfQkFYu4oQWuI,3521
+zc/lockfile/__pycache__/__init__.cpython-38.pyc,,
+zc/lockfile/__pycache__/tests.cpython-38.pyc,,
+zc/lockfile/tests.py,sha256=X85pKXHk52H-kq80SuaWGw_dIwDskou5ih4_gaAEpE8,5814
diff --git a/monEnvTP/lib/python3.8/site-packages/zc.lockfile-2.0.dist-info/WHEEL b/monEnvTP/lib/python3.8/site-packages/zc.lockfile-2.0.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..78e6f69d1d8fe46bdd9dd3bbfdee02380aaede3b
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/zc.lockfile-2.0.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.33.4)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/monEnvTP/lib/python3.8/site-packages/zc.lockfile-2.0.dist-info/namespace_packages.txt b/monEnvTP/lib/python3.8/site-packages/zc.lockfile-2.0.dist-info/namespace_packages.txt
new file mode 100644
index 0000000000000000000000000000000000000000..7647cfac831dc6ad7162f3a7e0251ef19bde8ba8
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/zc.lockfile-2.0.dist-info/namespace_packages.txt
@@ -0,0 +1 @@
+zc
diff --git a/monEnvTP/lib/python3.8/site-packages/zc.lockfile-2.0.dist-info/top_level.txt b/monEnvTP/lib/python3.8/site-packages/zc.lockfile-2.0.dist-info/top_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..7647cfac831dc6ad7162f3a7e0251ef19bde8ba8
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/zc.lockfile-2.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+zc
diff --git a/monEnvTP/lib/python3.8/site-packages/zc/lockfile/README.txt b/monEnvTP/lib/python3.8/site-packages/zc/lockfile/README.txt
new file mode 100644
index 0000000000000000000000000000000000000000..89ef33e9c88fb17140ab6f94d39ad4a1e4e533c8
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/zc/lockfile/README.txt
@@ -0,0 +1,70 @@
+Lock file support
+=================
+
+The ZODB lock_file module provides support for creating file system
+locks. These are locks that are implemented with lock files and
+OS-provided locking facilities. To create a lock, instantiate a
+LockFile object with a file name:
+
+ >>> import zc.lockfile
+ >>> lock = zc.lockfile.LockFile('lock')
+
+If we try to lock the same name, we'll get a lock error:
+
+ >>> import zope.testing.loggingsupport
+ >>> handler = zope.testing.loggingsupport.InstalledHandler('zc.lockfile')
+ >>> try:
+ ... zc.lockfile.LockFile('lock')
+ ... except zc.lockfile.LockError:
+ ... print("Can't lock file")
+ Can't lock file
+
+.. We don't log failure to acquire.
+
+ >>> for record in handler.records: # doctest: +ELLIPSIS
+ ... print(record.levelname+' '+record.getMessage())
+
+To release the lock, use it's close method:
+
+ >>> lock.close()
+
+The lock file is not removed. It is left behind:
+
+ >>> import os
+ >>> os.path.exists('lock')
+ True
+
+Of course, now that we've released the lock, we can create it again:
+
+ >>> lock = zc.lockfile.LockFile('lock')
+ >>> lock.close()
+
+.. Cleanup
+
+ >>> import os
+ >>> os.remove('lock')
+
+Hostname in lock file
+=====================
+
+In a container environment (e.g. Docker), the PID is typically always
+identical even if multiple containers are running under the same operating
+system instance.
+
+Clearly, inspecting lock files doesn't then help much in debugging. To identify
+the container which created the lock file, we need information about the
+container in the lock file. Since Docker uses the container identifier or name
+as the hostname, this information can be stored in the lock file in addition to
+or instead of the PID.
+
+Use the ``content_template`` keyword argument to ``LockFile`` to specify a
+custom lock file content format:
+
+ >>> lock = zc.lockfile.LockFile('lock', content_template='{pid};{hostname}')
+ >>> lock.close()
+
+If you now inspected the lock file, you would see e.g.:
+
+ $ cat lock
+ 123;myhostname
+
diff --git a/monEnvTP/lib/python3.8/site-packages/zc/lockfile/__init__.py b/monEnvTP/lib/python3.8/site-packages/zc/lockfile/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..b541fa2ddfd094cb4bcc3ded23999e74a19f2107
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/zc/lockfile/__init__.py
@@ -0,0 +1,125 @@
+##############################################################################
+#
+# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE
+#
+##############################################################################
+
+import os
+import errno
+import logging
+logger = logging.getLogger("zc.lockfile")
+
+__metaclass__ = type
+
+class LockError(Exception):
+ """Couldn't get a lock
+ """
+
+try:
+ import fcntl
+except ImportError:
+ try:
+ import msvcrt
+ except ImportError:
+ def _lock_file(file):
+ raise TypeError('No file-locking support on this platform')
+ def _unlock_file(file):
+ raise TypeError('No file-locking support on this platform')
+
+ else:
+ # Windows
+ def _lock_file(file):
+ # Lock just the first byte
+ try:
+ msvcrt.locking(file.fileno(), msvcrt.LK_NBLCK, 1)
+ except IOError:
+ raise LockError("Couldn't lock %r" % file.name)
+
+ def _unlock_file(file):
+ try:
+ file.seek(0)
+ msvcrt.locking(file.fileno(), msvcrt.LK_UNLCK, 1)
+ except IOError:
+ raise LockError("Couldn't unlock %r" % file.name)
+
+else:
+ # Unix
+ _flags = fcntl.LOCK_EX | fcntl.LOCK_NB
+
+ def _lock_file(file):
+ try:
+ fcntl.flock(file.fileno(), _flags)
+ except IOError:
+ raise LockError("Couldn't lock %r" % file.name)
+
+ def _unlock_file(file):
+ fcntl.flock(file.fileno(), fcntl.LOCK_UN)
+
+class LazyHostName:
+ """Avoid importing socket and calling gethostname() unnecessarily"""
+ def __str__(self):
+ import socket
+ return socket.gethostname()
+
+
+class SimpleLockFile:
+
+ _fp = None
+
+ def __init__(self, path):
+ self._path = path
+ try:
+ # Try to open for writing without truncation:
+ fp = open(path, 'r+')
+ except IOError:
+ # If the file doesn't exist, we'll get an IO error, try a+
+ # Note that there may be a race here. Multiple processes
+ # could fail on the r+ open and open the file a+, but only
+ # one will get the the lock and write a pid.
+ fp = open(path, 'a+')
+
+ try:
+ _lock_file(fp)
+ self._fp = fp
+ except:
+ fp.close()
+ raise
+
+ # Lock acquired
+ self._on_lock()
+ fp.flush()
+
+ def close(self):
+ if self._fp is not None:
+ _unlock_file(self._fp)
+ self._fp.close()
+ self._fp = None
+
+ def _on_lock(self):
+ """
+ Allow subclasses to supply behavior to occur following
+ lock acquisition.
+ """
+
+
+class LockFile(SimpleLockFile):
+
+ def __init__(self, path, content_template='{pid}'):
+ self._content_template = content_template
+ super(LockFile, self).__init__(path)
+
+ def _on_lock(self):
+ content = self._content_template.format(
+ pid=os.getpid(),
+ hostname=LazyHostName(),
+ )
+ self._fp.write(" %s\n" % content)
+ self._fp.truncate()
diff --git a/monEnvTP/lib/python3.8/site-packages/zc/lockfile/__pycache__/__init__.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/zc/lockfile/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b10eb3cff89515164a2a920245fea390a62c6144
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/zc/lockfile/__pycache__/__init__.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/zc/lockfile/__pycache__/tests.cpython-38.pyc b/monEnvTP/lib/python3.8/site-packages/zc/lockfile/__pycache__/tests.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0ab881ae0fb89507aa2fe50898a311364ee8ffee
Binary files /dev/null and b/monEnvTP/lib/python3.8/site-packages/zc/lockfile/__pycache__/tests.cpython-38.pyc differ
diff --git a/monEnvTP/lib/python3.8/site-packages/zc/lockfile/tests.py b/monEnvTP/lib/python3.8/site-packages/zc/lockfile/tests.py
new file mode 100644
index 0000000000000000000000000000000000000000..4c890539c03b9da615fe7dd3ea846dd7dfd8a6f8
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/zc/lockfile/tests.py
@@ -0,0 +1,201 @@
+##############################################################################
+#
+# Copyright (c) 2004 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+import os, re, sys, unittest, doctest
+import zc.lockfile, time, threading
+from zope.testing import renormalizing, setupstack
+import tempfile
+try:
+ from unittest.mock import Mock, patch
+except ImportError:
+ from mock import Mock, patch
+
+checker = renormalizing.RENormalizing([
+ # Python 3 adds module path to error class name.
+ (re.compile("zc\.lockfile\.LockError:"),
+ r"LockError:"),
+ ])
+
+def inc():
+ while 1:
+ try:
+ lock = zc.lockfile.LockFile('f.lock')
+ except zc.lockfile.LockError:
+ continue
+ else:
+ break
+ f = open('f', 'r+b')
+ v = int(f.readline().strip())
+ time.sleep(0.01)
+ v += 1
+ f.seek(0)
+ f.write(('%d\n' % v).encode('ASCII'))
+ f.close()
+ lock.close()
+
+def many_threads_read_and_write():
+ r"""
+ >>> with open('f', 'w+b') as file:
+ ... _ = file.write(b'0\n')
+ >>> with open('f.lock', 'w+b') as file:
+ ... _ = file.write(b'0\n')
+
+ >>> n = 50
+ >>> threads = [threading.Thread(target=inc) for i in range(n)]
+ >>> _ = [thread.start() for thread in threads]
+ >>> _ = [thread.join() for thread in threads]
+ >>> with open('f', 'rb') as file:
+ ... saved = int(file.read().strip())
+ >>> saved == n
+ True
+
+ >>> os.remove('f')
+
+ We should only have one pid in the lock file:
+
+ >>> f = open('f.lock')
+ >>> len(f.read().strip().split())
+ 1
+ >>> f.close()
+
+ >>> os.remove('f.lock')
+
+ """
+
+def pid_in_lockfile():
+ r"""
+ >>> import os, zc.lockfile
+ >>> pid = os.getpid()
+ >>> lock = zc.lockfile.LockFile("f.lock")
+ >>> f = open("f.lock")
+ >>> _ = f.seek(1)
+ >>> f.read().strip() == str(pid)
+ True
+ >>> f.close()
+
+ Make sure that locking twice does not overwrite the old pid:
+
+ >>> lock = zc.lockfile.LockFile("f.lock")
+ Traceback (most recent call last):
+ ...
+ LockError: Couldn't lock 'f.lock'
+
+ >>> f = open("f.lock")
+ >>> _ = f.seek(1)
+ >>> f.read().strip() == str(pid)
+ True
+ >>> f.close()
+
+ >>> lock.close()
+ """
+
+
+def hostname_in_lockfile():
+ r"""
+ hostname is correctly written into the lock file when it's included in the
+ lock file content template
+
+ >>> import zc.lockfile
+ >>> with patch('socket.gethostname', Mock(return_value='myhostname')):
+ ... lock = zc.lockfile.LockFile("f.lock", content_template='{hostname}')
+ >>> f = open("f.lock")
+ >>> _ = f.seek(1)
+ >>> f.read().rstrip()
+ 'myhostname'
+ >>> f.close()
+
+ Make sure that locking twice does not overwrite the old hostname:
+
+ >>> lock = zc.lockfile.LockFile("f.lock", content_template='{hostname}')
+ Traceback (most recent call last):
+ ...
+ LockError: Couldn't lock 'f.lock'
+
+ >>> f = open("f.lock")
+ >>> _ = f.seek(1)
+ >>> f.read().rstrip()
+ 'myhostname'
+ >>> f.close()
+
+ >>> lock.close()
+ """
+
+
+class TestLogger(object):
+ def __init__(self):
+ self.log_entries = []
+
+ def exception(self, msg, *args):
+ self.log_entries.append((msg,) + args)
+
+
+class LockFileLogEntryTestCase(unittest.TestCase):
+ """Tests for logging in case of lock failure"""
+ def setUp(self):
+ self.here = os.getcwd()
+ self.tmp = tempfile.mkdtemp(prefix='zc.lockfile-test-')
+ os.chdir(self.tmp)
+
+ def tearDown(self):
+ os.chdir(self.here)
+ setupstack.rmtree(self.tmp)
+
+ def test_log_formatting(self):
+ # PID and hostname are parsed and logged from lock file on failure
+ with patch('os.getpid', Mock(return_value=123)):
+ with patch('socket.gethostname', Mock(return_value='myhostname')):
+ lock = zc.lockfile.LockFile('f.lock',
+ content_template='{pid}/{hostname}')
+ with open('f.lock') as f:
+ self.assertEqual(' 123/myhostname\n', f.read())
+
+ lock.close()
+
+ def test_unlock_and_lock_while_multiprocessing_process_running(self):
+ import multiprocessing
+
+ lock = zc.lockfile.LockFile('l')
+ q = multiprocessing.Queue()
+ p = multiprocessing.Process(target=q.get)
+ p.daemon = True
+ p.start()
+
+ # release and re-acquire should work (obviously)
+ lock.close()
+ lock = zc.lockfile.LockFile('l')
+ self.assertTrue(p.is_alive())
+
+ q.put(0)
+ lock.close()
+ p.join()
+
+ def test_simple_lock(self):
+ assert isinstance(zc.lockfile.SimpleLockFile, type)
+ lock = zc.lockfile.SimpleLockFile('s')
+ with self.assertRaises(zc.lockfile.LockError):
+ zc.lockfile.SimpleLockFile('s')
+ lock.close()
+ zc.lockfile.SimpleLockFile('s').close()
+
+
+def test_suite():
+ suite = unittest.TestSuite()
+ suite.addTest(doctest.DocFileSuite(
+ 'README.txt', checker=checker,
+ setUp=setupstack.setUpDirectory, tearDown=setupstack.tearDown))
+ suite.addTest(doctest.DocTestSuite(
+ setUp=setupstack.setUpDirectory, tearDown=setupstack.tearDown,
+ checker=checker))
+ # Add unittest test cases from this module
+ suite.addTest(unittest.defaultTestLoader.loadTestsFromName(__name__))
+ return suite
diff --git a/monEnvTP/lib/python3.8/site-packages/zipp-3.7.0.dist-info/INSTALLER b/monEnvTP/lib/python3.8/site-packages/zipp-3.7.0.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/zipp-3.7.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/monEnvTP/lib/python3.8/site-packages/zipp-3.7.0.dist-info/LICENSE b/monEnvTP/lib/python3.8/site-packages/zipp-3.7.0.dist-info/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..353924be0e59b9ad7e6c22848c2189398481821d
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/zipp-3.7.0.dist-info/LICENSE
@@ -0,0 +1,19 @@
+Copyright Jason R. Coombs
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
diff --git a/monEnvTP/lib/python3.8/site-packages/zipp-3.7.0.dist-info/METADATA b/monEnvTP/lib/python3.8/site-packages/zipp-3.7.0.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..b1308b5f6e2da8c30982a9bfb2b25aa9281d7cc5
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/zipp-3.7.0.dist-info/METADATA
@@ -0,0 +1,58 @@
+Metadata-Version: 2.1
+Name: zipp
+Version: 3.7.0
+Summary: Backport of pathlib-compatible object wrapper for zip files
+Home-page: https://github.com/jaraco/zipp
+Author: Jason R. Coombs
+Author-email: jaraco@jaraco.com
+License: UNKNOWN
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Requires-Python: >=3.7
+License-File: LICENSE
+Provides-Extra: docs
+Requires-Dist: sphinx ; extra == 'docs'
+Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs'
+Requires-Dist: rst.linker (>=1.9) ; extra == 'docs'
+Provides-Extra: testing
+Requires-Dist: pytest (>=6) ; extra == 'testing'
+Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing'
+Requires-Dist: pytest-flake8 ; extra == 'testing'
+Requires-Dist: pytest-cov ; extra == 'testing'
+Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing'
+Requires-Dist: jaraco.itertools ; extra == 'testing'
+Requires-Dist: func-timeout ; extra == 'testing'
+Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing'
+Requires-Dist: pytest-mypy ; (platform_python_implementation != "PyPy") and extra == 'testing'
+
+.. image:: https://img.shields.io/pypi/v/zipp.svg
+ :target: `PyPI link`_
+
+.. image:: https://img.shields.io/pypi/pyversions/zipp.svg
+ :target: `PyPI link`_
+
+.. _PyPI link: https://pypi.org/project/zipp
+
+.. image:: https://github.com/jaraco/zipp/workflows/tests/badge.svg
+ :target: https://github.com/jaraco/zipp/actions?query=workflow%3A%22tests%22
+ :alt: tests
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/psf/black
+ :alt: Code style: Black
+
+.. .. image:: https://readthedocs.org/projects/skeleton/badge/?version=latest
+.. :target: https://skeleton.readthedocs.io/en/latest/?badge=latest
+
+.. image:: https://img.shields.io/badge/skeleton-2021-informational
+ :target: https://blog.jaraco.com/skeleton
+
+
+A pathlib-compatible Zipfile object wrapper. Official backport of the standard library
+`Path object <https://docs.python.org/3.8/library/zipfile.html#path-objects>`_.
+
+
diff --git a/monEnvTP/lib/python3.8/site-packages/zipp-3.7.0.dist-info/RECORD b/monEnvTP/lib/python3.8/site-packages/zipp-3.7.0.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..29b415e827e4bb507e1f6a76a7976b31fcc9cb3b
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/zipp-3.7.0.dist-info/RECORD
@@ -0,0 +1,8 @@
+__pycache__/zipp.cpython-38.pyc,,
+zipp-3.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+zipp-3.7.0.dist-info/LICENSE,sha256=2z8CRrH5J48VhFuZ_sR4uLUG63ZIeZNyL4xuJUKF-vg,1050
+zipp-3.7.0.dist-info/METADATA,sha256=ZLzgaXTyZX_MxTU0lcGfhdPY4CjFrT_3vyQ2Fo49pl8,2261
+zipp-3.7.0.dist-info/RECORD,,
+zipp-3.7.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+zipp-3.7.0.dist-info/top_level.txt,sha256=iAbdoSHfaGqBfVb2XuR9JqSQHCoOsOtG6y9C_LSpqFw,5
+zipp.py,sha256=ajztOH-9I7KA_4wqDYygtHa6xUBVZgFpmZ8FE74HHHI,8425
diff --git a/monEnvTP/lib/python3.8/site-packages/zipp-3.7.0.dist-info/WHEEL b/monEnvTP/lib/python3.8/site-packages/zipp-3.7.0.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..becc9a66ea739ba941d48a749e248761cc6e658a
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/zipp-3.7.0.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.1)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/monEnvTP/lib/python3.8/site-packages/zipp-3.7.0.dist-info/top_level.txt b/monEnvTP/lib/python3.8/site-packages/zipp-3.7.0.dist-info/top_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..e82f676f82a3381fa909d1e6578c7a22044fafca
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/zipp-3.7.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+zipp
diff --git a/monEnvTP/lib/python3.8/site-packages/zipp.py b/monEnvTP/lib/python3.8/site-packages/zipp.py
new file mode 100644
index 0000000000000000000000000000000000000000..26b723c1fd3e25740e0268b8c9b50905c58c3d4a
--- /dev/null
+++ b/monEnvTP/lib/python3.8/site-packages/zipp.py
@@ -0,0 +1,329 @@
+import io
+import posixpath
+import zipfile
+import itertools
+import contextlib
+import sys
+import pathlib
+
+if sys.version_info < (3, 7):
+ from collections import OrderedDict
+else:
+ OrderedDict = dict
+
+
+__all__ = ['Path']
+
+
+def _parents(path):
+ """
+ Given a path with elements separated by
+ posixpath.sep, generate all parents of that path.
+
+ >>> list(_parents('b/d'))
+ ['b']
+ >>> list(_parents('/b/d/'))
+ ['/b']
+ >>> list(_parents('b/d/f/'))
+ ['b/d', 'b']
+ >>> list(_parents('b'))
+ []
+ >>> list(_parents(''))
+ []
+ """
+ return itertools.islice(_ancestry(path), 1, None)
+
+
+def _ancestry(path):
+ """
+ Given a path with elements separated by
+ posixpath.sep, generate all elements of that path
+
+ >>> list(_ancestry('b/d'))
+ ['b/d', 'b']
+ >>> list(_ancestry('/b/d/'))
+ ['/b/d', '/b']
+ >>> list(_ancestry('b/d/f/'))
+ ['b/d/f', 'b/d', 'b']
+ >>> list(_ancestry('b'))
+ ['b']
+ >>> list(_ancestry(''))
+ []
+ """
+ path = path.rstrip(posixpath.sep)
+ while path and path != posixpath.sep:
+ yield path
+ path, tail = posixpath.split(path)
+
+
+_dedupe = OrderedDict.fromkeys
+"""Deduplicate an iterable in original order"""
+
+
+def _difference(minuend, subtrahend):
+ """
+ Return items in minuend not in subtrahend, retaining order
+ with O(1) lookup.
+ """
+ return itertools.filterfalse(set(subtrahend).__contains__, minuend)
+
+
+class CompleteDirs(zipfile.ZipFile):
+ """
+ A ZipFile subclass that ensures that implied directories
+ are always included in the namelist.
+ """
+
+ @staticmethod
+ def _implied_dirs(names):
+ parents = itertools.chain.from_iterable(map(_parents, names))
+ as_dirs = (p + posixpath.sep for p in parents)
+ return _dedupe(_difference(as_dirs, names))
+
+ def namelist(self):
+ names = super(CompleteDirs, self).namelist()
+ return names + list(self._implied_dirs(names))
+
+ def _name_set(self):
+ return set(self.namelist())
+
+ def resolve_dir(self, name):
+ """
+ If the name represents a directory, return that name
+ as a directory (with the trailing slash).
+ """
+ names = self._name_set()
+ dirname = name + '/'
+ dir_match = name not in names and dirname in names
+ return dirname if dir_match else name
+
+ @classmethod
+ def make(cls, source):
+ """
+ Given a source (filename or zipfile), return an
+ appropriate CompleteDirs subclass.
+ """
+ if isinstance(source, CompleteDirs):
+ return source
+
+ if not isinstance(source, zipfile.ZipFile):
+ return cls(_pathlib_compat(source))
+
+ # Only allow for FastLookup when supplied zipfile is read-only
+ if 'r' not in source.mode:
+ cls = CompleteDirs
+
+ source.__class__ = cls
+ return source
+
+
+class FastLookup(CompleteDirs):
+ """
+ ZipFile subclass to ensure implicit
+ dirs exist and are resolved rapidly.
+ """
+
+ def namelist(self):
+ with contextlib.suppress(AttributeError):
+ return self.__names
+ self.__names = super(FastLookup, self).namelist()
+ return self.__names
+
+ def _name_set(self):
+ with contextlib.suppress(AttributeError):
+ return self.__lookup
+ self.__lookup = super(FastLookup, self)._name_set()
+ return self.__lookup
+
+
+def _pathlib_compat(path):
+ """
+ For path-like objects, convert to a filename for compatibility
+ on Python 3.6.1 and earlier.
+ """
+ try:
+ return path.__fspath__()
+ except AttributeError:
+ return str(path)
+
+
+class Path:
+ """
+ A pathlib-compatible interface for zip files.
+
+ Consider a zip file with this structure::
+
+ .
+ ├── a.txt
+ └── b
+ ├── c.txt
+ └── d
+ └── e.txt
+
+ >>> data = io.BytesIO()
+ >>> zf = zipfile.ZipFile(data, 'w')
+ >>> zf.writestr('a.txt', 'content of a')
+ >>> zf.writestr('b/c.txt', 'content of c')
+ >>> zf.writestr('b/d/e.txt', 'content of e')
+ >>> zf.filename = 'mem/abcde.zip'
+
+ Path accepts the zipfile object itself or a filename
+
+ >>> root = Path(zf)
+
+ From there, several path operations are available.
+
+ Directory iteration (including the zip file itself):
+
+ >>> a, b = root.iterdir()
+ >>> a
+ Path('mem/abcde.zip', 'a.txt')
+ >>> b
+ Path('mem/abcde.zip', 'b/')
+
+ name property:
+
+ >>> b.name
+ 'b'
+
+ join with divide operator:
+
+ >>> c = b / 'c.txt'
+ >>> c
+ Path('mem/abcde.zip', 'b/c.txt')
+ >>> c.name
+ 'c.txt'
+
+ Read text:
+
+ >>> c.read_text()
+ 'content of c'
+
+ existence:
+
+ >>> c.exists()
+ True
+ >>> (b / 'missing.txt').exists()
+ False
+
+ Coercion to string:
+
+ >>> import os
+ >>> str(c).replace(os.sep, posixpath.sep)
+ 'mem/abcde.zip/b/c.txt'
+
+ At the root, ``name``, ``filename``, and ``parent``
+ resolve to the zipfile. Note these attributes are not
+ valid and will raise a ``ValueError`` if the zipfile
+ has no filename.
+
+ >>> root.name
+ 'abcde.zip'
+ >>> str(root.filename).replace(os.sep, posixpath.sep)
+ 'mem/abcde.zip'
+ >>> str(root.parent)
+ 'mem'
+ """
+
+ __repr = "{self.__class__.__name__}({self.root.filename!r}, {self.at!r})"
+
+ def __init__(self, root, at=""):
+ """
+ Construct a Path from a ZipFile or filename.
+
+ Note: When the source is an existing ZipFile object,
+ its type (__class__) will be mutated to a
+ specialized type. If the caller wishes to retain the
+ original type, the caller should either create a
+ separate ZipFile object or pass a filename.
+ """
+ self.root = FastLookup.make(root)
+ self.at = at
+
+ def open(self, mode='r', *args, pwd=None, **kwargs):
+ """
+ Open this entry as text or binary following the semantics
+ of ``pathlib.Path.open()`` by passing arguments through
+ to io.TextIOWrapper().
+ """
+ if self.is_dir():
+ raise IsADirectoryError(self)
+ zip_mode = mode[0]
+ if not self.exists() and zip_mode == 'r':
+ raise FileNotFoundError(self)
+ stream = self.root.open(self.at, zip_mode, pwd=pwd)
+ if 'b' in mode:
+ if args or kwargs:
+ raise ValueError("encoding args invalid for binary operation")
+ return stream
+ return io.TextIOWrapper(stream, *args, **kwargs)
+
+ @property
+ def name(self):
+ return pathlib.Path(self.at).name or self.filename.name
+
+ @property
+ def suffix(self):
+ return pathlib.Path(self.at).suffix or self.filename.suffix
+
+ @property
+ def suffixes(self):
+ return pathlib.Path(self.at).suffixes or self.filename.suffixes
+
+ @property
+ def stem(self):
+ return pathlib.Path(self.at).stem or self.filename.stem
+
+ @property
+ def filename(self):
+ return pathlib.Path(self.root.filename).joinpath(self.at)
+
+ def read_text(self, *args, **kwargs):
+ with self.open('r', *args, **kwargs) as strm:
+ return strm.read()
+
+ def read_bytes(self):
+ with self.open('rb') as strm:
+ return strm.read()
+
+ def _is_child(self, path):
+ return posixpath.dirname(path.at.rstrip("/")) == self.at.rstrip("/")
+
+ def _next(self, at):
+ return self.__class__(self.root, at)
+
+ def is_dir(self):
+ return not self.at or self.at.endswith("/")
+
+ def is_file(self):
+ return self.exists() and not self.is_dir()
+
+ def exists(self):
+ return self.at in self.root._name_set()
+
+ def iterdir(self):
+ if not self.is_dir():
+ raise ValueError("Can't listdir a file")
+ subs = map(self._next, self.root.namelist())
+ return filter(self._is_child, subs)
+
+ def __str__(self):
+ return posixpath.join(self.root.filename, self.at)
+
+ def __repr__(self):
+ return self.__repr.format(self=self)
+
+ def joinpath(self, *other):
+ next = posixpath.join(self.at, *map(_pathlib_compat, other))
+ return self._next(self.root.resolve_dir(next))
+
+ __truediv__ = joinpath
+
+ @property
+ def parent(self):
+ if not self.at:
+ return self.filename.parent
+ parent_at = posixpath.dirname(self.at.rstrip('/'))
+ if parent_at:
+ parent_at += '/'
+ return self._next(parent_at)
diff --git a/monEnvTP/lib64 b/monEnvTP/lib64
new file mode 120000
index 0000000000000000000000000000000000000000..7951405f85a569efbacc12fccfee529ef1866602
--- /dev/null
+++ b/monEnvTP/lib64
@@ -0,0 +1 @@
+lib
\ No newline at end of file
diff --git a/monEnvTP/pyvenv.cfg b/monEnvTP/pyvenv.cfg
new file mode 100644
index 0000000000000000000000000000000000000000..853404e23c0366b53610217d8f603a2f9dd1feeb
--- /dev/null
+++ b/monEnvTP/pyvenv.cfg
@@ -0,0 +1,3 @@
+home = /usr/bin
+include-system-site-packages = false
+version = 3.8.10
diff --git a/rendu.md b/monEnvTP/rendu.md
similarity index 100%
rename from rendu.md
rename to monEnvTP/rendu.md
diff --git a/serveur.py b/monEnvTP/serveur.py
similarity index 100%
rename from serveur.py
rename to monEnvTP/serveur.py
diff --git a/monEnvTP/share/python-wheels/CacheControl-0.12.6-py2.py3-none-any.whl b/monEnvTP/share/python-wheels/CacheControl-0.12.6-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..69e46f489d3d286241a238ef91f81582a1210907
Binary files /dev/null and b/monEnvTP/share/python-wheels/CacheControl-0.12.6-py2.py3-none-any.whl differ
diff --git a/monEnvTP/share/python-wheels/appdirs-1.4.3-py2.py3-none-any.whl b/monEnvTP/share/python-wheels/appdirs-1.4.3-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..9ccc8e2245a1da432e59317dc3867215bcd457b3
Binary files /dev/null and b/monEnvTP/share/python-wheels/appdirs-1.4.3-py2.py3-none-any.whl differ
diff --git a/monEnvTP/share/python-wheels/certifi-2019.11.28-py2.py3-none-any.whl b/monEnvTP/share/python-wheels/certifi-2019.11.28-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..2cef4d27e29e236e56cd5e5737f127fd450da4f6
Binary files /dev/null and b/monEnvTP/share/python-wheels/certifi-2019.11.28-py2.py3-none-any.whl differ
diff --git a/monEnvTP/share/python-wheels/chardet-3.0.4-py2.py3-none-any.whl b/monEnvTP/share/python-wheels/chardet-3.0.4-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..89dfc34a4e67bc01bf6ba2442621cc6dd544bb5a
Binary files /dev/null and b/monEnvTP/share/python-wheels/chardet-3.0.4-py2.py3-none-any.whl differ
diff --git a/monEnvTP/share/python-wheels/colorama-0.4.3-py2.py3-none-any.whl b/monEnvTP/share/python-wheels/colorama-0.4.3-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..ad508f187ce764b84301587108c0f6c65413a4dd
Binary files /dev/null and b/monEnvTP/share/python-wheels/colorama-0.4.3-py2.py3-none-any.whl differ
diff --git a/monEnvTP/share/python-wheels/contextlib2-0.6.0-py2.py3-none-any.whl b/monEnvTP/share/python-wheels/contextlib2-0.6.0-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..5d08a03a6ce658e25d62fa7d96a631da53faef14
Binary files /dev/null and b/monEnvTP/share/python-wheels/contextlib2-0.6.0-py2.py3-none-any.whl differ
diff --git a/monEnvTP/share/python-wheels/distlib-0.3.0-py2.py3-none-any.whl b/monEnvTP/share/python-wheels/distlib-0.3.0-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..75a6e407e4168357d41d0efa21a55098358e8d98
Binary files /dev/null and b/monEnvTP/share/python-wheels/distlib-0.3.0-py2.py3-none-any.whl differ
diff --git a/monEnvTP/share/python-wheels/distro-1.4.0-py2.py3-none-any.whl b/monEnvTP/share/python-wheels/distro-1.4.0-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..e94a3a498b11426479171ecac558e7eaa2b9ee40
Binary files /dev/null and b/monEnvTP/share/python-wheels/distro-1.4.0-py2.py3-none-any.whl differ
diff --git a/monEnvTP/share/python-wheels/html5lib-1.0.1-py2.py3-none-any.whl b/monEnvTP/share/python-wheels/html5lib-1.0.1-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..f56e2979af697043882883ee1af06b30bb6edf83
Binary files /dev/null and b/monEnvTP/share/python-wheels/html5lib-1.0.1-py2.py3-none-any.whl differ
diff --git a/monEnvTP/share/python-wheels/idna-2.8-py2.py3-none-any.whl b/monEnvTP/share/python-wheels/idna-2.8-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..caacbf0bccdbd429e71cb6e12e0ebd799fdbb1bb
Binary files /dev/null and b/monEnvTP/share/python-wheels/idna-2.8-py2.py3-none-any.whl differ
diff --git a/monEnvTP/share/python-wheels/ipaddr-2.2.0-py2.py3-none-any.whl b/monEnvTP/share/python-wheels/ipaddr-2.2.0-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..df29e28f4355a25643fcdb4d0015aa21f066ba39
Binary files /dev/null and b/monEnvTP/share/python-wheels/ipaddr-2.2.0-py2.py3-none-any.whl differ
diff --git a/monEnvTP/share/python-wheels/lockfile-0.12.2-py2.py3-none-any.whl b/monEnvTP/share/python-wheels/lockfile-0.12.2-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..4d2a1c63aaf03c39583a76ca58a604b198d2ed89
Binary files /dev/null and b/monEnvTP/share/python-wheels/lockfile-0.12.2-py2.py3-none-any.whl differ
diff --git a/monEnvTP/share/python-wheels/msgpack-0.6.2-py2.py3-none-any.whl b/monEnvTP/share/python-wheels/msgpack-0.6.2-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..593d5ec8779d521e32c579528919f29d043c8602
Binary files /dev/null and b/monEnvTP/share/python-wheels/msgpack-0.6.2-py2.py3-none-any.whl differ
diff --git a/monEnvTP/share/python-wheels/packaging-20.3-py2.py3-none-any.whl b/monEnvTP/share/python-wheels/packaging-20.3-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..153af458ae299aef4909c436c4d145c3c8bf0fb4
Binary files /dev/null and b/monEnvTP/share/python-wheels/packaging-20.3-py2.py3-none-any.whl differ
diff --git a/monEnvTP/share/python-wheels/pep517-0.8.2-py2.py3-none-any.whl b/monEnvTP/share/python-wheels/pep517-0.8.2-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..9a41c2531755f2c65292bd62af636d08517778d8
Binary files /dev/null and b/monEnvTP/share/python-wheels/pep517-0.8.2-py2.py3-none-any.whl differ
diff --git a/monEnvTP/share/python-wheels/pip-20.0.2-py2.py3-none-any.whl b/monEnvTP/share/python-wheels/pip-20.0.2-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..3fcefaed0c3e5778857407a96ff5f85c26cfb219
Binary files /dev/null and b/monEnvTP/share/python-wheels/pip-20.0.2-py2.py3-none-any.whl differ
diff --git a/monEnvTP/share/python-wheels/pkg_resources-0.0.0-py2.py3-none-any.whl b/monEnvTP/share/python-wheels/pkg_resources-0.0.0-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..e066bc741edc8d6dfade490f18ec62ea7ffffc1f
Binary files /dev/null and b/monEnvTP/share/python-wheels/pkg_resources-0.0.0-py2.py3-none-any.whl differ
diff --git a/monEnvTP/share/python-wheels/progress-1.5-py2.py3-none-any.whl b/monEnvTP/share/python-wheels/progress-1.5-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..ec396717939731c24396ffbf5f5dec912b0e2147
Binary files /dev/null and b/monEnvTP/share/python-wheels/progress-1.5-py2.py3-none-any.whl differ
diff --git a/monEnvTP/share/python-wheels/pyparsing-2.4.6-py2.py3-none-any.whl b/monEnvTP/share/python-wheels/pyparsing-2.4.6-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..f6ab98884365c95348a1af2c98f82663863d5a61
Binary files /dev/null and b/monEnvTP/share/python-wheels/pyparsing-2.4.6-py2.py3-none-any.whl differ
diff --git a/monEnvTP/share/python-wheels/requests-2.22.0-py2.py3-none-any.whl b/monEnvTP/share/python-wheels/requests-2.22.0-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..8dfb42d07e45ed16f222a4b8c145bdc581dbaf57
Binary files /dev/null and b/monEnvTP/share/python-wheels/requests-2.22.0-py2.py3-none-any.whl differ
diff --git a/monEnvTP/share/python-wheels/retrying-1.3.3-py2.py3-none-any.whl b/monEnvTP/share/python-wheels/retrying-1.3.3-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..48b9fc004da63c7ae3dac0749dafed6ba66cacda
Binary files /dev/null and b/monEnvTP/share/python-wheels/retrying-1.3.3-py2.py3-none-any.whl differ
diff --git a/monEnvTP/share/python-wheels/setuptools-44.0.0-py2.py3-none-any.whl b/monEnvTP/share/python-wheels/setuptools-44.0.0-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..b352c67e0fc367c841b8dbb6f76f8788bf8effe6
Binary files /dev/null and b/monEnvTP/share/python-wheels/setuptools-44.0.0-py2.py3-none-any.whl differ
diff --git a/monEnvTP/share/python-wheels/six-1.14.0-py2.py3-none-any.whl b/monEnvTP/share/python-wheels/six-1.14.0-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..0e572b2e940857d726d5e3006096e74247c60dff
Binary files /dev/null and b/monEnvTP/share/python-wheels/six-1.14.0-py2.py3-none-any.whl differ
diff --git a/monEnvTP/share/python-wheels/toml-0.10.0-py2.py3-none-any.whl b/monEnvTP/share/python-wheels/toml-0.10.0-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..929a5520c558c347a6bd5cfea1c8824ff083b8f0
Binary files /dev/null and b/monEnvTP/share/python-wheels/toml-0.10.0-py2.py3-none-any.whl differ
diff --git a/monEnvTP/share/python-wheels/urllib3-1.25.8-py2.py3-none-any.whl b/monEnvTP/share/python-wheels/urllib3-1.25.8-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..e0851877d07e201e15640b32bab46edabbafdfe6
Binary files /dev/null and b/monEnvTP/share/python-wheels/urllib3-1.25.8-py2.py3-none-any.whl differ
diff --git a/monEnvTP/share/python-wheels/webencodings-0.5.1-py2.py3-none-any.whl b/monEnvTP/share/python-wheels/webencodings-0.5.1-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..4fd5bfeb72c76d85a4433f322423ddc499fb43ac
Binary files /dev/null and b/monEnvTP/share/python-wheels/webencodings-0.5.1-py2.py3-none-any.whl differ
diff --git a/monEnvTP/share/python-wheels/wheel-0.34.2-py2.py3-none-any.whl b/monEnvTP/share/python-wheels/wheel-0.34.2-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..2fcda361e67827f696f6255a1b2cb5f90443dc8c
Binary files /dev/null and b/monEnvTP/share/python-wheels/wheel-0.34.2-py2.py3-none-any.whl differ